From a51ebb836d71eb4fb43b6d219059648d878b582d Mon Sep 17 00:00:00 2001 From: olloz26 Date: Wed, 10 Jul 2024 15:52:01 +0200 Subject: [PATCH 01/15] feat: add custom openBIS type --- .../renku_data_services/storage/rclone.py | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/components/renku_data_services/storage/rclone.py b/components/renku_data_services/storage/rclone.py index 58fac1171..3945c2ed2 100644 --- a/components/renku_data_services/storage/rclone.py +++ b/components/renku_data_services/storage/rclone.py @@ -126,6 +126,57 @@ def __patch_schema_add_switch_provider(spec: list[dict[str, Any]]) -> None: ) existing_endpoint_spec["Provider"] += ",Switch" + @staticmethod + def __patch_schema_add_openbis_type(spec: list[dict[str, Any]]) -> None: + """Adds a fake type to help with setting up openBIS storage.""" + spec.append({ + "Name": "openbis", + "Description": "openBIS", + "Prefix": "openbis", + "Options": [ + { + "Name": "host", + "Help": "openBIS host to connect to.\n\nE.g. \"openbis-eln-lims.ethz.ch\".", + "Provider": "", + "Default": "", + "Value": None, + "ShortOpt": "", + "Hide": 0, + "Required": True, + "IsPassword": False, + "NoPrefix": False, + "Advanced": False, + "Exclusive": False, + "Sensitive": False, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "session_token", + "Help": "openBIS session token", + "Provider": "", + "Default": "", + "Value": None, + "ShortOpt": "", + "Hide": 0, + "Required": False, + "IsPassword": True, + "NoPrefix": False, + "Advanced": False, + "Exclusive": False, + "Sensitive": False, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + ], + "CommandHelp": None, + "Aliases": None, + "Hide": False, + "MetadataInfo": None, + }) + @staticmethod def __patch_schema_remove_oauth_propeties(spec: list[dict[str, Any]]) -> None: """Removes OAuth2 fields since we can't do an oauth flow in the rclone CSI.""" From ea9c818c5c7306911144bfe61e15389bc5c91c9a Mon Sep 17 00:00:00 2001 From: olloz26 Date: Thu, 11 Jul 2024 15:59:18 +0200 Subject: [PATCH 02/15] style: fix --- .../renku_data_services/storage/rclone.py | 96 ++++++++++--------- 1 file changed, 49 insertions(+), 47 deletions(-) diff --git a/components/renku_data_services/storage/rclone.py b/components/renku_data_services/storage/rclone.py index 3945c2ed2..2c9f2ad34 100644 --- a/components/renku_data_services/storage/rclone.py +++ b/components/renku_data_services/storage/rclone.py @@ -129,53 +129,55 @@ def __patch_schema_add_switch_provider(spec: list[dict[str, Any]]) -> None: @staticmethod def __patch_schema_add_openbis_type(spec: list[dict[str, Any]]) -> None: """Adds a fake type to help with setting up openBIS storage.""" - spec.append({ - "Name": "openbis", - "Description": "openBIS", - "Prefix": "openbis", - "Options": [ - { - "Name": "host", - "Help": "openBIS host to connect to.\n\nE.g. 
\"openbis-eln-lims.ethz.ch\".", - "Provider": "", - "Default": "", - "Value": None, - "ShortOpt": "", - "Hide": 0, - "Required": True, - "IsPassword": False, - "NoPrefix": False, - "Advanced": False, - "Exclusive": False, - "Sensitive": False, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "session_token", - "Help": "openBIS session token", - "Provider": "", - "Default": "", - "Value": None, - "ShortOpt": "", - "Hide": 0, - "Required": False, - "IsPassword": True, - "NoPrefix": False, - "Advanced": False, - "Exclusive": False, - "Sensitive": False, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - ], - "CommandHelp": None, - "Aliases": None, - "Hide": False, - "MetadataInfo": None, - }) + spec.append( + { + "Name": "openbis", + "Description": "openBIS", + "Prefix": "openbis", + "Options": [ + { + "Name": "host", + "Help": 'openBIS host to connect to.\n\nE.g. "openbis-eln-lims.ethz.ch".', + "Provider": "", + "Default": "", + "Value": None, + "ShortOpt": "", + "Hide": 0, + "Required": True, + "IsPassword": False, + "NoPrefix": False, + "Advanced": False, + "Exclusive": False, + "Sensitive": False, + "DefaultStr": "", + "ValueStr": "", + "Type": "string", + }, + { + "Name": "session_token", + "Help": "openBIS session token", + "Provider": "", + "Default": "", + "Value": None, + "ShortOpt": "", + "Hide": 0, + "Required": False, + "IsPassword": True, + "NoPrefix": False, + "Advanced": False, + "Exclusive": False, + "Sensitive": False, + "DefaultStr": "", + "ValueStr": "", + "Type": "string", + }, + ], + "CommandHelp": None, + "Aliases": None, + "Hide": False, + "MetadataInfo": None, + } + ) @staticmethod def __patch_schema_remove_oauth_propeties(spec: list[dict[str, Any]]) -> None: From d47a2bddec2b37a6df9254ce38c9bba458268682 Mon Sep 17 00:00:00 2001 From: olloz26 Date: Thu, 11 Jul 2024 18:21:09 +0200 Subject: [PATCH 03/15] feat: add openBIS PAT request tool --- components/renku_data_services/utils/core.py | 52 ++++++++++++++++++++ 1 file changed, 52 insertions(+) diff --git a/components/renku_data_services/utils/core.py b/components/renku_data_services/utils/core.py index 4d3362bd4..b25418d97 100644 --- a/components/renku_data_services/utils/core.py +++ b/components/renku_data_services/utils/core.py @@ -1,6 +1,7 @@ """Shared utility functions.""" import functools +import datetime import os import ssl from collections.abc import Awaitable, Callable @@ -90,3 +91,54 @@ async def transaction_wrapper(self: _WithSessionMaker, *args: _P.args, **kwargs: return await f(self, *args, **kwargs) return transaction_wrapper + + +async def get_openbis_pat( + host: str, + session_id: str, + personal_access_token_session_name: str = "renku", + minimum_validity_in_days: int = 2, + timeout: int = 12, +) -> str: + url = f"https://{host}/openbis/openbis/rmi-application-server-v3.json" + + get_server_information = {"method": "getServerInformation", "params": [session_id], "id": "2", "jsonrpc": "2.0"} + + async with httpx.AsyncClient(verify=get_ssl_context()) as client: + response = await client.post(url, json=get_server_information, timeout=timeout) + if response.status_code == 200: + json1: dict[str, dict[str, str]] = response.json() + personal_access_tokens_max_validity_period = int( + json1["result"]["personal-access-tokens-max-validity-period"] + ) + + valid_from = datetime.datetime.now() + valid_to = valid_from + datetime.timedelta(seconds=personal_access_tokens_max_validity_period) + validity_in_days = (valid_to - valid_from).days + if validity_in_days >= 
minimum_validity_in_days: + create_personal_access_tokens = { + "method": "createPersonalAccessTokens", + "params": [ + session_id, + { + "@type": "as.dto.pat.create.PersonalAccessTokenCreation", + "sessionName": personal_access_token_session_name, + "validFromDate": int(valid_from.timestamp() * 1000), + "validToDate": int(valid_to.timestamp() * 1000), + }, + ], + "id": "2", + "jsonrpc": "2.0", + } + + response = await client.post(url, json=create_personal_access_tokens, timeout=timeout) + + if response.status_code == 200: + json2: dict[str, list[dict[str, str]]] = response.json() + return json2["result"][0]["permId"] + else: + raise Exception( + f"The maximum allowed validity period of a personal access token is less than {minimum_validity_in_days} days." + ) + + raise Exception("An openBIS personal access token related request failed.") From 6b986f4aaf9bb1957b44d621c4907c49a049d5cc Mon Sep 17 00:00:00 2001 From: olloz26 Date: Thu, 11 Jul 2024 18:42:10 +0200 Subject: [PATCH 04/15] style: fix --- components/renku_data_services/utils/core.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/components/renku_data_services/utils/core.py b/components/renku_data_services/utils/core.py index b25418d97..aa57eddaa 100644 --- a/components/renku_data_services/utils/core.py +++ b/components/renku_data_services/utils/core.py @@ -1,7 +1,7 @@ """Shared utility functions.""" -import functools import datetime +import functools import os import ssl from collections.abc import Awaitable, Callable @@ -100,6 +100,7 @@ async def get_openbis_pat( minimum_validity_in_days: int = 2, timeout: int = 12, ) -> str: + """Requests an openBIS PAT with an openBIS session ID.""" url = f"https://{host}/openbis/openbis/rmi-application-server-v3.json" get_server_information = {"method": "getServerInformation", "params": [session_id], "id": "2", "jsonrpc": "2.0"} @@ -138,7 +139,8 @@ async def get_openbis_pat( return json2["result"][0]["permId"] else: raise Exception( - f"The maximum allowed validity period of a personal access token is less than {minimum_validity_in_days} days." + "The maximum allowed validity period of a personal access token is less than " + f"{minimum_validity_in_days} days." 
) raise Exception("An openBIS personal access token related request failed.") From bb23d6e66b6ed27427f5eae63b1fbc6acc0d70f6 Mon Sep 17 00:00:00 2001 From: olloz26 Date: Wed, 31 Jul 2024 13:25:22 +0200 Subject: [PATCH 05/15] fix: correct a property value --- components/renku_data_services/storage/rclone.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/components/renku_data_services/storage/rclone.py b/components/renku_data_services/storage/rclone.py index 2c9f2ad34..716acacb8 100644 --- a/components/renku_data_services/storage/rclone.py +++ b/components/renku_data_services/storage/rclone.py @@ -166,7 +166,7 @@ def __patch_schema_add_openbis_type(spec: list[dict[str, Any]]) -> None: "NoPrefix": False, "Advanced": False, "Exclusive": False, - "Sensitive": False, + "Sensitive": True, "DefaultStr": "", "ValueStr": "", "Type": "string", From b08f2d09982483da188d176610964f14da0582ca Mon Sep 17 00:00:00 2001 From: olloz26 Date: Thu, 22 Aug 2024 16:12:33 +0200 Subject: [PATCH 06/15] feat: add expiration timestamps for secrets --- DEVELOPING.md | 6 +- ...829ed2f_add_secret_expiration_timestamp.py | 30 +++++ components/renku_data_services/secrets/db.py | 42 ++++-- .../renku_data_services/secrets/models.py | 1 + components/renku_data_services/secrets/orm.py | 10 +- .../renku_data_services/storage/rclone.py | 4 +- .../renku_data_services/users/api.spec.yaml | 22 +++- .../renku_data_services/users/apispec.py | 17 ++- .../renku_data_services/users/blueprints.py | 31 +++-- components/renku_data_services/utils/core.py | 89 +++++++------ .../data_api/test_secret.py | 122 ++++++++++++++---- .../data_api/test_storage_v2.py | 0 test/conftest.py | 12 ++ 13 files changed, 296 insertions(+), 90 deletions(-) create mode 100644 components/renku_data_services/migrations/versions/7bc32829ed2f_add_secret_expiration_timestamp.py create mode 100644 test/bases/renku_data_services/data_api/test_storage_v2.py diff --git a/DEVELOPING.md b/DEVELOPING.md index 55e7616b3..059189296 100644 --- a/DEVELOPING.md +++ b/DEVELOPING.md @@ -115,8 +115,10 @@ function if you prefer to keep your favorite shell. ## Running Tests You can run style checks using `make style_checks`. -To run the test test suite, use `make tests` (you likely need to run in the devcontainer for this to work, as it needs -some surrounding services to run). +To run the test suite, use `make tests` (you likely need to run in the devcontainer for this to work, as it needs some +surrounding services to run). +* Run a specific test e.g.: `poetry run pytest test/bases/renku_data_services/data_api/test_storage_v2.py::test_storage_v2_create_openbis_secret` +* Also run tests marked with `@pytest.mark.myskip`: `PYTEST_FORCE_RUN_MYSKIPS=1 make tests` ## Migrations diff --git a/components/renku_data_services/migrations/versions/7bc32829ed2f_add_secret_expiration_timestamp.py b/components/renku_data_services/migrations/versions/7bc32829ed2f_add_secret_expiration_timestamp.py new file mode 100644 index 000000000..0812b2adb --- /dev/null +++ b/components/renku_data_services/migrations/versions/7bc32829ed2f_add_secret_expiration_timestamp.py @@ -0,0 +1,30 @@ +"""add_secret_expiration_timestamp + +Revision ID: 7bc32829ed2f +Revises: 9058bf0a1a12 +Create Date: 2024-08-21 12:38:30.932694 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "7bc32829ed2f" +down_revision = "9058bf0a1a12" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "secrets", sa.Column("expiration_timestamp", sa.DateTime(timezone=True), nullable=True), schema="secrets" + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("secrets", "expiration_timestamp", schema="secrets") + # ### end Alembic commands ### diff --git a/components/renku_data_services/secrets/db.py b/components/renku_data_services/secrets/db.py index 2fdf8fe4e..45f32d3e9 100644 --- a/components/renku_data_services/secrets/db.py +++ b/components/renku_data_services/secrets/db.py @@ -1,10 +1,10 @@ """Database repo for secrets.""" from collections.abc import AsyncGenerator, Callable, Sequence -from datetime import UTC, datetime +from datetime import UTC, datetime, timedelta from typing import cast -from sqlalchemy import delete, select +from sqlalchemy import Select, delete, or_, select from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession from ulid import ULID @@ -25,11 +25,23 @@ def __init__( ) -> None: self.session_maker = session_maker + def _get_stmt(self, requested_by: APIUser) -> Select[tuple[SecretORM]]: + return ( + select(SecretORM) + .where(SecretORM.user_id == requested_by.id) + .where( + or_( + SecretORM.expiration_timestamp.is_(None), + SecretORM.expiration_timestamp > datetime.now(UTC) + timedelta(seconds=120), + ) + ) + ) + @only_authenticated async def get_user_secrets(self, requested_by: APIUser, kind: SecretKind) -> list[Secret]: """Get all user's secrets from the database.""" async with self.session_maker() as session: - stmt = select(SecretORM).where(SecretORM.user_id == requested_by.id).where(SecretORM.kind == kind) + stmt = self._get_stmt(requested_by).where(SecretORM.kind == kind) res = await session.execute(stmt) orm = res.scalars().all() return [o.dump() for o in orm] @@ -38,7 +50,7 @@ async def get_user_secrets(self, requested_by: APIUser, kind: SecretKind) -> lis async def get_secret_by_id(self, requested_by: APIUser, secret_id: ULID) -> Secret | None: """Get a specific user secret from the database.""" async with self.session_maker() as session: - stmt = select(SecretORM).where(SecretORM.user_id == requested_by.id).where(SecretORM.id == secret_id) + stmt = self._get_stmt(requested_by).where(SecretORM.id == secret_id) res = await session.execute(stmt) orm = res.scalar_one_or_none() if orm is None: @@ -66,6 +78,7 @@ async def insert_secret(self, requested_by: APIUser, secret: UnsavedSecret) -> S encrypted_value=secret.encrypted_value, encrypted_key=secret.encrypted_key, kind=secret.kind, + expiration_timestamp=secret.expiration_timestamp, ) session.add(orm) @@ -83,19 +96,26 @@ async def insert_secret(self, requested_by: APIUser, secret: UnsavedSecret) -> S @only_authenticated async def update_secret( - self, requested_by: APIUser, secret_id: ULID, encrypted_value: bytes, encrypted_key: bytes + self, + requested_by: APIUser, + secret_id: ULID, + encrypted_value: bytes, + encrypted_key: bytes, + expiration_timestamp: datetime | None, ) -> Secret: """Update a secret.""" async with self.session_maker() as session, session.begin(): - result = await session.execute( - select(SecretORM).where(SecretORM.id == secret_id).where(SecretORM.user_id == requested_by.id) - ) + result = await 
session.execute(self._get_stmt(requested_by).where(SecretORM.id == secret_id)) secret = result.scalar_one_or_none() if secret is None: raise errors.MissingResourceError(message=f"The secret with id '{secret_id}' cannot be found") - secret.update(encrypted_value=encrypted_value, encrypted_key=encrypted_key) + secret.update( + encrypted_value=encrypted_value, + encrypted_key=encrypted_key, + expiration_timestamp=expiration_timestamp, + ) return secret.dump() @only_authenticated @@ -103,9 +123,7 @@ async def delete_secret(self, requested_by: APIUser, secret_id: ULID) -> None: """Delete a secret.""" async with self.session_maker() as session, session.begin(): - result = await session.execute( - select(SecretORM).where(SecretORM.id == secret_id).where(SecretORM.user_id == requested_by.id) - ) + result = await session.execute(self._get_stmt(requested_by).where(SecretORM.id == secret_id)) secret = result.scalar_one_or_none() if secret is None: return None diff --git a/components/renku_data_services/secrets/models.py b/components/renku_data_services/secrets/models.py index cc7d46e8f..491a25d01 100644 --- a/components/renku_data_services/secrets/models.py +++ b/components/renku_data_services/secrets/models.py @@ -24,6 +24,7 @@ class UnsavedSecret(BaseModel): encrypted_key: bytes = Field(repr=False) modification_date: datetime = Field(default_factory=lambda: datetime.now(UTC).replace(microsecond=0), init=False) kind: SecretKind + expiration_timestamp: datetime | None = Field(default=None) class Secret(UnsavedSecret): diff --git a/components/renku_data_services/secrets/orm.py b/components/renku_data_services/secrets/orm.py index 5f82d94a1..6b872dc94 100644 --- a/components/renku_data_services/secrets/orm.py +++ b/components/renku_data_services/secrets/orm.py @@ -35,6 +35,9 @@ class SecretORM(BaseORM): encrypted_value: Mapped[bytes] = mapped_column(LargeBinary()) encrypted_key: Mapped[bytes] = mapped_column(LargeBinary()) kind: Mapped[models.SecretKind] + expiration_timestamp: Mapped[Optional[datetime]] = mapped_column( + "expiration_timestamp", DateTime(timezone=True), default=None, nullable=True + ) modification_date: Mapped[datetime] = mapped_column( "modification_date", DateTime(timezone=True), default_factory=lambda: datetime.now(UTC).replace(microsecond=0) ) @@ -51,6 +54,7 @@ def dump(self) -> models.Secret: encrypted_value=self.encrypted_value, encrypted_key=self.encrypted_key, kind=self.kind, + expiration_timestamp=self.expiration_timestamp, ) secret.modification_date = self.modification_date return secret @@ -62,12 +66,14 @@ def load(cls, secret: models.UnsavedSecret) -> "SecretORM": name=secret.name, encrypted_value=secret.encrypted_value, encrypted_key=secret.encrypted_key, - modification_date=secret.modification_date, kind=secret.kind, + expiration_timestamp=secret.expiration_timestamp, + modification_date=secret.modification_date, ) - def update(self, encrypted_value: bytes, encrypted_key: bytes) -> None: + def update(self, encrypted_value: bytes, encrypted_key: bytes, expiration_timestamp: datetime | None) -> None: """Update an existing secret.""" self.encrypted_value = encrypted_value self.encrypted_key = encrypted_key + self.expiration_timestamp = expiration_timestamp self.modification_date = datetime.now(UTC).replace(microsecond=0) diff --git a/components/renku_data_services/storage/rclone.py b/components/renku_data_services/storage/rclone.py index 716acacb8..56d54a4eb 100644 --- a/components/renku_data_services/storage/rclone.py +++ b/components/renku_data_services/storage/rclone.py 
@@ -161,7 +161,7 @@ def __patch_schema_add_openbis_type(spec: list[dict[str, Any]]) -> None: "Value": None, "ShortOpt": "", "Hide": 0, - "Required": False, + "Required": True, "IsPassword": True, "NoPrefix": False, "Advanced": False, @@ -416,7 +416,7 @@ class RCloneProviderSchema(BaseModel): @property def required_options(self) -> list[RCloneOption]: """Returns all required options for this provider.""" - return [o for o in self.options if o.required] + return [o for o in self.options if o.required and not o.sensitive] @property def sensitive_options(self) -> list[RCloneOption]: diff --git a/components/renku_data_services/users/api.spec.yaml b/components/renku_data_services/users/api.spec.yaml index 3bb060852..db688b719 100644 --- a/components/renku_data_services/users/api.spec.yaml +++ b/components/renku_data_services/users/api.spec.yaml @@ -418,20 +418,23 @@ components: $ref: "#/components/schemas/Ulid" name: $ref: "#/components/schemas/SecretName" - modification_date: - $ref: "#/components/schemas/ModificationDate" kind: $ref: "#/components/schemas/SecretKind" + expiration_timestamp: + $ref: "#/components/schemas/ExpirationTimestamp" + modification_date: + $ref: "#/components/schemas/ModificationDate" required: - "id" - "name" - - "modification_date" - "kind" + - "modification_date" example: id: "01AN4Z79ZS5XN0F25N3DB94T4R" name: "S3-Credentials" - modification_date: "2024-01-16T11:42:05Z" kind: general + expiration_timestamp: null + modification_date: "2024-01-16T11:42:05Z" SecretPost: description: Secret metadata to be created type: object @@ -446,6 +449,8 @@ components: - $ref: "#/components/schemas/SecretKind" - default: "general" default: general + expiration_timestamp: + $ref: "#/components/schemas/ExpirationTimestamp" required: - "name" - "value" @@ -456,6 +461,8 @@ components: properties: value: $ref: "#/components/schemas/SecretValue" + expiration_timestamp: + $ref: "#/components/schemas/ExpirationTimestamp" required: - "value" SecretName: @@ -487,6 +494,13 @@ components: enum: - general - storage + ExpirationTimestamp: + description: The date and time the secret is not valid anymore (this is in any timezone) + type: string + nullable: true + format: date-time + example: "2030-11-01T17:32:28UTC+01:00" + default: null UserPreferences: type: object description: The object containing user preferences diff --git a/components/renku_data_services/users/apispec.py b/components/renku_data_services/users/apispec.py index 5e0637b51..01cf4be08 100644 --- a/components/renku_data_services/users/apispec.py +++ b/components/renku_data_services/users/apispec.py @@ -198,12 +198,17 @@ class SecretWithId(BaseAPISpec): min_length=1, pattern="^[a-zA-Z0-9_\\-.]*$", ) + kind: SecretKind + expiration_timestamp: Optional[datetime] = Field( + None, + description="The date and time the secret is not valid anymore (this is in any timezone)", + example="2030-11-01T17:32:28UTC+01:00", + ) modification_date: datetime = Field( ..., description="The date and time the secret was created or modified (this is always in UTC)", example="2023-11-01T17:32:28Z", ) - kind: SecretKind class SecretPost(BaseAPISpec): @@ -225,6 +230,11 @@ class SecretPost(BaseAPISpec): min_length=1, ) kind: SecretKind = SecretKind.general + expiration_timestamp: Optional[datetime] = Field( + None, + description="The date and time the secret is not valid anymore (this is in any timezone)", + example="2030-11-01T17:32:28UTC+01:00", + ) class SecretPatch(BaseAPISpec): @@ -237,6 +247,11 @@ class SecretPatch(BaseAPISpec): max_length=5000, 
min_length=1, ) + expiration_timestamp: Optional[datetime] = Field( + None, + description="The date and time the secret is not valid anymore (this is in any timezone)", + example="2030-11-01T17:32:28UTC+01:00", + ) class PinnedProjects(BaseAPISpec): diff --git a/components/renku_data_services/users/blueprints.py b/components/renku_data_services/users/blueprints.py index 74dd7971f..ad8a48e48 100644 --- a/components/renku_data_services/users/blueprints.py +++ b/components/renku_data_services/users/blueprints.py @@ -152,7 +152,11 @@ async def _get_all( secret_kind = SecretKind[query.kind.value] secrets = await self.secret_repo.get_user_secrets(requested_by=user, kind=secret_kind) secrets_json = [ - secret.model_dump(include={"name", "id", "modification_date", "kind"}, exclude_none=True, mode="json") + secret.model_dump( + include={"id", "name", "kind", "expiration_timestamp", "modification_date"}, + exclude_none=True, + mode="json", + ) for secret in secrets ] return validated_json( @@ -173,9 +177,11 @@ async def _get_one(_: Request, user: base_models.APIUser, secret_id: ULID) -> JS if not secret: raise errors.MissingResourceError(message=f"The secret with id {secret_id} cannot be found.") result = secret.model_dump( - include={"name", "id", "modification_date", "kind"}, exclude_none=True, mode="json" + include={"id", "name", "kind", "expiration_timestamp", "modification_date"}, + exclude_none=False, + mode="json", ) - return validated_json(apispec.SecretWithId, result) + return validated_json(apispec.SecretWithId, result, exclude_none=False) return "/user/secrets/", ["GET"], _get_one @@ -197,12 +203,15 @@ async def _post(_: Request, user: base_models.APIUser, body: apispec.SecretPost) encrypted_value=encrypted_value, encrypted_key=encrypted_key, kind=SecretKind[body.kind.value], + expiration_timestamp=body.expiration_timestamp, ) inserted_secret = await self.secret_repo.insert_secret(requested_by=user, secret=secret) result = inserted_secret.model_dump( - include={"name", "id", "modification_date", "kind"}, exclude_none=True, mode="json" + include={"id", "name", "kind", "expiration_timestamp", "modification_date"}, + exclude_none=False, + mode="json", ) - return validated_json(apispec.SecretWithId, result, 201) + return validated_json(apispec.SecretWithId, result, 201, exclude_none=False) return "/user/secrets", ["POST"], _post @@ -222,13 +231,19 @@ async def _patch( secret_value=body.value, ) updated_secret = await self.secret_repo.update_secret( - requested_by=user, secret_id=secret_id, encrypted_value=encrypted_value, encrypted_key=encrypted_key + requested_by=user, + secret_id=secret_id, + encrypted_value=encrypted_value, + encrypted_key=encrypted_key, + expiration_timestamp=body.expiration_timestamp, ) result = updated_secret.model_dump( - include={"name", "id", "modification_date", "kind"}, exclude_none=True, mode="json" + include={"id", "name", "kind", "expiration_timestamp", "modification_date"}, + exclude_none=False, + mode="json", ) - return validated_json(apispec.SecretWithId, result) + return validated_json(apispec.SecretWithId, result, exclude_none=False) return "/user/secrets/", ["PATCH"], _patch diff --git a/components/renku_data_services/utils/core.py b/components/renku_data_services/utils/core.py index aa57eddaa..536a23eb9 100644 --- a/components/renku_data_services/utils/core.py +++ b/components/renku_data_services/utils/core.py @@ -1,10 +1,10 @@ """Shared utility functions.""" -import datetime import functools import os import ssl from collections.abc import Awaitable, 
Callable +from datetime import datetime, timedelta from typing import Any, Concatenate, ParamSpec, Protocol, TypeVar, cast import httpx @@ -93,54 +93,69 @@ async def transaction_wrapper(self: _WithSessionMaker, *args: _P.args, **kwargs: return transaction_wrapper +def _get_url(host: str) -> str: + return f"https://{host}/openbis/openbis/rmi-application-server-v3.json" + + +async def get_openbis_session_token( + host: str, + username: str, + password: str, + timeout: int = 12, +) -> str: + """Requests an openBIS session token with the user's login credentials.""" + login = {"method": "login", "params": [username, password], "id": "2", "jsonrpc": "2.0"} + async with httpx.AsyncClient(verify=get_ssl_context()) as client: + response = await client.post(_get_url(host), json=login, timeout=timeout) + json: dict[str, str] = response.json() + return json["result"] + + async def get_openbis_pat( host: str, session_id: str, personal_access_token_session_name: str = "renku", minimum_validity_in_days: int = 2, timeout: int = 12, -) -> str: +) -> tuple[str, datetime]: """Requests an openBIS PAT with an openBIS session ID.""" - url = f"https://{host}/openbis/openbis/rmi-application-server-v3.json" - - get_server_information = {"method": "getServerInformation", "params": [session_id], "id": "2", "jsonrpc": "2.0"} + url = _get_url(host) async with httpx.AsyncClient(verify=get_ssl_context()) as client: + get_server_information = {"method": "getServerInformation", "params": [session_id], "id": "2", "jsonrpc": "2.0"} response = await client.post(url, json=get_server_information, timeout=timeout) if response.status_code == 200: json1: dict[str, dict[str, str]] = response.json() - personal_access_tokens_max_validity_period = int( - json1["result"]["personal-access-tokens-max-validity-period"] - ) - - valid_from = datetime.datetime.now() - valid_to = valid_from + datetime.timedelta(seconds=personal_access_tokens_max_validity_period) - validity_in_days = (valid_to - valid_from).days - if validity_in_days >= minimum_validity_in_days: - create_personal_access_tokens = { - "method": "createPersonalAccessTokens", - "params": [ - session_id, - { - "@type": "as.dto.pat.create.PersonalAccessTokenCreation", - "sessionName": personal_access_token_session_name, - "validFromDate": int(valid_from.timestamp() * 1000), - "validToDate": int(valid_to.timestamp() * 1000), - }, - ], - "id": "2", - "jsonrpc": "2.0", - } - - response = await client.post(url, json=create_personal_access_tokens, timeout=timeout) - - if response.status_code == 200: - json2: dict[str, list[dict[str, str]]] = response.json() - return json2["result"][0]["permId"] - else: - raise Exception( - "The maximum allowed validity period of a personal access token is less than " - f"{minimum_validity_in_days} days." 
+ if "error" not in json1: + personal_access_tokens_max_validity_period = int( + json1["result"]["personal-access-tokens-max-validity-period"] ) + valid_from = datetime.now() + valid_to = valid_from + timedelta(seconds=personal_access_tokens_max_validity_period) + validity_in_days = (valid_to - valid_from).days + if validity_in_days >= minimum_validity_in_days: + create_personal_access_tokens = { + "method": "createPersonalAccessTokens", + "params": [ + session_id, + { + "@type": "as.dto.pat.create.PersonalAccessTokenCreation", + "sessionName": personal_access_token_session_name, + "validFromDate": int(valid_from.timestamp() * 1000), + "validToDate": int(valid_to.timestamp() * 1000), + }, + ], + "id": "2", + "jsonrpc": "2.0", + } + response = await client.post(url, json=create_personal_access_tokens, timeout=timeout) + if response.status_code == 200: + json2: dict[str, list[dict[str, str]]] = response.json() + return json2["result"][0]["permId"], valid_to + else: + raise Exception( + "The maximum allowed validity period of a personal access token is less than " + f"{minimum_validity_in_days} days." + ) raise Exception("An openBIS personal access token related request failed.") diff --git a/test/bases/renku_data_services/data_api/test_secret.py b/test/bases/renku_data_services/data_api/test_secret.py index c4b132ab9..e47304247 100644 --- a/test/bases/renku_data_services/data_api/test_secret.py +++ b/test/bases/renku_data_services/data_api/test_secret.py @@ -1,6 +1,8 @@ """Tests for secrets blueprints.""" +import time from base64 import b64decode +from datetime import datetime, timedelta from typing import Any import pytest @@ -23,8 +25,10 @@ @pytest.fixture def create_secret(sanic_client: SanicASGITestClient, user_headers): - async def create_secret_helper(name: str, value: str, kind: str = "general") -> dict[str, Any]: - payload = {"name": name, "value": value, "kind": kind} + async def create_secret_helper( + name: str, value: str, kind: str = "general", expiration_timestamp: str = None + ) -> dict[str, Any]: + payload = {"name": name, "value": value, "kind": kind, "expiration_timestamp": expiration_timestamp} _, response = await sanic_client.post("/api/data/user/secrets", headers=user_headers, json=payload) @@ -46,11 +50,32 @@ async def test_create_secrets(sanic_client: SanicASGITestClient, user_headers, k assert response.status_code == 201, response.text assert response.json is not None - assert response.json.keys() == {"name", "id", "modification_date", "kind"} + assert response.json.keys() == {"id", "name", "kind", "expiration_timestamp", "modification_date"} assert response.json["name"] == "my-secret" assert response.json["id"] is not None + assert response.json["kind"] == kind + assert response.json["expiration_timestamp"] is None assert response.json["modification_date"] is not None + + +@pytest.mark.asyncio +@pytest.mark.parametrize("kind", [e.value for e in apispec.SecretKind]) +async def test_create_secrets_with_expiration_timestamps(sanic_client: SanicASGITestClient, user_headers, kind) -> None: + payload = { + "name": "my-secret-that-expires", + "value": "42", + "kind": kind, + "expiration_timestamp": "2029-12-31T23:59:59+01:00", + } + _, response = await sanic_client.post("/api/data/user/secrets", headers=user_headers, json=payload) + assert response.status_code == 201, response.text + assert response.json is not None + assert response.json.keys() == {"id", "name", "kind", "expiration_timestamp", "modification_date"} + assert response.json["name"] == "my-secret-that-expires" 
+ assert response.json["id"] is not None assert response.json["kind"] == kind + assert response.json["expiration_timestamp"] == "2029-12-31T23:59:59+01:00" + assert response.json["modification_date"] is not None @pytest.mark.asyncio @@ -59,15 +84,36 @@ async def test_get_one_secret(sanic_client: SanicASGITestClient, user_headers, c secret = await create_secret("secret-2", "value-2") await create_secret("secret-3", "value-3") - secret_id = secret["id"] + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret["id"]}", headers=user_headers) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json["name"] == secret["name"] + assert response.json["id"] == secret["id"] + assert "value" not in response.json + + +@pytest.mark.asyncio +async def test_get_one_secret_not_expired(sanic_client: SanicASGITestClient, user_headers, create_secret) -> None: + expiration_timestamp = (datetime.now() + timedelta(seconds=(120 + 15))).isoformat() + secret_1 = await create_secret("secret-1", "value-1", expiration_timestamp=expiration_timestamp) + secret_2 = await create_secret("secret-2", "value-2", expiration_timestamp="2029-12-31") - _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_id}", headers=user_headers) + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_1["id"]}", headers=user_headers) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json["name"] == "secret-1" + assert response.json["id"] == secret_1["id"] + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_2["id"]}", headers=user_headers) assert response.status_code == 200, response.text assert response.json is not None assert response.json["name"] == "secret-2" - assert response.json["id"] == secret_id - assert "value" not in response.json + assert response.json["id"] == secret_2["id"] + + time.sleep(20) + + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_1["id"]}", headers=user_headers) + assert response.status_code == 404 @pytest.mark.asyncio @@ -84,6 +130,22 @@ async def test_get_all_secrets(sanic_client: SanicASGITestClient, user_headers, assert {s["name"] for s in response.json} == {"secret-1", "secret-2", "secret-3"} +@pytest.mark.asyncio +async def test_get_all_secrets_not_expired(sanic_client: SanicASGITestClient, user_headers, create_secret) -> None: + expiration_timestamp = (datetime.now() + timedelta(seconds=10)).isoformat() + await create_secret("secret-1", "value-1", expiration_timestamp=expiration_timestamp) + await create_secret("secret-2", "value-2") + await create_secret("secret-3", "value-3", expiration_timestamp="2029-12-31") + + time.sleep(15) + + _, response = await sanic_client.get("/api/data/user/secrets", headers=user_headers) + assert response.status_code == 200, response.text + assert response.json is not None + assert {s["name"] for s in response.json} == {"secret-2", "secret-3"} + assert {s["expiration_timestamp"] for s in response.json if s["name"] == "secret-3"} == {"2029-12-31T00:00:00Z"} + + @pytest.mark.asyncio async def test_get_all_secrets_filtered_by_kind(sanic_client, user_headers, create_secret) -> None: await create_secret("secret-1", "value-1") @@ -114,14 +176,10 @@ async def test_get_delete_a_secret(sanic_client: SanicASGITestClient, user_heade secret = await create_secret("secret-2", "value-2") await create_secret("secret-3", "value-3") - secret_id = secret["id"] - - _, response = await 
sanic_client.delete(f"/api/data/user/secrets/{secret_id}", headers=user_headers) - + _, response = await sanic_client.delete(f"/api/data/user/secrets/{secret["id"]}", headers=user_headers) assert response.status_code == 204, response.text _, response = await sanic_client.get("/api/data/user/secrets", headers=user_headers) - assert response.status_code == 200, response.text assert response.json is not None assert {s["name"] for s in response.json} == {"secret-1", "secret-3"} @@ -133,18 +191,42 @@ async def test_get_update_a_secret(sanic_client: SanicASGITestClient, user_heade secret = await create_secret("secret-2", "value-2") await create_secret("secret-3", "value-3") - secret_id = secret["id"] - payload = {"value": "new-value"} + _, response = await sanic_client.patch( + f"/api/data/user/secrets/{secret["id"]}", headers=user_headers, json={"name": "new-name", "value": "new-value"} + ) + assert response.status_code == 422 - _, response = await sanic_client.patch(f"/api/data/user/secrets/{secret_id}", headers=user_headers, json=payload) + _, response = await sanic_client.patch( + f"/api/data/user/secrets/{secret["id"]}", headers=user_headers, json={"value": "new-value"} + ) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json["id"] == secret["id"] + assert response.json["name"] == secret["name"] + assert response.json["expiration_timestamp"] is None + assert "value" not in response.json + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret["id"]}", headers=user_headers) assert response.status_code == 200, response.text + assert response.json is not None + assert response.json["id"] == secret["id"] + assert response.json["name"] == secret["name"] + assert response.json["expiration_timestamp"] is None + assert "value" not in response.json - _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_id}", headers=user_headers) + _, response = await sanic_client.patch( + f"/api/data/user/secrets/{secret["id"]}", + headers=user_headers, + json={"value": "newest-value", "expiration_timestamp": "2029-12-31"}, + ) + assert response.status_code == 200, response.text + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret["id"]}", headers=user_headers) assert response.status_code == 200, response.text assert response.json is not None - assert response.json["id"] == secret_id + assert response.json["id"] == secret["id"] + assert response.json["name"] == secret["name"] + assert response.json["expiration_timestamp"] == "2029-12-31T00:00:00Z" assert "value" not in response.json @@ -156,15 +238,11 @@ async def test_cannot_get_another_user_secret( secret = await create_secret("secret-2", "value-2") await create_secret("secret-3", "value-3") - secret_id = secret["id"] - - _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_id}", headers=admin_headers) - + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret["id"]}", headers=admin_headers) assert response.status_code == 404, response.text assert "cannot be found" in response.json["error"]["message"] _, response = await sanic_client.get("/api/data/user/secrets", headers=admin_headers) - assert response.status_code == 200, response.text assert response.json == [] diff --git a/test/bases/renku_data_services/data_api/test_storage_v2.py b/test/bases/renku_data_services/data_api/test_storage_v2.py new file mode 100644 index 000000000..e69de29bb diff --git a/test/conftest.py b/test/conftest.py index fffa7941e..788c993eb 100644 --- 
a/test/conftest.py +++ b/test/conftest.py @@ -252,3 +252,15 @@ def only(iterable, default=None, too_long=None): raise too_long or ValueError(msg) return first_value + + +@pytest.hookimpl(tryfirst=True) +def pytest_runtest_setup(item): + mark = item.get_closest_marker(name="myskip") + if mark: + condition = next(iter(mark.args), True) + reason = mark.kwargs.get("reason") + item.add_marker( + pytest.mark.skipif(not os.getenv("PYTEST_FORCE_RUN_MYSKIPS", False) and condition, reason=reason), + append=False, + ) From 954595b6694cca991b7a70f1344d99570dfeae48 Mon Sep 17 00:00:00 2001 From: olloz26 Date: Thu, 5 Sep 2024 16:22:39 +0200 Subject: [PATCH 07/15] feat: implement review points --- .../renku_data_services/storage/core.py | 25 +++++++++++++++++++ .../renku_data_services/storage/rclone.py | 22 +++++++++++++++- components/renku_data_services/utils/core.py | 9 +++++-- 3 files changed, 53 insertions(+), 3 deletions(-) create mode 100644 components/renku_data_services/storage/core.py diff --git a/components/renku_data_services/storage/core.py b/components/renku_data_services/storage/core.py new file mode 100644 index 000000000..b34fac4f9 --- /dev/null +++ b/components/renku_data_services/storage/core.py @@ -0,0 +1,25 @@ +"""Business logic for storage.""" + +from datetime import datetime + +from renku_data_services import errors +from renku_data_services.storage import models +from renku_data_services.utils.core import get_openbis_pat + + +async def storage_secrets_preparation( + secrets: list[models.CloudStorageSecretUpsert], + storage: models.CloudStorage, + expiration_timestamp: datetime | None = None, +) -> tuple[list[models.CloudStorageSecretUpsert], datetime | None]: + """Prepare the validated secrets so that they can be stored (long-term).""" + if storage.storage_type == "openbis": + try: + ( + secrets[0].value, + expiration_timestamp, + ) = await get_openbis_pat(storage.configuration["host"], secrets[0].value) + except Exception as e: + raise errors.ProgrammingError(message=str(e)) from e + + return secrets, expiration_timestamp diff --git a/components/renku_data_services/storage/rclone.py b/components/renku_data_services/storage/rclone.py index 56d54a4eb..64867c08f 100644 --- a/components/renku_data_services/storage/rclone.py +++ b/components/renku_data_services/storage/rclone.py @@ -141,6 +141,13 @@ def __patch_schema_add_openbis_type(spec: list[dict[str, Any]]) -> None: "Provider": "", "Default": "", "Value": None, + "Examples": [ + { + "Value": "openbis-eln-lims.ethz.ch", + "Help": "Public openBIS demo instance", + "Provider": "", + }, + ], "ShortOpt": "", "Hide": 0, "Required": True, @@ -226,6 +233,19 @@ def validate(self, configuration: Union["RCloneConfig", dict[str, Any]], keep_se provider.validate_config(configuration, keep_sensitive=keep_sensitive) + def validate_sensitive_data( + self, configuration: Union["RCloneConfig", dict[str, Any]], sensitive_data: dict[str, str] + ) -> None: + """Validates whether the provided sensitive data is marked as sensitive in the rclone schema.""" + sensitive_options = self.get_provider(configuration).sensitive_options + sensitive_options_name_lookup = [o.name for o in sensitive_options] + sensitive_data_counter = 0 + for key, value in sensitive_data.items(): + if len(value) > 0 and key in sensitive_options_name_lookup: + sensitive_data_counter += 1 + continue + raise errors.ValidationError(message=f"The '{key}' property is not marked as sensitive.") + async def test_connection( self, configuration: Union["RCloneConfig", dict[str, Any]], 
source_path: str ) -> ConnectionResult: @@ -416,7 +436,7 @@ class RCloneProviderSchema(BaseModel): @property def required_options(self) -> list[RCloneOption]: """Returns all required options for this provider.""" - return [o for o in self.options if o.required and not o.sensitive] + return [o for o in self.options if o.required] @property def sensitive_options(self) -> list[RCloneOption]: diff --git a/components/renku_data_services/utils/core.py b/components/renku_data_services/utils/core.py index 536a23eb9..e684f4c53 100644 --- a/components/renku_data_services/utils/core.py +++ b/components/renku_data_services/utils/core.py @@ -107,8 +107,13 @@ async def get_openbis_session_token( login = {"method": "login", "params": [username, password], "id": "2", "jsonrpc": "2.0"} async with httpx.AsyncClient(verify=get_ssl_context()) as client: response = await client.post(_get_url(host), json=login, timeout=timeout) - json: dict[str, str] = response.json() - return json["result"] + if response.status_code == 200: + json: dict[str, str] = response.json() + if "result" in json: + return json["result"] + raise Exception("No session token was returned. Username and password may be incorrect.") + + raise Exception("An openBIS session token related request failed.") async def get_openbis_pat( From 73cd7030db89e39c0e5cf6648fa785c18481cfb5 Mon Sep 17 00:00:00 2001 From: olloz26 Date: Thu, 5 Sep 2024 16:41:59 +0200 Subject: [PATCH 08/15] feat: add openBIS test connection --- .../renku_data_services/storage/rclone.py | 18 ++++++++-- .../data_api/test_storage.py | 36 ++++++++++++++++++- 2 files changed, 50 insertions(+), 4 deletions(-) diff --git a/components/renku_data_services/storage/rclone.py b/components/renku_data_services/storage/rclone.py index 64867c08f..53719647a 100644 --- a/components/renku_data_services/storage/rclone.py +++ b/components/renku_data_services/storage/rclone.py @@ -246,6 +246,16 @@ def validate_sensitive_data( continue raise errors.ValidationError(message=f"The '{key}' property is not marked as sensitive.") + def get_real_config(self, configuration: Union["RCloneConfig", dict[str, Any]]) -> dict[str, Any]: + """Converts a Renku rclone configuration to a real rclone config.""" + real_config = dict(configuration) + if configuration["type"] == "openbis": + real_config["type"] = "sftp" + real_config["port"] = "2222" + real_config["user"] = "?" + real_config["pass"] = real_config.pop("session_token") + return real_config + async def test_connection( self, configuration: Union["RCloneConfig", dict[str, Any]], source_path: str ) -> ConnectionResult: @@ -255,15 +265,17 @@ async def test_connection( except errors.ValidationError as e: return ConnectionResult(False, str(e)) - obscured_config = await self.obscure_config(configuration) + obscured_rclone_config = await self.obscure_config(self.get_real_config(configuration)) with tempfile.NamedTemporaryFile(mode="w+", delete=False, encoding="utf-8") as f: - config = "\n".join(f"{k}={v}" for k, v in obscured_config.items()) - f.write(f"[temp]\n{config}") + obscured_rclone_config_string = "\n".join(f"{k}={v}" for k, v in obscured_rclone_config.items()) + f.write(f"[temp]\n{obscured_rclone_config_string}") f.close() proc = await asyncio.create_subprocess_exec( "rclone", "lsf", + "--low-level-retries=1", # Connection tests should fail fast. + "--retries=1", # Connection tests should fail fast. 
"--config", f.name, f"temp:{source_path}", diff --git a/test/bases/renku_data_services/data_api/test_storage.py b/test/bases/renku_data_services/data_api/test_storage.py index 219284a76..5cf89ef59 100644 --- a/test/bases/renku_data_services/data_api/test_storage.py +++ b/test/bases/renku_data_services/data_api/test_storage.py @@ -11,6 +11,7 @@ from renku_data_services.data_api.app import register_all_handlers from renku_data_services.migrations.core import run_migrations_for_app from renku_data_services.storage.rclone import RCloneValidator +from renku_data_services.utils.core import get_openbis_session_token from test.utils import SanicReusableASGITestClient _valid_storage: dict[str, Any] = { @@ -538,7 +539,7 @@ async def test_storage_validate_connection(storage_test_client) -> None: _, res = await storage_test_client.post("/api/data/storage_schema/test_connection", data=json.dumps(body)) assert res.status_code == 422 - body = {"configuration": {"type": "s3", "provider": "AWS"}, "source_path": "doesntexistatall/"} + body = {"configuration": {"type": "s3", "provider": "AWS"}, "source_path": "does_not_exist_at_all/"} _, res = await storage_test_client.post("/api/data/storage_schema/test_connection", data=json.dumps(body)) assert res.status_code == 422 @@ -547,6 +548,39 @@ async def test_storage_validate_connection(storage_test_client) -> None: assert res.status_code == 204 +@pytest.mark.myskip(1 == 1, reason="Depends on a remote openBIS host which may not always be available.") +@pytest.mark.asyncio +async def test_openbis_storage_validate_connection(storage_test_client) -> None: + openbis_session_token = await get_openbis_session_token( + host="openbis-eln-lims.ethz.ch", # Public openBIS demo instance. + username="observer", + password="1234", + ) + storage_test_client, _ = storage_test_client + + body = { + "configuration": { + "type": "openbis", + "host": "openbis-eln-lims.ethz.ch", + "session_token": openbis_session_token, + }, + "source_path": "does_not_exist_at_all/", + } + _, res = await storage_test_client.post("/api/data/storage_schema/test_connection", data=json.dumps(body)) + assert res.status_code == 422 + + body = { + "configuration": { + "type": "openbis", + "host": "openbis-eln-lims.ethz.ch", + "session_token": openbis_session_token, + }, + "source_path": "/", + } + _, res = await storage_test_client.post("/api/data/storage_schema/test_connection", data=json.dumps(body)) + assert res.status_code == 204 + + @pytest.mark.asyncio async def test_storage_validate_error(storage_test_client) -> None: storage_test_client, _ = storage_test_client From e6a18539835ee61075f822d463b28c72ab1f01a2 Mon Sep 17 00:00:00 2001 From: olloz26 Date: Thu, 28 Nov 2024 10:38:06 +0100 Subject: [PATCH 09/15] fix: correct a rebase merge --- DEVELOPING.md | 2 +- .../data_connectors/blueprints.py | 51 ++++++++++++++- .../renku_data_services/data_connectors/db.py | 12 +++- ...441beb_add_secret_expiration_timestamp.py} | 12 ++-- .../renku_data_services/storage/core.py | 25 -------- components/renku_data_services/utils/core.py | 4 +- .../renku_data_services/data_api/conftest.py | 33 ++++++++++ .../data_api/test_data_connectors.py | 64 +++++++++++++++++-- .../data_api/test_storage_v2.py | 0 9 files changed, 159 insertions(+), 44 deletions(-) rename components/renku_data_services/migrations/versions/{7bc32829ed2f_add_secret_expiration_timestamp.py => 4d2a21441beb_add_secret_expiration_timestamp.py} (76%) delete mode 100644 components/renku_data_services/storage/core.py delete mode 100644 
test/bases/renku_data_services/data_api/test_storage_v2.py diff --git a/DEVELOPING.md b/DEVELOPING.md index 059189296..2cc6a8d70 100644 --- a/DEVELOPING.md +++ b/DEVELOPING.md @@ -117,7 +117,7 @@ function if you prefer to keep your favorite shell. You can run style checks using `make style_checks`. To run the test suite, use `make tests` (you likely need to run in the devcontainer for this to work, as it needs some surrounding services to run). -* Run a specific test e.g.: `poetry run pytest test/bases/renku_data_services/data_api/test_storage_v2.py::test_storage_v2_create_openbis_secret` +* Run a specific test e.g.: `poetry run pytest test/bases/renku_data_services/data_api/test_data_connectors.py::test_create_openbis_data_connector` * Also run tests marked with `@pytest.mark.myskip`: `PYTEST_FORCE_RUN_MYSKIPS=1 make tests` ## Migrations diff --git a/components/renku_data_services/data_connectors/blueprints.py b/components/renku_data_services/data_connectors/blueprints.py index f5469de13..73da71222 100644 --- a/components/renku_data_services/data_connectors/blueprints.py +++ b/components/renku_data_services/data_connectors/blueprints.py @@ -1,6 +1,7 @@ """Data connectors blueprint.""" from dataclasses import dataclass +from datetime import datetime from typing import Any from sanic import Request @@ -8,7 +9,7 @@ from sanic_ext import validate from ulid import ULID -from renku_data_services import base_models +from renku_data_services import base_models, errors from renku_data_services.base_api.auth import ( authenticate, only_authenticated, @@ -31,6 +32,7 @@ DataConnectorSecretRepository, ) from renku_data_services.storage.rclone import RCloneValidator +from renku_data_services.utils.core import get_openbis_pat @dataclass(kw_only=True) @@ -310,10 +312,55 @@ async def _patch_secrets( user: base_models.APIUser, data_connector_id: ULID, body: apispec.DataConnectorSecretPatchList, + validator: RCloneValidator, ) -> JSONResponse: unsaved_secrets = validate_data_connector_secrets_patch(put=body) + data_connector = await self.data_connector_repo.get_data_connector( + user=user, data_connector_id=data_connector_id + ) + storage = data_connector.storage + provider = validator.providers[storage.storage_type] + sensitive_lookup = [o.name for o in provider.options if o.sensitive] + for secret in unsaved_secrets: + if secret.name in sensitive_lookup: + continue + raise errors.ValidationError( + message=f"The '{secret.name}' property is not marked sensitive and can not be saved in the secret " + f"storage." 
+ ) + expiration_timestamp = None + + if storage.storage_type == "openbis": + + async def openbis_transform_session_token_to_pat() -> ( + tuple[list[models.DataConnectorSecretUpdate], datetime] + ): + if len(unsaved_secrets) == 1 and unsaved_secrets[0].name == "session_token": + if unsaved_secrets[0].value is not None: + try: + openbis_pat = await get_openbis_pat( + storage.configuration["host"], unsaved_secrets[0].value + ) + return ( + [models.DataConnectorSecretUpdate(name="session_token", value=openbis_pat[0])], + openbis_pat[1], + ) + except Exception as e: + raise errors.ProgrammingError(message=str(e)) + raise errors.ValidationError(message="The openBIS session token must be a string value.") + + raise errors.ValidationError(message="The openBIS storage has only one secret: session_token") + + ( + unsaved_secrets, + expiration_timestamp, + ) = await openbis_transform_session_token_to_pat() + secrets = await self.data_connector_secret_repo.patch_data_connector_secrets( - user=user, data_connector_id=data_connector_id, secrets=unsaved_secrets + user=user, + data_connector_id=data_connector_id, + secrets=unsaved_secrets, + expiration_timestamp=expiration_timestamp, ) return validated_json( apispec.DataConnectorSecretsList, [self._dump_data_connector_secret(secret) for secret in secrets] diff --git a/components/renku_data_services/data_connectors/db.py b/components/renku_data_services/data_connectors/db.py index 6bda29641..4b6995296 100644 --- a/components/renku_data_services/data_connectors/db.py +++ b/components/renku_data_services/data_connectors/db.py @@ -1,6 +1,7 @@ """Adapters for data connectors database classes.""" from collections.abc import AsyncIterator, Callable +from datetime import datetime from typing import TypeVar from cryptography.hazmat.primitives.asymmetric import rsa @@ -554,7 +555,11 @@ async def get_data_connector_secrets( return [secret.dump() for secret in secrets] async def patch_data_connector_secrets( - self, user: base_models.APIUser, data_connector_id: ULID, secrets: list[models.DataConnectorSecretUpdate] + self, + user: base_models.APIUser, + data_connector_id: ULID, + secrets: list[models.DataConnectorSecretUpdate], + expiration_timestamp: datetime | None, ) -> list[models.DataConnectorSecret]: """Create, update or remove data connector secrets.""" if user.id is None: @@ -598,7 +603,9 @@ async def patch_data_connector_secrets( if data_connector_secret_orm := existing_secrets_as_dict.get(name): data_connector_secret_orm.secret.update( - encrypted_value=encrypted_value, encrypted_key=encrypted_key + encrypted_value=encrypted_value, + encrypted_key=encrypted_key, + expiration_timestamp=expiration_timestamp, ) else: secret_orm = secrets_schemas.SecretORM( @@ -607,6 +614,7 @@ async def patch_data_connector_secrets( encrypted_value=encrypted_value, encrypted_key=encrypted_key, kind=SecretKind.storage, + expiration_timestamp=expiration_timestamp, ) data_connector_secret_orm = schemas.DataConnectorSecretORM( name=name, diff --git a/components/renku_data_services/migrations/versions/7bc32829ed2f_add_secret_expiration_timestamp.py b/components/renku_data_services/migrations/versions/4d2a21441beb_add_secret_expiration_timestamp.py similarity index 76% rename from components/renku_data_services/migrations/versions/7bc32829ed2f_add_secret_expiration_timestamp.py rename to components/renku_data_services/migrations/versions/4d2a21441beb_add_secret_expiration_timestamp.py index 0812b2adb..d7f4b22db 100644 --- 
a/components/renku_data_services/migrations/versions/7bc32829ed2f_add_secret_expiration_timestamp.py +++ b/components/renku_data_services/migrations/versions/4d2a21441beb_add_secret_expiration_timestamp.py @@ -1,8 +1,8 @@ -"""add_secret_expiration_timestamp +"""add secret expiration timestamp -Revision ID: 7bc32829ed2f -Revises: 9058bf0a1a12 -Create Date: 2024-08-21 12:38:30.932694 +Revision ID: 4d2a21441beb +Revises: 1ef98b967767 +Create Date: 2024-11-21 17:01:56.468831 """ @@ -10,8 +10,8 @@ from alembic import op # revision identifiers, used by Alembic. -revision = "7bc32829ed2f" -down_revision = "9058bf0a1a12" +revision = "4d2a21441beb" +down_revision = "1ef98b967767" branch_labels = None depends_on = None diff --git a/components/renku_data_services/storage/core.py b/components/renku_data_services/storage/core.py deleted file mode 100644 index b34fac4f9..000000000 --- a/components/renku_data_services/storage/core.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Business logic for storage.""" - -from datetime import datetime - -from renku_data_services import errors -from renku_data_services.storage import models -from renku_data_services.utils.core import get_openbis_pat - - -async def storage_secrets_preparation( - secrets: list[models.CloudStorageSecretUpsert], - storage: models.CloudStorage, - expiration_timestamp: datetime | None = None, -) -> tuple[list[models.CloudStorageSecretUpsert], datetime | None]: - """Prepare the validated secrets so that they can be stored (long-term).""" - if storage.storage_type == "openbis": - try: - ( - secrets[0].value, - expiration_timestamp, - ) = await get_openbis_pat(storage.configuration["host"], secrets[0].value) - except Exception as e: - raise errors.ProgrammingError(message=str(e)) from e - - return secrets, expiration_timestamp diff --git a/components/renku_data_services/utils/core.py b/components/renku_data_services/utils/core.py index e684f4c53..587f5528d 100644 --- a/components/renku_data_services/utils/core.py +++ b/components/renku_data_services/utils/core.py @@ -105,7 +105,7 @@ async def get_openbis_session_token( ) -> str: """Requests an openBIS session token with the user's login credentials.""" login = {"method": "login", "params": [username, password], "id": "2", "jsonrpc": "2.0"} - async with httpx.AsyncClient(verify=get_ssl_context()) as client: + async with httpx.AsyncClient(verify=get_ssl_context(), timeout=5) as client: response = await client.post(_get_url(host), json=login, timeout=timeout) if response.status_code == 200: json: dict[str, str] = response.json() @@ -126,7 +126,7 @@ async def get_openbis_pat( """Requests an openBIS PAT with an openBIS session ID.""" url = _get_url(host) - async with httpx.AsyncClient(verify=get_ssl_context()) as client: + async with httpx.AsyncClient(verify=get_ssl_context(), timeout=5) as client: get_server_information = {"method": "getServerInformation", "params": [session_id], "id": "2", "jsonrpc": "2.0"} response = await client.post(url, json=get_server_information, timeout=timeout) if response.status_code == 200: diff --git a/test/bases/renku_data_services/data_api/conftest.py b/test/bases/renku_data_services/data_api/conftest.py index 9259ab07a..9b1514282 100644 --- a/test/bases/renku_data_services/data_api/conftest.py +++ b/test/bases/renku_data_services/data_api/conftest.py @@ -371,6 +371,39 @@ async def create_data_connector_helper( return create_data_connector_helper +@pytest_asyncio.fixture +def create_openbis_data_connector(sanic_client: SanicASGITestClient, regular_user: UserInfo, 
+    async def create_openbis_data_connector_helper(
+        name: str, session_token: str, user: UserInfo | None = None, headers: dict[str, str] | None = None, **payload
+    ) -> Any:
+        user = user or regular_user
+        headers = headers or user_headers
+        dc_payload = {
+            "name": name,
+            "description": "An openBIS data connector",
+            "visibility": "private",
+            "namespace": user.namespace.slug,
+            "storage": {
+                "configuration": {
+                    "type": "openbis",
+                    "host": "openbis-eln-lims.ethz.ch",  # Public openBIS demo instance.
+                    "session_token": session_token,
+                },
+                "source_path": "/",
+                "target_path": "my/target",
+            },
+            "keywords": ["keyword 1", "keyword.2", "keyword-3", "KEYWORD_4"],
+        }
+        dc_payload.update(payload)
+
+        _, response = await sanic_client.post("/api/data/data_connectors", headers=headers, json=dc_payload)
+
+        assert response.status_code == 201, response.text
+        return response.json
+
+    return create_openbis_data_connector_helper
+
+
 @pytest_asyncio.fixture
 async def create_data_connector_and_link_project(
     sanic_client, regular_user, user_headers, admin_user, admin_headers, create_data_connector
diff --git a/test/bases/renku_data_services/data_api/test_data_connectors.py b/test/bases/renku_data_services/data_api/test_data_connectors.py
index 457fa472d..1ca8515b0 100644
--- a/test/bases/renku_data_services/data_api/test_data_connectors.py
+++ b/test/bases/renku_data_services/data_api/test_data_connectors.py
@@ -2,9 +2,9 @@
 from sanic_testing.testing import SanicASGITestClient
 
 from renku_data_services.users.models import UserInfo
+from renku_data_services.utils.core import get_openbis_session_token
 from test.bases.renku_data_services.data_api.utils import merge_headers
 
-
 @pytest.mark.asyncio
 async def test_post_data_connector(sanic_client: SanicASGITestClient, regular_user: UserInfo, user_headers) -> None:
     payload = {
@@ -1073,6 +1073,14 @@ async def test_patch_data_connector_secrets(
     assert len(secrets) == 2
     assert {s["name"] for s in secrets} == {"access_key_id", "secret_access_key"}
 
+    payload = [
+        {"name": "not_sensitive", "value": "not_sensitive_value"},
+    ]
+    _, response = await sanic_client.patch(
+        f"/api/data/data_connectors/{data_connector_id}/secrets", headers=user_headers, json=payload
+    )
+    assert response.status_code == 422, response.json
+
 
 @pytest.mark.asyncio
 async def test_patch_data_connector_secrets_update_secrets(
@@ -1142,7 +1150,7 @@ async def test_patch_data_connector_secrets_add_and_remove_secrets(
     payload = [
         {"name": "access_key_id", "value": "new access key id value"},
         {"name": "secret_access_key", "value": None},
-        {"name": "password", "value": "password"},
+        {"name": "sse_kms_key_id", "value": "password"},
     ]
     _, response = await sanic_client.patch(
         f"/api/data/data_connectors/{data_connector_id}/secrets", headers=user_headers, json=payload
     )
@@ -1152,7 +1160,7 @@
     assert response.json is not None
     secrets = response.json
     assert len(secrets) == 2
-    assert {s["name"] for s in secrets} == {"access_key_id", "password"}
+    assert {s["name"] for s in secrets} == {"access_key_id", "sse_kms_key_id"}
 
     new_access_key_id_secret_id = next(filter(lambda s: s["name"] == "access_key_id", secrets), None)
     assert new_access_key_id_secret_id == access_key_id_secret_id
@@ -1162,15 +1170,14 @@
     assert response.json is not None
     secrets = response.json
     assert len(secrets) == 2
-    assert {s["name"] for s in secrets} == {"access_key_id", "password"}
+    assert {s["name"] for s in secrets} == {"access_key_id", "sse_kms_key_id"}
 
     # Check the associated secrets
     _, response = await sanic_client.get("/api/data/user/secrets", params={"kind": "storage"}, headers=user_headers)
-    assert response.status_code == 200
     assert response.json is not None
     assert len(response.json) == 2
-    assert {s["name"] for s in secrets} == {"access_key_id", "password"}
+    assert {s["name"] for s in secrets} == {"access_key_id", "sse_kms_key_id"}
 
 
 @pytest.mark.asyncio
@@ -1210,6 +1217,51 @@ async def test_delete_data_connector_secrets(
     assert response.json == [], response.json
 
 
+@pytest.mark.myskip(1 == 1, reason="Depends on a remote openBIS host which may not always be available.")
+@pytest.mark.asyncio
+async def test_create_openbis_data_connector(sanic_client, create_openbis_data_connector, user_headers) -> None:
+    openbis_session_token = await get_openbis_session_token(
+        host="openbis-eln-lims.ethz.ch",  # Public openBIS demo instance.
+        username="observer",
+        password="1234",
+    )
+    data_connector = await create_openbis_data_connector(
+        "openBIS data connector 1", session_token=openbis_session_token
+    )
+    data_connector_id = data_connector["id"]
+
+    payload = [
+        {"name": "session_token", "value": openbis_session_token},
+    ]
+    _, response = await sanic_client.patch(
+        f"/api/data/data_connectors/{data_connector_id}/secrets", headers=user_headers, json=payload
+    )
+    assert response.status_code == 200, response.json
+    assert {s["name"] for s in response.json} == {"session_token"}
+    created_secret_ids = {s["secret_id"] for s in response.json}
+    assert len(created_secret_ids) == 1
+    assert response.json[0].keys() == {"secret_id", "name"}
+
+
+@pytest.mark.myskip(1 == 1, reason="Depends on a remote openBIS host which may not always be available.")
+@pytest.mark.asyncio
+async def test_create_openbis_data_connector_with_invalid_session_token(
+    sanic_client, create_openbis_data_connector, user_headers
+) -> None:
+    invalid_openbis_session_token = "1234"
+    data_connector = await create_openbis_data_connector("openBIS data connector 1", invalid_openbis_session_token)
+    data_connector_id = data_connector["id"]
+
+    payload = [
+        {"name": "session_token", "value": invalid_openbis_session_token},
+    ]
+    _, response = await sanic_client.patch(
+        f"/api/data/data_connectors/{data_connector_id}/secrets", headers=user_headers, json=payload
+    )
+    assert response.status_code == 500, response.json
+    assert response.json["error"]["message"] == "An openBIS personal access token related request failed."
+
+
 @pytest.mark.asyncio
 async def test_get_project_permissions_unauthorized(
     sanic_client, create_data_connector, admin_headers, admin_user, user_headers
diff --git a/test/bases/renku_data_services/data_api/test_storage_v2.py b/test/bases/renku_data_services/data_api/test_storage_v2.py
deleted file mode 100644
index e69de29bb..000000000

From a9633ac054a011b7fb170204de4bc867e24c46ee Mon Sep 17 00:00:00 2001
From: olloz26
Date: Thu, 28 Nov 2024 11:38:04 +0100
Subject: [PATCH 10/15] fix: correct a rebase merge

---
 ...=> 57facc53ae84_add_secret_expiration_timestamp.py} | 10 +++++-----
 .../data_api/test_data_connectors.py                   |  1 +
 2 files changed, 6 insertions(+), 5 deletions(-)
 rename components/renku_data_services/migrations/versions/{4d2a21441beb_add_secret_expiration_timestamp.py => 57facc53ae84_add_secret_expiration_timestamp.py} (81%)

diff --git a/components/renku_data_services/migrations/versions/4d2a21441beb_add_secret_expiration_timestamp.py b/components/renku_data_services/migrations/versions/57facc53ae84_add_secret_expiration_timestamp.py
similarity index 81%
rename from components/renku_data_services/migrations/versions/4d2a21441beb_add_secret_expiration_timestamp.py
rename to components/renku_data_services/migrations/versions/57facc53ae84_add_secret_expiration_timestamp.py
index d7f4b22db..ed88aba4f 100644
--- a/components/renku_data_services/migrations/versions/4d2a21441beb_add_secret_expiration_timestamp.py
+++ b/components/renku_data_services/migrations/versions/57facc53ae84_add_secret_expiration_timestamp.py
@@ -1,8 +1,8 @@
 """add secret expiration timestamp
 
-Revision ID: 4d2a21441beb
-Revises: 1ef98b967767
-Create Date: 2024-11-21 17:01:56.468831
+Revision ID: 57facc53ae84
+Revises: 08ac2714e8e2
+Create Date: 2024-11-28 10:31:05.683682
 
 """
 
@@ -10,8 +10,8 @@
 from alembic import op
 
 # revision identifiers, used by Alembic.
-revision = "4d2a21441beb"
-down_revision = "1ef98b967767"
+revision = "57facc53ae84"
+down_revision = "08ac2714e8e2"
 branch_labels = None
 depends_on = None
 
diff --git a/test/bases/renku_data_services/data_api/test_data_connectors.py b/test/bases/renku_data_services/data_api/test_data_connectors.py
index 1ca8515b0..910702483 100644
--- a/test/bases/renku_data_services/data_api/test_data_connectors.py
+++ b/test/bases/renku_data_services/data_api/test_data_connectors.py
@@ -5,6 +5,7 @@
 from renku_data_services.utils.core import get_openbis_session_token
 from test.bases.renku_data_services.data_api.utils import merge_headers
 
+
 @pytest.mark.asyncio
 async def test_post_data_connector(sanic_client: SanicASGITestClient, regular_user: UserInfo, user_headers) -> None:
     payload = {

From 67dd950b256d86f0c6f5524f339862a0cb1d93e3 Mon Sep 17 00:00:00 2001
From: olloz26
Date: Wed, 4 Dec 2024 11:26:10 +0100
Subject: [PATCH 11/15] feat: convert openBIS cloud storage configurations
 into valid rclone configurations before starting a session

---
 .../notebooks/api/schemas/cloud_storage.py       | 16 ++++++++++++----
 components/renku_data_services/storage/rclone.py | 11 ++++++++---
 2 files changed, 20 insertions(+), 7 deletions(-)

diff --git a/components/renku_data_services/notebooks/api/schemas/cloud_storage.py b/components/renku_data_services/notebooks/api/schemas/cloud_storage.py
index 2c1dc4977..0991cb71b 100644
--- a/components/renku_data_services/notebooks/api/schemas/cloud_storage.py
+++ b/components/renku_data_services/notebooks/api/schemas/cloud_storage.py
@@ -210,16 +210,24 @@ def get_manifest_patch(
         return patches
 
     def config_string(self, name: str) -> str:
-        """Convert configuration oblect to string representation.
+        """Convert configuration object to string representation.
 
         Needed to create RClone compatible INI files.
         """
         if not self.configuration:
             raise ValidationError("Missing configuration for cloud storage")
 
-        if self.configuration["type"] == "s3" and self.configuration.get("provider", None) == "Switch":
+        # TODO Use RCloneValidator.get_real_configuration(...) instead.
+        real_config = dict(self.configuration)
+        if real_config["type"] == "s3" and real_config.get("provider") == "Switch":
             # Switch is a fake provider we add for users, we need to replace it since rclone itself
             # doesn't know it
-            self.configuration["provider"] = "Other"
+            real_config["provider"] = "Other"
+        elif real_config["type"] == "openbis":
+            real_config["type"] = "sftp"
+            real_config["port"] = "2222"
+            real_config["user"] = "?"
+            real_config["pass"] = real_config.pop("session_token")
+
         parser = ConfigParser()
         parser.add_section(name)
@@ -228,7 +236,7 @@ def _stringify(value: Any) -> str:
                 return "true" if value else "false"
             return str(value)
 
-        for k, v in self.configuration.items():
+        for k, v in real_config.items():
             parser.set(name, k, _stringify(v))
         stringio = StringIO()
         parser.write(stringio)
diff --git a/components/renku_data_services/storage/rclone.py b/components/renku_data_services/storage/rclone.py
index 53719647a..1eb013267 100644
--- a/components/renku_data_services/storage/rclone.py
+++ b/components/renku_data_services/storage/rclone.py
@@ -246,10 +246,15 @@ def validate_sensitive_data(
                 continue
             raise errors.ValidationError(message=f"The '{key}' property is not marked as sensitive.")
 
-    def get_real_config(self, configuration: Union["RCloneConfig", dict[str, Any]]) -> dict[str, Any]:
+    def get_real_configuration(self, configuration: Union["RCloneConfig", dict[str, Any]]) -> dict[str, Any]:
         """Converts a Renku rclone configuration to a real rclone config."""
         real_config = dict(configuration)
-        if configuration["type"] == "openbis":
+
+        if real_config["type"] == "s3" and real_config.get("provider") == "Switch":
+            # Switch is a fake provider we add for users, we need to replace it since rclone itself
+            # doesn't know it
+            real_config["provider"] = "Other"
+        elif configuration["type"] == "openbis":
             real_config["type"] = "sftp"
             real_config["port"] = "2222"
             real_config["user"] = "?"
@@ -265,7 +270,7 @@ async def test_connection(
         except errors.ValidationError as e:
             return ConnectionResult(False, str(e))
 
-        obscured_rclone_config = await self.obscure_config(self.get_real_config(configuration))
+        obscured_rclone_config = await self.obscure_config(self.get_real_configuration(configuration))
 
         with tempfile.NamedTemporaryFile(mode="w+", delete=False, encoding="utf-8") as f:
             obscured_rclone_config_string = "\n".join(f"{k}={v}" for k, v in obscured_rclone_config.items())

From 4685bb79cd767126d1629803ecf04bf7e1961c4e Mon Sep 17 00:00:00 2001
From: Tasko Olevski
Date: Fri, 25 Jul 2025 00:26:09 +0200
Subject: [PATCH 12/15] squashme: merge main

squashme
---
 .devcontainer/devcontainer.json               |    38 +-
 .devcontainer/docker-compose.yml              |    34 +-
 .../rclone/devcontainer-feature.json          |    29 +
 .devcontainer/rclone/install.sh               |    21 +
 .devcontainer/solr/devcontainer-feature.json  |    18 +
 .devcontainer/solr/install.sh                 |    15 +
 .github/pull_request_template/feature.md      |    34 +
 .github/workflows/acceptance-tests.yml        |   102 +-
 .github/workflows/save_cache.yml              |     4 +-
 .github/workflows/test_publish.yml            |   129 +-
 .gitignore                                    |     4 +
 .../_template__of_py_test.xml                 |    20 +
 .local_env                                    |    26 +
 .pre-commit-config.yaml                       |    12 +-
 CONTRIBUTING.md                               |     2 +-
 DEVELOPING.md                                 |    46 +-
 Makefile                                      |   135 +-
 .../background_jobs/__init__.py               |     1 -
 .../background_jobs/config.py                 |   118 -
 .../background_jobs/core.py                   |   307 -
 .../background_jobs/main.py                   |    75 -
 .../background_jobs/utils.py                  |    49 -
 .../renku_data_services/data_api/__init__.py  |     4 +
 bases/renku_data_services/data_api/app.py     |   239 +-
 bases/renku_data_services/data_api/config.py  |    79 +
 .../data_api/dependencies.py                  |   420 +
 bases/renku_data_services/data_api/main.py    |   130 +-
 .../data_tasks/__init__.py                    |     5 +
 .../renku_data_services/data_tasks/config.py  |    85 +
 .../data_tasks/dependencies.py                |    87 +
 bases/renku_data_services/data_tasks/main.py  |    45 +
 .../renku_data_services/data_tasks}/py.typed  |     0
 .../data_tasks/task_defs.py                   |   406 +
 .../renku_data_services/data_tasks/taskman.py |   205 +
 .../data_tasks/tcp_handler.py                 |    63 +
.../renku_data_services/k8s_cache/__init__.py | 5 + bases/renku_data_services/k8s_cache/config.py | 76 + .../k8s_cache/dependencies.py | 80 + bases/renku_data_services/k8s_cache/main.py | 49 + .../renku_data_services/k8s_cache/py.typed | 0 .../secrets_storage_api/__init__.py | 4 + .../secrets_storage_api/app.py | 16 +- .../secrets_storage_api/config.py | 54 + .../secrets_storage_api/dependencies.py | 64 + .../secrets_storage_api/main.py | 33 +- chartpress.yaml | 10 +- .../app_config/__init__.py | 2 - .../renku_data_services/app_config/config.py | 592 +- .../renku_data_services/app_config/logging.py | 335 + .../renku_data_services/authn/keycloak.py | 12 +- components/renku_data_services/authz/authz.py | 427 +- .../renku_data_services/authz/config.py | 10 +- .../renku_data_services/authz/models.py | 43 +- .../renku_data_services/authz/schemas.py | 237 +- .../base_api/error_handler.py | 2 +- .../renku_data_services/base_api/misc.py | 4 +- .../base_api/pagination.py | 47 +- .../base_models/__init__.py | 2 +- .../renku_data_services/base_models/core.py | 228 +- .../base_models/metrics.py | 103 + .../renku_data_services/base_models/nel.py | 107 + .../base_models/validation.py | 11 +- .../connected_services/apispec.py | 62 +- .../connected_services/blueprints.py | 4 +- .../connected_services/db.py | 34 +- .../renku_data_services/crc/api.spec.yaml | 656 +- components/renku_data_services/crc/apispec.py | 492 +- .../renku_data_services/crc/apispec_base.py | 46 +- .../renku_data_services/crc/blueprints.py | 106 +- components/renku_data_services/crc/core.py | 38 + components/renku_data_services/crc/db.py | 195 +- components/renku_data_services/crc/models.py | 173 +- components/renku_data_services/crc/orm.py | 85 +- .../crc}/server_options.py | 4 +- .../data_connectors/api.spec.yaml | 190 +- .../data_connectors/apispec.py | 159 +- .../data_connectors/blueprints.py | 176 +- .../data_connectors/core.py | 203 +- .../renku_data_services/data_connectors/db.py | 670 +- .../data_connectors/doi/__init__.py | 1 + .../data_connectors/doi/metadata.py | 120 + .../data_connectors/doi/models.py | 69 + .../data_connectors/migration_utils.py | 12 +- .../data_connectors/models.py | 60 +- .../data_connectors/orm.py | 40 +- .../renku_data_services/db_config/config.py | 28 +- .../renku_data_services/errors/__init__.py | 2 +- .../renku_data_services/errors/errors.py | 20 + .../k8s/client_interfaces.py | 5 + components/renku_data_services/k8s/clients.py | 257 +- components/renku_data_services/k8s/config.py | 117 + .../renku_data_services/k8s/constants.py | 17 + components/renku_data_services/k8s/models.py | 275 + components/renku_data_services/k8s/quota.py | 37 +- .../k8s_watcher/__init__.py | 7 + .../renku_data_services/k8s_watcher/core.py | 220 + .../renku_data_services/k8s_watcher/db.py | 102 + .../renku_data_services/k8s_watcher/orm.py | 127 + .../message_queue/api.spec.yaml | 111 - .../message_queue/apispec.py | 42 - .../avro_models/io/renku/__init__.py | 0 .../avro_models/io/renku/events/__init__.py | 1 - .../avro_models/io/renku/events/header.py | 81 - .../io/renku/events/v1/__init__.py | 12 - .../avro_models/io/renku/events/v1/header.py | 81 - .../events/v1/project_authorization_added.py | 64 - .../v1/project_authorization_removed.py | 57 - .../v1/project_authorization_updated.py | 64 - .../io/renku/events/v1/project_created.py | 109 - .../io/renku/events/v1/project_member_role.py | 9 - .../io/renku/events/v1/project_removed.py | 52 - .../io/renku/events/v1/project_updated.py | 95 - 
.../io/renku/events/v1/user_added.py | 76 - .../io/renku/events/v1/user_removed.py | 52 - .../io/renku/events/v1/user_updated.py | 76 - .../io/renku/events/v1/visibility.py | 9 - .../io/renku/events/v2/__init__.py | 19 - .../io/renku/events/v2/group_added.py | 70 - .../io/renku/events/v2/group_member_added.py | 64 - .../renku/events/v2/group_member_removed.py | 57 - .../renku/events/v2/group_member_updated.py | 64 - .../io/renku/events/v2/group_removed.py | 52 - .../io/renku/events/v2/group_updated.py | 70 - .../io/renku/events/v2/member_role.py | 10 - .../io/renku/events/v2/project_created.py | 114 - .../renku/events/v2/project_member_added.py | 64 - .../renku/events/v2/project_member_removed.py | 57 - .../renku/events/v2/project_member_updated.py | 64 - .../io/renku/events/v2/project_removed.py | 52 - .../io/renku/events/v2/project_updated.py | 100 - .../events/v2/reprovisioning_finished.py | 52 - .../renku/events/v2/reprovisioning_started.py | 52 - .../io/renku/events/v2/user_added.py | 81 - .../io/renku/events/v2/user_removed.py | 52 - .../io/renku/events/v2/user_updated.py | 81 - .../io/renku/events/v2/visibility.py | 9 - .../message_queue/blueprints.py | 80 - .../message_queue/config.py | 79 - .../message_queue/converters.py | 389 - .../renku_data_services/message_queue/core.py | 120 - .../renku_data_services/message_queue/db.py | 79 +- .../message_queue/events.py | 21 - .../message_queue/generate_models.py | 177 - .../message_queue/interface.py | 13 - .../message_queue/models.py | 83 - .../renku_data_services/message_queue/orm.py | 64 +- .../message_queue/redis_queue.py | 90 - .../schemas/.github/workflows/ci.yml | 22 - .../message_queue/schemas/LICENSE | 201 - .../message_queue/schemas/README.md | 4 - .../schemas/common/v1/headers.avsc | 35 - .../schemas/common/v1/visibility.avsc | 10 - .../message_queue/schemas/header/headers.avsc | 35 - .../schemas/project/v1/asyncapi.yaml | 92 - .../v1/events/authorization_added.avsc | 20 - .../v1/events/authorization_removed.avsc | 16 - .../v1/events/authorization_updated.avsc | 20 - .../schemas/project/v1/events/created.avsc | 58 - .../schemas/project/v1/events/removed.avsc | 12 - .../schemas/project/v1/events/role.avsc | 10 - .../schemas/project/v1/events/updated.avsc | 47 - .../schemas/user/v1/asyncapi.yaml | 56 - .../schemas/user/v1/events/added.avsc | 27 - .../schemas/user/v1/events/removed.avsc | 13 - .../schemas/user/v1/events/updated.avsc | 24 - .../schemas/v1/common/headers.avsc | 35 - .../schemas/v1/common/visibility.avsc | 10 - .../schemas/v1/project/asyncapi.yaml | 92 - .../project/events/authorization_added.avsc | 20 - .../project/events/authorization_removed.avsc | 16 - .../project/events/authorization_updated.avsc | 20 - .../schemas/v1/project/events/created.avsc | 58 - .../schemas/v1/project/events/removed.avsc | 12 - .../schemas/v1/project/events/role.avsc | 10 - .../schemas/v1/project/events/updated.avsc | 47 - .../schemas/v1/user/asyncapi.yaml | 56 - .../schemas/v1/user/events/added.avsc | 27 - .../schemas/v1/user/events/removed.avsc | 13 - .../schemas/v1/user/events/updated.avsc | 24 - .../message_queue/schemas/v2/asyncapi.yaml | 46 - .../message_queue/schemas/v2/common/role.avsc | 11 - .../schemas/v2/common/visibility.avsc | 10 - .../schemas/v2/group/asyncapi.yaml | 92 - .../schemas/v2/group/events/added.avsc | 24 - .../schemas/v2/group/events/member_added.avsc | 20 - .../v2/group/events/member_removed.avsc | 16 - .../v2/group/events/member_updated.avsc | 20 - .../schemas/v2/group/events/removed.avsc | 13 - 
.../schemas/v2/group/events/updated.avsc | 24 - .../events/reprovisioning_finished.avsc | 12 - .../notify/events/reprovisioning_started.avsc | 12 - .../schemas/v2/project/asyncapi.yaml | 92 - .../schemas/v2/project/events/created.avsc | 62 - .../v2/project/events/member_added.avsc | 20 - .../v2/project/events/member_removed.avsc | 16 - .../v2/project/events/member_updated.avsc | 20 - .../schemas/v2/project/events/removed.avsc | 12 - .../schemas/v2/project/events/updated.avsc | 51 - .../schemas/v2/user/asyncapi.yaml | 56 - .../schemas/v2/user/events/added.avsc | 31 - .../schemas/v2/user/events/removed.avsc | 13 - .../schemas/v2/user/events/updated.avsc | 28 - .../renku_data_services/metrics/__init__.py | 1 + .../renku_data_services/metrics/core.py | 95 + components/renku_data_services/metrics/db.py | 39 + components/renku_data_services/metrics/orm.py | 45 + .../io/__init__.py => metrics/py.typed} | 0 .../renku_data_services/metrics/utils.py | 12 + .../renku_data_services/migrations/core.py | 2 +- .../renku_data_services/migrations/env.py | 14 +- .../renku_data_services/migrations/utils.py | 16 - ...d8e_add_dismmiss_migration_banner_user_.py | 32 + ...8f053_support_env_variables_in_sessions.py | 37 + .../1d2f64a405aa_create_the_cluster_table.py | 71 + ...eac_update_for_custom_environment_build.py | 70 + ...9854e7ea77_add_non_public_read_relation.py | 37 + ...f387ef9a_set_check_constraint_for_data_.py | 33 + ...322f8c5f4eb0_migrate_slug_entity_tables.py | 86 + ...0996_create_initial_global_environments.py | 151 + ...483af0d70cf4_migrate_authz_schema_to_v6.py | 37 + ...9_add_authorization_for_data_connectors.py | 9 +- .../559b1fc46cfe_add_project_migrations.py | 47 + ...64edf7ac0de0_create_search_update_table.py | 48 + ...654104_authz_add_public_viewer_to_group.py | 9 +- .../versions/71ef5efe740f_add_builds.py | 45 + ..._migrate_copied_custom_environments_to_.py | 106 + .../77f46efc541d_add_session_secrets.py | 84 + .../8413f10ef77f_modify_stored_procedures.py | 49 + ...cbec_add_global_slug_to_data_connectors.py | 37 + ...f_add_disk_storage_to_session_launchers.py | 28 + ..._update_sessions_build_parameters_table.py | 32 + ...ec9a_make_data_connector_ids_unique_in_.py | 37 + .../a59e60e0338f_update_user_secrets.py | 62 + ...35a_slugs_for_data_connector_which_are_.py | 45 + ..._ingress_parameters_for_remote_clusters.py | 55 + ...787_add_service_account_name_to_cluster.py | 30 + .../ca87e5b43a44_create_k8s_cache_tables.py | 60 + .../ce1a111d6694_authz_add_initial_schema.py | 9 +- ...da91a3a6a6_add_exclusive_role_relations.py | 37 + ...add_secrets_mount_directory_to_projects.py | 40 + ...95d30_allow_environments_to_be_archived.py | 32 + ...cb9648c3c15_make_group_a_separate_field.py | 36 + ...5fabf6_create_staging_table_for_metrics.py | 42 + ...b87ddd954_migrate_namespaces_to_authzed.py | 14 - ...23_cleanup_data_connector_when_slug_is_.py | 50 + ...82a_authz_add_groups_and_namespaces_in_.py | 9 +- ...fe3b7470d226_remove_redis_message_queue.py | 43 + .../namespace/api.spec.yaml | 160 +- .../renku_data_services/namespace/apispec.py | 138 +- .../namespace/apispec_base.py | 8 + .../namespace/apispec_enhanced.py | 18 + .../namespace/blueprints.py | 91 +- .../renku_data_services/namespace/db.py | 405 +- .../renku_data_services/namespace/models.py | 65 +- .../renku_data_services/namespace/orm.py | 154 +- .../notebooks/api.spec.yaml | 45 +- .../api/amalthea_patches/cloudstorage.py | 8 +- .../notebooks/api/amalthea_patches/general.py | 70 +- .../api/amalthea_patches/git_proxy.py | 6 +- 
.../api/amalthea_patches/git_sidecar.py | 15 +- .../api/amalthea_patches/init_containers.py | 99 +- .../amalthea_patches/inject_certificates.py | 4 +- .../api/amalthea_patches/jupyter_server.py | 19 +- .../notebooks/api/classes/data_service.py | 106 +- .../notebooks/api/classes/image.py | 8 +- .../notebooks/api/classes/k8s_client.py | 716 +- .../notebooks/api/classes/server.py | 191 +- .../notebooks/api/classes/user.py | 13 +- .../notebooks/api/schemas/cloud_storage.py | 145 +- .../notebooks/api/schemas/servers_get.py | 8 +- .../notebooks/api/schemas/servers_post.py | 1 + .../renku_data_services/notebooks/apispec.py | 24 +- .../notebooks/blueprints.py | 609 +- .../notebooks/config/__init__.py | 149 +- .../notebooks/config/dynamic.py | 35 +- .../notebooks/constants.py | 8 + .../renku_data_services/notebooks/core.py | 263 +- .../notebooks/core_sessions.py | 658 + .../notebooks/cr_amalthea_session.py | 853 +- .../notebooks/cr_jupyter_server.py | 40 +- .../renku_data_services/notebooks/crs.py | 158 +- .../notebooks/errors/intermittent.py | 12 +- .../renku_data_services/notebooks/models.py | 45 +- .../notebooks/util/kubernetes_.py | 36 +- .../notebooks/util/repository.py | 5 +- .../renku_data_services/notebooks/utils.py | 60 +- .../renku_data_services/platform/apispec.py | 18 +- .../renku_data_services/platform/models.py | 2 +- .../renku_data_services/project/api.spec.yaml | 500 +- .../renku_data_services/project/apispec.py | 381 +- .../project/apispec_base.py | 12 +- .../renku_data_services/project/blueprints.py | 276 +- .../renku_data_services/project/constants.py | 41 + .../renku_data_services/project/core.py | 168 +- components/renku_data_services/project/db.py | 627 +- .../renku_data_services/project/models.py | 135 +- components/renku_data_services/project/orm.py | 167 +- .../repositories/apispec.py | 18 +- .../renku_data_services/repositories/db.py | 11 +- .../repositories/provider_adapters.py | 4 +- .../renku_data_services/repositories/utils.py | 9 +- .../renku_data_services/search/__init__.py | 1 + .../renku_data_services/search/api.spec.yaml | 535 + .../renku_data_services/search/apispec.py | 195 + .../{message_queue => search}/apispec_base.py | 0 .../renku_data_services/search/authz.py | 96 + .../renku_data_services/search/blueprints.py | 101 + .../renku_data_services/search/converters.py | 148 + components/renku_data_services/search/core.py | 173 + components/renku_data_services/search/db.py | 274 + .../renku_data_services/search/decorators.py | 113 + .../renku_data_services/search/models.py | 52 + components/renku_data_services/search/orm.py | 53 + .../search/query_manual.md | 377 + .../search/query_manual.py | 49 + .../renku_data_services/search/reprovision.py | 147 + .../renku_data_services/search/solr_token.py | 185 + .../search/solr_user_query.py | 433 + .../renku_data_services/search/user_query.py | 1024 + .../search/user_query_parser.py | 226 + .../search/user_query_process.py | 128 + .../renku_data_services/secrets/api.spec.yaml | 7 +- .../renku_data_services/secrets/apispec.py | 34 +- .../renku_data_services/secrets/blueprints.py | 4 +- .../renku_data_services/secrets/config.py | 157 +- .../renku_data_services/secrets/core.py | 50 +- components/renku_data_services/secrets/db.py | 241 +- .../renku_data_services/secrets/models.py | 62 +- components/renku_data_services/secrets/orm.py | 65 +- .../renku_data_services/session/api.spec.yaml | 482 +- .../renku_data_services/session/apispec.py | 400 +- .../session/apispec_extras.py | 13 + 
.../renku_data_services/session/blueprints.py | 112 +- .../renku_data_services/session/config.py | 87 + .../renku_data_services/session/constants.py | 63 + .../renku_data_services/session/core.py | 290 +- .../renku_data_services/session/cr_base.py | 12 + .../session/cr_shipwright_buildrun.py | 3713 ++ .../session/cr_tekton_taskrun.py | 23 + components/renku_data_services/session/crs.py | 65 + components/renku_data_services/session/db.py | 644 +- .../renku_data_services/session/k8s_client.py | 317 + .../renku_data_services/session/models.py | 238 +- components/renku_data_services/session/orm.py | 127 +- .../renku_data_services/solr/__init__.py | 1 + .../solr/entity_documents.py | 313 + .../renku_data_services/solr/entity_schema.py | 158 + .../renku_data_services/solr/solr_client.py | 803 + .../renku_data_services/solr/solr_migrate.py | 280 + .../renku_data_services/solr/solr_schema.py | 439 + .../renku_data_services/storage/README.md | 2 +- .../renku_data_services/storage/api.spec.yaml | 21 +- .../renku_data_services/storage/apispec.py | 85 +- .../renku_data_services/storage/blueprints.py | 24 +- .../renku_data_services/storage/rclone.py | 353 +- .../storage/rclone_patches.py | 258 + .../storage/rclone_schema.autogenerated.json | 37489 +++++++++------- .../renku_data_services/users/api.spec.yaml | 83 +- .../renku_data_services/users/apispec.py | 102 +- .../renku_data_services/users/blueprints.py | 113 +- .../renku_data_services/users/config.py | 8 + components/renku_data_services/users/core.py | 24 + components/renku_data_services/users/db.py | 115 +- .../renku_data_services/users/kc_api.py | 32 +- .../renku_data_services/users/models.py | 25 +- components/renku_data_services/users/orm.py | 20 +- components/renku_data_services/utils/etag.py | 23 +- .../manifests/buildrun.yaml | 32 + .../manifests/buildstrategy.yaml | 90 + development/.keep | 0 flake.lock | 176 +- flake.nix | 262 +- helm-chart/renku-data-services/values.yaml | 8 + poetry.lock | 4971 +- projects/background_jobs/poetry.lock | 2671 -- projects/k8s_watcher/Dockerfile | 41 + projects/k8s_watcher/poetry.lock | 3420 ++ projects/k8s_watcher/pyproject.toml | 93 + projects/renku_data_service/Dockerfile | 26 +- projects/renku_data_service/poetry.lock | 3616 +- projects/renku_data_service/pyproject.toml | 56 +- .../Dockerfile | 13 +- projects/renku_data_tasks/poetry.lock | 3373 ++ .../pyproject.toml | 100 +- projects/secrets_storage/Dockerfile | 9 +- projects/secrets_storage/poetry.lock | 2922 +- projects/secrets_storage/pyproject.toml | 56 +- pyproject.toml | 103 +- registries.yaml | 4 + setup-k3d-cluster.sh | 178 + .../background_jobs/test_utils.py | 62 - .../data_api/__snapshots__/test_projects.ambr | 95 + .../data_api/__snapshots__/test_sessions.ambr | 132 + .../data_api/__snapshots__/test_storage.ambr | 18067 ++++++++ .../__snapshots__/test_user_preferences.ambr | 31 + .../renku_data_services/data_api/conftest.py | 350 +- .../data_api/test_clusters.py | 228 + .../data_api/test_connected_services.py | 12 +- .../data_api/test_data_connectors.py | 1163 +- .../data_api/test_groups.py | 146 +- .../data_api/test_message_queue.py | 122 - .../data_api/test_metrics.py | 49 + .../data_api/test_migrations.py | 432 +- .../data_api/test_namespaces.py | 336 + .../data_api/test_notebooks.py | 217 +- .../test_parsing_old_server_options.py | 2 +- .../data_api/test_platform_config.py | 4 +- .../data_api/test_projects.py | 652 +- .../data_api/test_repositories.py | 33 +- .../data_api/test_resource_pools.py | 296 +- .../data_api/test_schemathesis.py | 
46 +- .../data_api/test_search.py | 265 + .../data_api/test_secret.py | 91 +- .../data_api/test_session_secrets.py | 628 + .../data_api/test_sessions.py | 1073 +- .../data_api/test_smoke.py | 4 +- .../data_api/test_storage.py | 64 +- .../data_api/test_user_preferences.py | 64 +- .../data_api/test_users.py | 16 +- .../renku_data_services/data_api/utils.py | 50 +- .../test_sync.py | 578 +- .../data_tasks/test_taskman.py | 123 + .../app_config/test_logging.py | 119 + .../authz/test_authorization.py | 130 +- .../renku_data_services/authz/test_schemas.py | 231 + .../base_models/test_nel.py | 61 + .../base_models/test_slugs.py | 1 - .../connected_services/test_encryption.py | 10 +- .../crc_models/hypothesis.py | 4 +- .../data_api/test_config.py | 33 +- .../db/test_sqlalchemy_pool_repo.py | 68 +- .../db/test_sqlalchemy_storage_repo.py | 18 +- .../test_sqlalchemy_user_preferences_repo.py | 32 +- .../k8s/test_k8s_adapter.py | 93 + .../message_queue/__init__.py | 0 .../message_queue/test_EventORM.py | 28 - .../message_queue/test_queue.py | 63 - .../search/test_SearchUpdatesORM.py | 25 + .../renku_data_services/search/test_core.py | 80 + .../renku_data_services/search/test_db.py | 174 + .../search/test_query_manual.py | 38 + .../search/test_reprovision.py | 187 + .../search/test_solr_token.py | 136 + .../search/test_solr_user_query.py | 176 + .../search/test_user_query.py | 210 + .../search/test_user_query_parser.py | 480 + .../search/test_user_query_process.py | 76 + .../solr/test_entity_documents.py | 287 + .../solr/test_solr_client.py | 361 + .../solr/test_solr_migrate.py | 41 + .../solr/test_solr_schema.py | 53 + .../renku_data_services/users/test_db.py | 43 + .../test_environment_build.py | 90 + test/conftest.py | 199 +- test/utils.py | 285 +- user/v1/asyncapi.yaml | 56 - user/v1/events/added.avsc | 24 - user/v1/events/removed.avsc | 13 - user/v1/events/updated.avsc | 24 - 459 files changed, 91680 insertions(+), 37459 deletions(-) create mode 100644 .devcontainer/rclone/devcontainer-feature.json create mode 100644 .devcontainer/rclone/install.sh create mode 100644 .devcontainer/solr/devcontainer-feature.json create mode 100644 .devcontainer/solr/install.sh create mode 100644 .github/pull_request_template/feature.md create mode 100644 .idea/runConfigurations/_template__of_py_test.xml create mode 100644 .local_env delete mode 100644 bases/renku_data_services/background_jobs/__init__.py delete mode 100644 bases/renku_data_services/background_jobs/config.py delete mode 100644 bases/renku_data_services/background_jobs/core.py delete mode 100644 bases/renku_data_services/background_jobs/main.py delete mode 100644 bases/renku_data_services/background_jobs/utils.py create mode 100644 bases/renku_data_services/data_api/config.py create mode 100644 bases/renku_data_services/data_api/dependencies.py create mode 100644 bases/renku_data_services/data_tasks/__init__.py create mode 100644 bases/renku_data_services/data_tasks/config.py create mode 100644 bases/renku_data_services/data_tasks/dependencies.py create mode 100644 bases/renku_data_services/data_tasks/main.py rename {components/renku_data_services/message_queue => bases/renku_data_services/data_tasks}/py.typed (100%) create mode 100644 bases/renku_data_services/data_tasks/task_defs.py create mode 100644 bases/renku_data_services/data_tasks/taskman.py create mode 100644 bases/renku_data_services/data_tasks/tcp_handler.py create mode 100644 bases/renku_data_services/k8s_cache/__init__.py create mode 100644 bases/renku_data_services/k8s_cache/config.py 
create mode 100644 bases/renku_data_services/k8s_cache/dependencies.py create mode 100644 bases/renku_data_services/k8s_cache/main.py rename components/renku_data_services/message_queue/avro_models/__init__.py => bases/renku_data_services/k8s_cache/py.typed (100%) create mode 100644 bases/renku_data_services/secrets_storage_api/config.py create mode 100644 bases/renku_data_services/secrets_storage_api/dependencies.py create mode 100644 components/renku_data_services/app_config/logging.py create mode 100644 components/renku_data_services/base_models/metrics.py create mode 100644 components/renku_data_services/base_models/nel.py create mode 100644 components/renku_data_services/crc/core.py rename {bases/renku_data_services/data_api => components/renku_data_services/crc}/server_options.py (96%) create mode 100644 components/renku_data_services/data_connectors/doi/__init__.py create mode 100644 components/renku_data_services/data_connectors/doi/metadata.py create mode 100644 components/renku_data_services/data_connectors/doi/models.py create mode 100644 components/renku_data_services/k8s/config.py create mode 100644 components/renku_data_services/k8s/constants.py create mode 100644 components/renku_data_services/k8s/models.py create mode 100644 components/renku_data_services/k8s_watcher/__init__.py create mode 100644 components/renku_data_services/k8s_watcher/core.py create mode 100644 components/renku_data_services/k8s_watcher/db.py create mode 100644 components/renku_data_services/k8s_watcher/orm.py delete mode 100644 components/renku_data_services/message_queue/api.spec.yaml delete mode 100644 components/renku_data_services/message_queue/apispec.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/__init__.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/__init__.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/header.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/__init__.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/header.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_added.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_removed.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_updated.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_created.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_member_role.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_removed.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_updated.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_added.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_removed.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_updated.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v1/visibility.py delete mode 100644 
components/renku_data_services/message_queue/avro_models/io/renku/events/v2/__init__.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_added.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_added.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_removed.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_updated.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_removed.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_updated.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/member_role.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_created.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_added.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_removed.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_updated.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_removed.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_updated.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/reprovisioning_finished.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/reprovisioning_started.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_added.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_removed.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_updated.py delete mode 100644 components/renku_data_services/message_queue/avro_models/io/renku/events/v2/visibility.py delete mode 100644 components/renku_data_services/message_queue/blueprints.py delete mode 100644 components/renku_data_services/message_queue/config.py delete mode 100644 components/renku_data_services/message_queue/converters.py delete mode 100644 components/renku_data_services/message_queue/core.py delete mode 100644 components/renku_data_services/message_queue/events.py delete mode 100644 components/renku_data_services/message_queue/generate_models.py delete mode 100644 components/renku_data_services/message_queue/interface.py delete mode 100644 components/renku_data_services/message_queue/redis_queue.py delete mode 100644 components/renku_data_services/message_queue/schemas/.github/workflows/ci.yml delete mode 100644 components/renku_data_services/message_queue/schemas/LICENSE delete mode 100644 components/renku_data_services/message_queue/schemas/README.md delete mode 100644 components/renku_data_services/message_queue/schemas/common/v1/headers.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/common/v1/visibility.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/header/headers.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/project/v1/asyncapi.yaml delete mode 100644 
components/renku_data_services/message_queue/schemas/project/v1/events/authorization_added.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/project/v1/events/authorization_removed.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/project/v1/events/authorization_updated.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/project/v1/events/created.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/project/v1/events/removed.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/project/v1/events/role.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/project/v1/events/updated.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/user/v1/asyncapi.yaml delete mode 100644 components/renku_data_services/message_queue/schemas/user/v1/events/added.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/user/v1/events/removed.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/user/v1/events/updated.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/common/headers.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/common/visibility.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/project/asyncapi.yaml delete mode 100644 components/renku_data_services/message_queue/schemas/v1/project/events/authorization_added.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/project/events/authorization_removed.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/project/events/authorization_updated.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/project/events/created.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/project/events/removed.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/project/events/role.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/project/events/updated.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/user/asyncapi.yaml delete mode 100644 components/renku_data_services/message_queue/schemas/v1/user/events/added.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/user/events/removed.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v1/user/events/updated.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/asyncapi.yaml delete mode 100644 components/renku_data_services/message_queue/schemas/v2/common/role.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/common/visibility.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/group/asyncapi.yaml delete mode 100644 components/renku_data_services/message_queue/schemas/v2/group/events/added.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/group/events/member_added.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/group/events/member_removed.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/group/events/member_updated.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/group/events/removed.avsc delete mode 100644 
components/renku_data_services/message_queue/schemas/v2/group/events/updated.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/notify/events/reprovisioning_finished.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/notify/events/reprovisioning_started.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/project/asyncapi.yaml delete mode 100644 components/renku_data_services/message_queue/schemas/v2/project/events/created.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/project/events/member_added.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/project/events/member_removed.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/project/events/member_updated.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/project/events/removed.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/project/events/updated.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/user/asyncapi.yaml delete mode 100644 components/renku_data_services/message_queue/schemas/v2/user/events/added.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/user/events/removed.avsc delete mode 100644 components/renku_data_services/message_queue/schemas/v2/user/events/updated.avsc create mode 100644 components/renku_data_services/metrics/__init__.py create mode 100644 components/renku_data_services/metrics/core.py create mode 100644 components/renku_data_services/metrics/db.py create mode 100644 components/renku_data_services/metrics/orm.py rename components/renku_data_services/{message_queue/avro_models/io/__init__.py => metrics/py.typed} (100%) create mode 100644 components/renku_data_services/metrics/utils.py create mode 100644 components/renku_data_services/migrations/versions/042eeb50cd8e_add_dismmiss_migration_banner_user_.py create mode 100644 components/renku_data_services/migrations/versions/0c205e28f053_support_env_variables_in_sessions.py create mode 100644 components/renku_data_services/migrations/versions/1d2f64a405aa_create_the_cluster_table.py create mode 100644 components/renku_data_services/migrations/versions/1e296d744eac_update_for_custom_environment_build.py create mode 100644 components/renku_data_services/migrations/versions/239854e7ea77_add_non_public_read_relation.py create mode 100644 components/renku_data_services/migrations/versions/2d3bf387ef9a_set_check_constraint_for_data_.py create mode 100644 components/renku_data_services/migrations/versions/322f8c5f4eb0_migrate_slug_entity_tables.py create mode 100644 components/renku_data_services/migrations/versions/450ae3930996_create_initial_global_environments.py create mode 100644 components/renku_data_services/migrations/versions/483af0d70cf4_migrate_authz_schema_to_v6.py create mode 100644 components/renku_data_services/migrations/versions/559b1fc46cfe_add_project_migrations.py create mode 100644 components/renku_data_services/migrations/versions/64edf7ac0de0_create_search_update_table.py create mode 100644 components/renku_data_services/migrations/versions/71ef5efe740f_add_builds.py create mode 100644 components/renku_data_services/migrations/versions/75c83dd9d619_migrate_copied_custom_environments_to_.py create mode 100644 components/renku_data_services/migrations/versions/77f46efc541d_add_session_secrets.py create mode 100644 
components/renku_data_services/migrations/versions/8413f10ef77f_modify_stored_procedures.py create mode 100644 components/renku_data_services/migrations/versions/8d67347dcbec_add_global_slug_to_data_connectors.py create mode 100644 components/renku_data_services/migrations/versions/939c7c649bef_add_disk_storage_to_session_launchers.py create mode 100644 components/renku_data_services/migrations/versions/9ec3d97e3b3d_update_sessions_build_parameters_table.py create mode 100644 components/renku_data_services/migrations/versions/a1f7f5fbec9a_make_data_connector_ids_unique_in_.py create mode 100644 components/renku_data_services/migrations/versions/a59e60e0338f_update_user_secrets.py create mode 100644 components/renku_data_services/migrations/versions/b0a52ff8335a_slugs_for_data_connector_which_are_.py create mode 100644 components/renku_data_services/migrations/versions/b402b9d584bf_add_ingress_parameters_for_remote_clusters.py create mode 100644 components/renku_data_services/migrations/versions/ca3731b65787_add_service_account_name_to_cluster.py create mode 100644 components/renku_data_services/migrations/versions/ca87e5b43a44_create_k8s_cache_tables.py create mode 100644 components/renku_data_services/migrations/versions/cfda91a3a6a6_add_exclusive_role_relations.py create mode 100644 components/renku_data_services/migrations/versions/d1cdcbb2adc3_add_secrets_mount_directory_to_projects.py create mode 100644 components/renku_data_services/migrations/versions/d71f0f795d30_allow_environments_to_be_archived.py create mode 100644 components/renku_data_services/migrations/versions/dcb9648c3c15_make_group_a_separate_field.py create mode 100644 components/renku_data_services/migrations/versions/ee719a5fabf6_create_staging_table_for_metrics.py create mode 100644 components/renku_data_services/migrations/versions/f4ad62b7b323_cleanup_data_connector_when_slug_is_.py create mode 100644 components/renku_data_services/migrations/versions/fe3b7470d226_remove_redis_message_queue.py create mode 100644 components/renku_data_services/namespace/apispec_enhanced.py create mode 100644 components/renku_data_services/notebooks/constants.py create mode 100644 components/renku_data_services/notebooks/core_sessions.py create mode 100644 components/renku_data_services/project/constants.py create mode 100644 components/renku_data_services/search/__init__.py create mode 100644 components/renku_data_services/search/api.spec.yaml create mode 100644 components/renku_data_services/search/apispec.py rename components/renku_data_services/{message_queue => search}/apispec_base.py (100%) create mode 100644 components/renku_data_services/search/authz.py create mode 100644 components/renku_data_services/search/blueprints.py create mode 100644 components/renku_data_services/search/converters.py create mode 100644 components/renku_data_services/search/core.py create mode 100644 components/renku_data_services/search/db.py create mode 100644 components/renku_data_services/search/decorators.py create mode 100644 components/renku_data_services/search/models.py create mode 100644 components/renku_data_services/search/orm.py create mode 100644 components/renku_data_services/search/query_manual.md create mode 100644 components/renku_data_services/search/query_manual.py create mode 100644 components/renku_data_services/search/reprovision.py create mode 100644 components/renku_data_services/search/solr_token.py create mode 100644 components/renku_data_services/search/solr_user_query.py create mode 100644 
components/renku_data_services/search/user_query.py create mode 100644 components/renku_data_services/search/user_query_parser.py create mode 100644 components/renku_data_services/search/user_query_process.py create mode 100644 components/renku_data_services/session/apispec_extras.py create mode 100644 components/renku_data_services/session/config.py create mode 100644 components/renku_data_services/session/constants.py create mode 100644 components/renku_data_services/session/cr_base.py create mode 100644 components/renku_data_services/session/cr_shipwright_buildrun.py create mode 100644 components/renku_data_services/session/cr_tekton_taskrun.py create mode 100644 components/renku_data_services/session/crs.py create mode 100644 components/renku_data_services/session/k8s_client.py create mode 100644 components/renku_data_services/solr/__init__.py create mode 100644 components/renku_data_services/solr/entity_documents.py create mode 100644 components/renku_data_services/solr/entity_schema.py create mode 100644 components/renku_data_services/solr/solr_client.py create mode 100644 components/renku_data_services/solr/solr_migrate.py create mode 100644 components/renku_data_services/solr/solr_schema.py create mode 100644 components/renku_data_services/storage/rclone_patches.py create mode 100644 components/renku_data_services/users/core.py create mode 100644 components/renku_pack_builder/manifests/buildrun.yaml create mode 100644 components/renku_pack_builder/manifests/buildstrategy.yaml delete mode 100644 development/.keep delete mode 100644 projects/background_jobs/poetry.lock create mode 100644 projects/k8s_watcher/Dockerfile create mode 100644 projects/k8s_watcher/poetry.lock create mode 100644 projects/k8s_watcher/pyproject.toml rename projects/{background_jobs => renku_data_tasks}/Dockerfile (77%) create mode 100644 projects/renku_data_tasks/poetry.lock rename projects/{background_jobs => renku_data_tasks}/pyproject.toml (67%) create mode 100644 registries.yaml create mode 100755 setup-k3d-cluster.sh delete mode 100644 test/bases/renku_data_services/background_jobs/test_utils.py create mode 100644 test/bases/renku_data_services/data_api/__snapshots__/test_projects.ambr create mode 100644 test/bases/renku_data_services/data_api/__snapshots__/test_sessions.ambr create mode 100644 test/bases/renku_data_services/data_api/__snapshots__/test_storage.ambr create mode 100644 test/bases/renku_data_services/data_api/__snapshots__/test_user_preferences.ambr create mode 100644 test/bases/renku_data_services/data_api/test_clusters.py delete mode 100644 test/bases/renku_data_services/data_api/test_message_queue.py create mode 100644 test/bases/renku_data_services/data_api/test_metrics.py create mode 100644 test/bases/renku_data_services/data_api/test_search.py create mode 100644 test/bases/renku_data_services/data_api/test_session_secrets.py rename test/bases/renku_data_services/{background_jobs => data_tasks}/test_sync.py (60%) create mode 100644 test/bases/renku_data_services/data_tasks/test_taskman.py create mode 100644 test/components/renku_data_services/app_config/test_logging.py create mode 100644 test/components/renku_data_services/base_models/test_nel.py delete mode 100644 test/components/renku_data_services/message_queue/__init__.py delete mode 100644 test/components/renku_data_services/message_queue/test_EventORM.py delete mode 100644 test/components/renku_data_services/message_queue/test_queue.py create mode 100644 test/components/renku_data_services/search/test_SearchUpdatesORM.py create 
mode 100644 test/components/renku_data_services/search/test_core.py create mode 100644 test/components/renku_data_services/search/test_db.py create mode 100644 test/components/renku_data_services/search/test_query_manual.py create mode 100644 test/components/renku_data_services/search/test_reprovision.py create mode 100644 test/components/renku_data_services/search/test_solr_token.py create mode 100644 test/components/renku_data_services/search/test_solr_user_query.py create mode 100644 test/components/renku_data_services/search/test_user_query.py create mode 100644 test/components/renku_data_services/search/test_user_query_parser.py create mode 100644 test/components/renku_data_services/search/test_user_query_process.py create mode 100644 test/components/renku_data_services/solr/test_entity_documents.py create mode 100644 test/components/renku_data_services/solr/test_solr_client.py create mode 100644 test/components/renku_data_services/solr/test_solr_migrate.py create mode 100644 test/components/renku_data_services/solr/test_solr_schema.py create mode 100644 test/components/renku_data_services/users/test_db.py create mode 100644 test/components/renku_pack_builder/test_environment_build.py delete mode 100644 user/v1/asyncapi.yaml delete mode 100644 user/v1/events/added.avsc delete mode 100644 user/v1/events/removed.avsc delete mode 100644 user/v1/events/updated.avsc diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 189ef000b..1ad43a3ad 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -6,16 +6,16 @@ "workspaceFolder": "/workspace", "shutdownAction": "stopCompose", "features": { - "ghcr.io/devcontainers-contrib/features/poetry:2": {}, - "ghcr.io/devcontainers-contrib/features/bash-command:1": { + "ghcr.io/devcontainers-extra/features/poetry:2": {}, + "ghcr.io/devcontainers-extra/features/bash-command:1": { "command": "poetry self add poetry-polylith-plugin" }, "ghcr.io/devcontainers/features/docker-in-docker:2": {}, - "ghcr.io/devcontainers-contrib/features/gh-release:1": { + "ghcr.io/devcontainers-extra/features/gh-release:1": { "repo": "authzed/zed", "binaryNames": "zed" }, - "ghcr.io/devcontainers-contrib/features/spicedb:1": {}, + "ghcr.io/devcontainers-extra/features/spicedb:1": {}, "ghcr.io/devcontainers/features/kubectl-helm-minikube:1": { "minikube": "none" }, @@ -23,28 +23,38 @@ "jqVersion": "latest", "yqVersion": "latest" }, - "ghcr.io/dhoeric/features/k9s:1": {}, + "ghcr.io/dhoeric/features/k9s:1": { + "version": "0.40.9" + }, "ghcr.io/EliiseS/devcontainer-features/bash-profile:1": { "command": "alias k=kubectl" }, - "ghcr.io/devcontainers-contrib/features/rclone:1": {}, - "./k3d": {} + "./k3d": {}, + "ghcr.io/devcontainers/features/java:1": { + "version": "21", + "jdkDistro": "open" + }, + "./solr": {}, + "ghcr.io/devcontainers/features/go:1": {}, + "./rclone": { + "rclone_repository": "https://github.com/SwissDataScienceCenter/rclone.git", + "rclone_ref": "v1.70.0+renku-1" + } }, "overrideFeatureInstallOrder": [ - "ghcr.io/devcontainers-contrib/features/poetry", - "ghcr.io/devcontainers-contrib/features/bash-command" + "ghcr.io/devcontainers-extra/features/poetry", + "ghcr.io/devcontainers-extra/features/bash-command" ], "postCreateCommand": "poetry install --with dev && mkdir -p /home/vscode/.config/k9s", "customizations": { "vscode": { "extensions": [ + "charliermarsh.ruff", + "matangover.mypy", + "ms-python.mypy-type-checker", "ms-python.python", - "ms-python.flake8", - "ms-python.black-formatter", - 
"ms-python.isort", "ms-python.vscode-pylance", - "ms-vscode.makefile-tools", - "matangover.mypy" + "ms-vscode.makefile-tools" ] } } diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 7369ac7cc..1e7a6cf06 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -1,8 +1,6 @@ -version: '3.8' - services: data_service: - image: "mcr.microsoft.com/devcontainers/python:3.12-bookworm" + image: "mcr.microsoft.com/devcontainers/python:3.13-bookworm" volumes: - ../:/workspace:cached - .poetry_cache/:/poetry_cache:delegated @@ -26,10 +24,13 @@ services: NB_SERVER_OPTIONS__DEFAULTS_PATH: /workspace/server_defaults.json NB_SERVER_OPTIONS__UI_CHOICES_PATH: /workspace/server_options.json KUBECONFIG: "/workspace/.k3d-config.yaml" + SOLR_URL: "http://localhost:8983" + SOLR_CORE: "renku-search-dev" network_mode: service:db depends_on: - db - authz + - solr db: image: postgres:latest @@ -43,13 +44,18 @@ services: POSTGRES_USER: renku POSTGRES_DB: postgres POSTGRES_PASSWORD: renku + # All services is put into the network of this service, so + # everything is reachable via localhost. This is necessary, + # because authzed doesn't allow insecure connections from anything + # other than localhost…. ports: - - "8000:8000" - - "5432:5432" - - "8080:8080" - - "5678:5678" - - "50051:50051" + - "8000:8000" # data-service api + - "5432:5432" # postgresql + - "8080:8080" # swagger + - "5678:5678" # python debugger (data_service) + - "50051:50051" # authzed grpc - "8888:80" + - "8983:8983" # solr swagger: image: swaggerapi/swagger-ui @@ -70,5 +76,17 @@ services: command: - serve + solr: + image: solr:9 + restart: unless-stopped + network_mode: service:db + volumes: + - solr_data:/var/solr + command: + - bash + - -c + - 'precreate-core renku-search-dev; exec solr -f -Dsolr.modules=analysis-extras' + volumes: postgres-data: + solr_data: diff --git a/.devcontainer/rclone/devcontainer-feature.json b/.devcontainer/rclone/devcontainer-feature.json new file mode 100644 index 000000000..384470e34 --- /dev/null +++ b/.devcontainer/rclone/devcontainer-feature.json @@ -0,0 +1,29 @@ +{ + "id": "rclone", + "version": "1.0.0", + "name": "A feature adding a custom version of rclone", + "postCreateCommand": "rclone --version", + "installsAfter": [ + "ghcr.io/devcontainers-contrib/features/bash-command", + "ghcr.io/devcontainers/features/go" + ], + "options": { + "rclone_repository": { + "type": "string", + "description": "rclone repository", + "proposals": [ + "https://github.com/SwissDataScienceCenter/rclone.git", + "https://github.com/rclone/rclone.git" + ], + "default": "https://github.com/rclone/rclone.git" + }, + "rclone_ref": { + "type": "string", + "description": "git reference", + "proposals": [ + "master" + ], + "default": "v1.69.1" + } + } +} diff --git a/.devcontainer/rclone/install.sh b/.devcontainer/rclone/install.sh new file mode 100644 index 000000000..c9a86242b --- /dev/null +++ b/.devcontainer/rclone/install.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +USERNAME="${_REMOTE_USER}" + +set -ex + +echo "Downloading rclone sources from ${RCLONE_REPOSITORY}:${RCLONE_REF}" +mkdir -p /tmp/rclone +cd /tmp/rclone +git clone "${RCLONE_REPOSITORY}" +cd rclone +git checkout "${RCLONE_REF}" + +echo "Building rclone" +make rclone +cd $HOME +rm -rf /tmp/rclone + +# Fix the $GOPATH folder +chown -R "${USERNAME}:golang" /go +chmod -R g+r+w /go diff --git a/.devcontainer/solr/devcontainer-feature.json b/.devcontainer/solr/devcontainer-feature.json new file mode 100644 index 
000000000..5b9e11018 --- /dev/null +++ b/.devcontainer/solr/devcontainer-feature.json @@ -0,0 +1,18 @@ +{ + "name": "solr", + "id": "solr", + "version": "1.0.0", + "description": "A feature adding solr", + "options": { + "solr_version": { + "type": "string", + "description": "solr version to install", + "proposals": ["9.8.0"], + "default": "9.8.0" + } + }, + "installsAfter": [ + "ghcr.io/devcontainers/features/java", + "ghcr.io/devcontainers-contrib/features/bash-command" + ] +} diff --git a/.devcontainer/solr/install.sh b/.devcontainer/solr/install.sh new file mode 100644 index 000000000..b57cc8a8d --- /dev/null +++ b/.devcontainer/solr/install.sh @@ -0,0 +1,15 @@ +#!/bin/sh +set -e + +echo "Activating Solr feature" +download_url="https://archive.apache.org/dist/solr/solr/$SOLR_VERSION/solr-$SOLR_VERSION.tgz" + +curl -sSL -o solr.tgz "$download_url" +mkdir -p /opt +tar -C /opt -xzf solr.tgz +ln -snf "/opt/solr-$SOLR_VERSION" /opt/solr +ln -snf /opt/solr/bin/solr /usr/local/bin/solr +mkdir -p /opt/solr/server/logs +chmod 777 /opt/solr/server/logs +chmod 777 /opt/solr/bin +chown -R vscode:vscode "/opt/solr-$SOLR_VERSION" diff --git a/.github/pull_request_template/feature.md b/.github/pull_request_template/feature.md new file mode 100644 index 000000000..3245bd11b --- /dev/null +++ b/.github/pull_request_template/feature.md @@ -0,0 +1,34 @@ +# Release Notes + +## Description + + + +## User-Facing Changes + +**🌟 New Features** + +- 🐸 **Data services**: + +**✨ Improvements** + +- 🐸 **Data services**: + +**🐞 Bug Fixes** + +- **Data services**: + +## Internal Changes + +**Improvements** + +- **Data services**: + +**Bug Fixes** + +- **Data services**: + +# Internal/Miscellaneous + + diff --git a/.github/workflows/acceptance-tests.yml b/.github/workflows/acceptance-tests.yml index 33924f3ea..5e47456a7 100644 --- a/.github/workflows/acceptance-tests.yml +++ b/.github/workflows/acceptance-tests.yml @@ -1,5 +1,8 @@ name: Acceptance tests +permissions: + contents: read + pull-requests: read on: pull_request: @@ -11,12 +14,13 @@ on: - closed concurrency: - group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} + group: ${{ github.workflow }}-${{ github.ref || github.run_id }} cancel-in-progress: true jobs: check-deploy: - runs-on: ubuntu-22.04 + name: Analyze deploy string + runs-on: ubuntu-24.04 outputs: pr-contains-string: ${{ steps.deploy-comment.outputs.pr-contains-string }} renku: ${{ steps.deploy-comment.outputs.renku}} @@ -28,25 +32,35 @@ jobs: amalthea-sessions: ${{ steps.deploy-comment.outputs.amalthea-sessions}} amalthea: ${{ steps.deploy-comment.outputs.amalthea}} test-enabled: ${{ steps.deploy-comment.outputs.test-enabled}} - test-cypress-enabled: ${{ steps.deploy-comment.outputs.test-cypress-enabled}} - persist: ${{ steps.deploy-comment.outputs.persist}} + test-legacy-enabled: ${{ steps.deploy-comment.outputs.test-legacy-enabled}} extra-values: ${{ steps.deploy-comment.outputs.extra-values}} steps: - id: deploy-comment - uses: SwissDataScienceCenter/renku-actions/check-pr-description@v1.13.0 + uses: SwissDataScienceCenter/renku-actions/check-pr-description@v1.18.2 with: - string: /deploy pr_ref: ${{ github.event.number }} deploy-pr: + name: Deploy + runs-on: ubuntu-24.04 needs: [check-deploy] + permissions: + pull-requests: write if: github.event.action != 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' - runs-on: ubuntu-22.04 environment: name: renku-ci-ds-${{ github.event.number }} url: https://renku-ci-ds-${{ github.event.number 
}}.dev.renku.ch steps: - uses: actions/checkout@v4 + with: + repository: SwissDataScienceCenter/renku + sparse-checkout: | + minimal-deployment + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.RENKU_DOCKER_USERNAME }} + password: ${{ secrets.RENKU_DOCKER_PASSWORD }} - name: Find deployment url uses: peter-evans/find-comment@v3 id: deploymentUrlMessage @@ -63,21 +77,17 @@ jobs: body: | You can access the deployment of this PR at https://renku-ci-ds-${{ github.event.number }}.dev.renku.ch - name: Build and deploy - uses: SwissDataScienceCenter/renku-actions/deploy-renku@v1.13.0 + uses: SwissDataScienceCenter/renku-actions/deploy-renku@v1.18.2 env: - RANCHER_PROJECT_ID: ${{ secrets.CI_RANCHER_PROJECT }} DOCKER_PASSWORD: ${{ secrets.RENKU_DOCKER_PASSWORD }} DOCKER_USERNAME: ${{ secrets.RENKU_DOCKER_USERNAME }} GITLAB_TOKEN: ${{ secrets.DEV_GITLAB_TOKEN }} KUBECONFIG: ${{ github.workspace }}/renkubot-kube.config RENKU_RELEASE: renku-ci-ds-${{ github.event.number }} RENKU_VALUES_FILE: ${{ github.workspace }}/values.yaml - RENKU_VALUES: ${{ secrets.COMBINED_CHARTS_CI_RENKU_VALUES }} + RENKU_VALUES: minimal-deployment/minimal-deployment-values.yaml RENKUBOT_KUBECONFIG: ${{ secrets.RENKUBOT_DEV_KUBECONFIG }} RENKUBOT_RANCHER_BEARER_TOKEN: ${{ secrets.RENKUBOT_RANCHER_BEARER_TOKEN }} - RANCHER_DEV_API_ENDPOINT: ${{ secrets.RANCHER_DEV_API_ENDPOINT }} - RENKU_BOT_DEV_PASSWORD: ${{ secrets.RENKU_BOT_DEV_PASSWORD }} - RENKU_TESTS_ENABLED: true TEST_ARTIFACTS_PATH: "tests-artifacts-${{ github.sha }}" renku_ui: "${{ needs.check-deploy.outputs.renku-ui }}" renku: "${{ needs.check-deploy.outputs.renku }}" @@ -90,49 +100,81 @@ jobs: amalthea_sessions: "${{ needs.check-deploy.outputs.amalthea-sessions }}" extra_values: "${{ needs.check-deploy.outputs.extra-values }}" - selenium-acceptance-tests: + legacy-selenium-acceptance-tests: + name: Legacy Selenium tests needs: [check-deploy, deploy-pr] - if: github.event.action != 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' && needs.check-deploy.outputs.test-enabled == 'true' - runs-on: ubuntu-22.04 + if: github.event.action != 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' && needs.check-deploy.outputs.test-legacy-enabled == 'true' + runs-on: ubuntu-24.04 steps: - - uses: SwissDataScienceCenter/renku-actions/test-renku@v1.13.0 + - uses: SwissDataScienceCenter/renku-actions/test-renku@v1.18.2 with: kubeconfig: ${{ secrets.RENKUBOT_DEV_KUBECONFIG }} renku-release: renku-ci-ds-${{ github.event.number }} gitlab-token: ${{ secrets.DEV_GITLAB_TOKEN }} - persist: "${{ needs.check-deploy.outputs.persist }}" s3-results-access-key: ${{ secrets.ACCEPTANCE_TESTS_BUCKET_ACCESS_KEY }} s3-results-secret-key: ${{ secrets.ACCEPTANCE_TESTS_BUCKET_SECRET_KEY }} test-timeout-mins: "60" - cypress-acceptance-tests: - if: | - github.event.action != 'closed' && - needs.check-deploy.outputs.pr-contains-string == 'true' && - (needs.check-deploy.outputs.test-enabled == 'true' || needs.check-deploy.outputs.test-cypress-enabled == 'true') + legacy-cypress-acceptance-tests: + name: Legacy Cypress tests + runs-on: ubuntu-24.04 needs: [check-deploy, deploy-pr] - runs-on: ubuntu-22.04 - + if: github.event.action != 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' && needs.check-deploy.outputs.test-legacy-enabled == 'true' strategy: fail-fast: false - max-parallel: 1 matrix: - tests: [publicProject, updateProjects, useSession] + tests: + - publicProject + - privateProject + - updateProjects + - 
testDatasets + - useSession + - checkWorkflows + - rstudioSession steps: + - name: Extract Renku repository reference + run: echo "RENKU_REFERENCE=`echo '${{ needs.check-deploy.outputs.renku }}' | cut -d'@' -f2`" >> $GITHUB_ENV + - uses: SwissDataScienceCenter/renku-actions/test-renku-cypress@v1.18.2 + with: + e2e-target: ${{ matrix.tests }} + renku-reference: ${{ env.RENKU_REFERENCE }} + renku-release: renku-ci-ds-${{ github.event.number }} + test-user-password: ${{ secrets.RENKU_BOT_DEV_PASSWORD }} cypress-acceptance-tests: + name: Cypress tests + runs-on: ubuntu-24.04 + needs: [check-deploy, deploy-pr] + if: github.event.action != 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' && needs.check-deploy.outputs.test-enabled == 'true' + strategy: + fail-fast: false + matrix: + tests: + - anonymousNavigation + - dashboardV2 + - groupBasics + - projectBasics + - projectResources + - searchEntities + - sessionBasics steps: - name: Extract Renku repository reference run: echo "RENKU_REFERENCE=`echo '${{ needs.check-deploy.outputs.renku }}' | cut -d'@' -f2`" >> $GITHUB_ENV - - uses: SwissDataScienceCenter/renku-actions/test-renku-cypress@v1.13.0 + - uses: SwissDataScienceCenter/renku-actions/test-renku-cypress@v1.18.2 with: + e2e-folder: cypress/e2e/v2/ e2e-target: ${{ matrix.tests }} renku-reference: ${{ env.RENKU_REFERENCE }} renku-release: renku-ci-ds-${{ github.event.number }} test-user-password: ${{ secrets.RENKU_BOT_DEV_PASSWORD }} cleanup: + name: Cleanup + runs-on: ubuntu-24.04 needs: check-deploy if: github.event.action == 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' - runs-on: ubuntu-22.04 + permissions: + pull-requests: write steps: - name: Find deployment url uses: peter-evans/find-comment@v3 @@ -150,7 +192,7 @@ jobs: body: | Tearing down the temporary RenkuLab deployment for this PR. 
- name: renku teardown - uses: SwissDataScienceCenter/renku-actions/cleanup-renku-ci-deployments@v1.13.0 + uses: SwissDataScienceCenter/renku-actions/cleanup-renku-ci-deployments@v1.18.2 env: HELM_RELEASE_REGEX: "^renku-ci-ds-${{ github.event.number }}$" GITLAB_TOKEN: ${{ secrets.DEV_GITLAB_TOKEN }} diff --git a/.github/workflows/save_cache.yml b/.github/workflows/save_cache.yml index 41c8c8b86..eb30e0e69 100644 --- a/.github/workflows/save_cache.yml +++ b/.github/workflows/save_cache.yml @@ -17,11 +17,11 @@ jobs: DEVCONTAINER_IMAGE_CACHE: ghcr.io/swissdatasciencecenter/renku-data-services/devcontainer steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 - name: Login to Docker Hub - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.actor }} diff --git a/.github/workflows/test_publish.yml b/.github/workflows/test_publish.yml index a9ccf9157..9bb0b27ea 100644 --- a/.github/workflows/test_publish.yml +++ b/.github/workflows/test_publish.yml @@ -8,7 +8,12 @@ on: - "v*" pull_request: branches: - +concurrency: + group: ${{ github.workflow }}-${{ github.ref || github.run_id }} + cancel-in-progress: true +defaults: + run: + shell: bash env: DEVCONTAINER_REGISTRY: ghcr.io DEVCONTAINER_IMAGE_NAME: ${{ github.repository }}/devcontainer @@ -85,32 +90,8 @@ jobs: - name: Style checks uses: devcontainers/ci@v0.3 with: - runCmd: make style_checks - push: never - skipContainerUserIdUpdate: false - cacheFrom: ${{ needs.build-devcontainer.outputs.image_repository }}:${{ needs.build-devcontainer.outputs.image_tag }} - optional-checks: - runs-on: ubuntu-latest - needs: - - build-devcontainer - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - uses: actions/cache/restore@v4 - name: Restore cache - with: - path: ${{ env.CACHE_PATH }} - key: ${{ env.CACHE_KEY }} - - name: Set Git config - shell: bash - run: | - git config --add user.name "Renku Bot" - git config --add user.email "renku@datascience.ch" - - name: Check Avro Schemas are up to date - uses: devcontainers/ci@v0.3 - with: - runCmd: make check_avro + runCmd: | + make style_checks && poetry run pre-commit run --all-files --show-diff-on-failure --hook-stage manual push: never skipContainerUserIdUpdate: false cacheFrom: ${{ needs.build-devcontainer.outputs.image_repository }}:${{ needs.build-devcontainer.outputs.image_tag }} @@ -235,16 +216,35 @@ jobs: needs: - test-main - test-schemathesis + strategy: + matrix: + include: + - service: "Data Service API" + image-name: renku/renku-data-service + dockerfile: ./projects/renku_data_service/Dockerfile + name: renku-data-services + - service: "Secrets Storage" + image-name: renku/secrets-storage + dockerfile: ./projects/secrets_storage/Dockerfile + name: secrets-storage + - service: "K8s Watcher" + image-name: renku/data-service-k8s-watcher + dockerfile: ./projects/k8s_watcher/Dockerfile + name: k8s-watcher + - service: "Data Tasks" + image-name: renku/data-service-data-tasks + dockerfile: ./projects/renku_data_tasks/Dockerfile + name: data-tasks steps: - name: Checkout uses: actions/checkout@v4 - - name: Docker meta data services + - name: Docker meta ${{ matrix.service }} id: meta uses: docker/metadata-action@v5 with: # list of Docker images to use as base name for tags images: | - renku/renku-data-service + ${{ matrix.image-name }} # generate Docker tags based on the following events/attributes tags: | type=ref,event=branch @@ -261,64 +261,29 @@ jobs: with: username: ${{ secrets.RENKU_DOCKER_USERNAME }} 
password: ${{ secrets.RENKU_DOCKER_PASSWORD }} - - name: Build and push data services - uses: docker/build-push-action@v4 + - name: Build and push ${{ matrix.service }} + uses: docker/build-push-action@v6 with: context: . - file: ./projects/renku_data_service/Dockerfile + file: ${{ matrix.dockerfile }} platforms: linux/amd64,linux/arm64 push: true tags: ${{ steps.meta.outputs.tags }} labels: ${{ steps.meta.outputs.labels }} - cache-from: type=registry,ref=renku/renku-data-service:buildcache - cache-to: type=registry,ref=renku/renku-data-service:buildcache,mode=max - - name: Docker meta background jobs - id: meta-background-jobs - uses: docker/metadata-action@v5 - with: - # list of Docker images to use as base name for tags - images: | - renku/data-service-background-jobs - # generate Docker tags based on the following events/attributes - tags: | - type=ref,event=branch - type=ref,event=pr - type=semver,pattern={{version}} - type=semver,pattern={{major}}.{{minor}} - type=sha - - name: Build and push background jobs - uses: docker/build-push-action@v4 - with: - context: . - file: ./projects/background_jobs/Dockerfile - platforms: linux/amd64,linux/arm64 - push: true - tags: ${{ steps.meta-background-jobs.outputs.tags }} - labels: ${{ steps.meta-background-jobs.outputs.labels }} - cache-from: type=registry,ref=renku/data-service-background-jobs:buildcache - cache-to: type=registry,ref=renku/data-service-background-jobs:buildcache,mode=max - - name: Docker meta secrets - id: meta-secrets-storage - uses: docker/metadata-action@v5 + cache-from: type=registry,ref=${{ matrix.image-name }}:buildcache + cache-to: type=registry,ref=${{ matrix.image-name}}:buildcache,mode=max + provenance: false + - name: Scan image ${{ matrix.service }} + uses: snyk/actions/docker@master + continue-on-error: true with: - # list of Docker images to use as base name for tags - images: | - renku/secrets-storage - # generate Docker tags based on the following events/attributes - tags: | - type=ref,event=branch - type=ref,event=pr - type=semver,pattern={{version}} - type=semver,pattern={{major}}.{{minor}} - type=sha - - name: Build and push secrets image - uses: docker/build-push-action@v4 + image: ${{ steps.meta.outputs.tags}} + args: --file=${{ matrix.dockerfile }} --severity-threshold=high --sarif-file-output=${{ matrix.name }}.sarif + env: + SNYK_TOKEN: ${{ secrets.SNYK_TOKEN }} + - name: Upload Snyk report + uses: github/codeql-action/upload-sarif@v3 + continue-on-error: true with: - context: . 
- file: ./projects/secrets_storage/Dockerfile - platforms: linux/amd64,linux/arm64 - push: true - tags: ${{ steps.meta-secrets-storage.outputs.tags }} - labels: ${{ steps.meta-secrets-storage.outputs.labels }} - cache-from: type=registry,ref=renku/secrets-storage:buildcache - cache-to: type=registry,ref=renku/secrets-storage:buildcache,mode=max + sarif_file: ${{ matrix.name }}.sarif + category: snyk_${{ matrix.name }} diff --git a/.gitignore b/.gitignore index 64f45481b..bc8b0518d 100644 --- a/.gitignore +++ b/.gitignore @@ -88,3 +88,7 @@ result *.gz *.tgz .dmypy.json + +# VScode local config +.vscode/* +!.vscode/settings.json diff --git a/.idea/runConfigurations/_template__of_py_test.xml b/.idea/runConfigurations/_template__of_py_test.xml new file mode 100644 index 000000000..eaa93eaa0 --- /dev/null +++ b/.idea/runConfigurations/_template__of_py_test.xml @@ -0,0 +1,20 @@ + + + + + diff --git a/.local_env b/.local_env new file mode 100644 index 000000000..a54155072 --- /dev/null +++ b/.local_env @@ -0,0 +1,26 @@ +export DATA_SERVICE_ROOT=$(cd "$(dirname ${0})"; pwd) + +export KUBECONFIG="${DATA_SERVICE_ROOT}/.k3d-config.yaml" + +export NB_SERVER_OPTIONS__DEFAULTS_PATH="${DATA_SERVICE_ROOT}/server_defaults.json" +export NB_SERVER_OPTIONS__UI_CHOICES_PATH="${DATA_SERVICE_ROOT}/server_options.json" +export CORS_ALLOW_ALL_ORIGINS=true + +export ALEMBIC_CONFIG="${DATA_SERVICE_ROOT}/components/renku_data_services/migrations/alembic.ini" + +export AUTHZ_DB_GRPC_PORT=50051 +export AUTHZ_DB_HOST=127.0.0.1 +export AUTHZ_DB_KEY=renku +export AUTHZ_DB_NO_TLS_CONNECTION=true + +export DB_HOST=127.0.0.1 +export DB_NAME=renku +export DB_PASSWORD=renku +export DB_USER=renku + +export ZED_TOKEN=renku +export ZED_ENDPOINT=127.0.0.1:50051 +export ZED_INSECURE=true + +export SOLR_URL="http://127.0.0.1:8983" +export SOLR_CORE="renku-search-dev" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1cb8f5931..c58be6a92 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,5 +1,5 @@ default_language_version: - python: python3.12 + python: python3.13 repos: - repo: local hooks: @@ -8,6 +8,8 @@ repos: entry: make style_checks language: system pass_filenames: false + stages: + - pre-commit - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.5.0 hooks: @@ -18,11 +20,7 @@ repos: - id: check-toml - id: debug-statements - id: end-of-file-fixer - exclude: 'components/renku_data_services/message_queue/(avro_models|schemas)' + exclude: '(components/renku_data_services/message_queue/(avro_models|schemas))|(.ambr)' - id: mixed-line-ending - id: trailing-whitespace - exclude: 'components/renku_data_services/message_queue/(avro_models|schemas)' - - repo: https://github.com/asottile/yesqa - rev: v1.5.0 - hooks: - - id: yesqa + exclude: '(components/renku_data_services/message_queue/(avro_models|schemas))|(.ambr)' diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a044f539b..b218b46f6 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -7,7 +7,7 @@ The sections below provide guidelines for various types of contributions. ### Bug Reports and Feature Requests -**Do not report security issues as bugs**, instead email us at contact@datascience.ch +**Do not report security issues as bugs**, instead email us at security@renku.io or [open a security advisory](https://github.com/SwissDataScienceCenter/renku-data-services/security/advisories/new) Bugs and feature requests should be reported on GitHub [Issues](https://github.com/SwissDataScienceCenter/renku-data-services/issues). 
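The `.local_env` and docker-compose changes above wire a Solr 9 service into the dev setup, with a precreated `renku-search-dev` core exposed through `SOLR_URL`/`SOLR_CORE`. As a quick smoke test that the core is reachable, here is a minimal sketch (not part of this patch): it assumes the compose stack is running and `.local_env` has been sourced, and it only uses Solr's standard `/select` handler.

```python
"""Smoke-test the dev Solr core (illustrative sketch, not part of this patch)."""

import os

import httpx

solr_url = os.environ.get("SOLR_URL", "http://127.0.0.1:8983")
solr_core = os.environ.get("SOLR_CORE", "renku-search-dev")

# Solr's stock select handler; a freshly precreated core reports numFound == 0.
response = httpx.get(f"{solr_url}/solr/{solr_core}/select", params={"q": "*:*", "rows": 0})
response.raise_for_status()
print("documents in core:", response.json()["response"]["numFound"])
```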
diff --git a/DEVELOPING.md b/DEVELOPING.md index 2cc6a8d70..509ce2874 100644 --- a/DEVELOPING.md +++ b/DEVELOPING.md @@ -3,6 +3,10 @@ This document details the structure of the code in this repository. For information on how to set up a dev environment and run code, consult the [readme](/README.md). +### Coding guidelines + +These can be found in our [wiki](https://github.com/SwissDataScienceCenter/renku-data-services/wiki/Coding-guidelines). + ### Architecture #### Polylith @@ -11,13 +15,14 @@ Data Services follows a [polylith](https://polylith.gitbook.io/polylith) approac * `components` contain all the application code, divided into modules based on entity types * `bases` contains the glue code to bring the different components together into a single unit/api. The entrypoint of applications is usually a `main.py` in one of the bases -* `projects` contains the Dockerfiles and pyproject.toml's for each deployed service +* `projects` contains the Dockerfiles and pyproject.toml files for each deployed service #### Bases/Projects There are four independent services/deployments (projects/bases): * Renku Data Services (`data_api`): The main CRUD service for persisting data in postgres * Secrets Storage (`secrets_storage_api`): Handles loading user secrets securely when needed -* Background Jobs (`background_jobs`): Kubernetes cronjobs for recurring tasks +* Data Tasks (`data_tasks`): Regular background tasks +* K8s cache (`k8s_cache`): Caches Kubernetes objects so as to not overload the Kubernetes API #### Components Within components, there are the following modules: @@ -27,13 +32,14 @@ Within components, there are the following modules: * *base_api*: Common functionality shared by different APIs * *base_models*: Common functionality shared by all domain models * *base_orm*: Common functionality shared by database object relational models -* *connected_services*: Code concerning third-party integrations (e.g. Gitlab/Github) +* *connected_services*: Code concerning third-party integrations (e.g. GitLab/GitHub) * *crc*: Compute resource controls code, dealing with resource classes and resource pools for interactive compute * *db_config*: Database configuration * *errors*: Common application error types shared by all apis * *k8s*: Kubernetes client code * *message_queue*: Redis streams messaging code * *migrations*: Database migrations +* *metrics*: Store metrics data in a staging table * *namespace*: Code for handling namespaces (user/groups) * *platform*: Renku platform configuration code * *project*: Code for Project entities @@ -96,18 +102,14 @@ environment can be created: environment. 2. In another terminal, run `vm-run` (headless) to start a vm running necessary external services, like the PostgreSQL database. -3. Potentially run `poetry-fix-cfg` to alter the `pyvenv.cfg` so that - poetry will use the env built by nix +3. Run `poetry install` to install the python venv -Then `make run`, `make tests` etc can be used as usual. +Then `make run`, `make tests` etc. can be used as usual. The environment also contains other useful tools, like ruff-lsp, pyright and more. Instead of a vm, a development environment using NixOS containers is also available. -The first invocation will take a while for the first run, as the -python environment is being built. Subsequent calls are then instant. - It will run a bash shell, check out [direnv](https://direnv.net/) and the [use flake](https://direnv.net/man/direnv-stdlib.1.html#codeuse-flake-ltinstallablegtcode) function if you prefer to keep your favorite shell. 
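To make the polylith split described in this hunk concrete, the toy sketch below shows the component/base division in one file; all names are invented for illustration, and the real glue lives under `bases/renku_data_services/` with far more wiring.

```python
"""Toy polylith wiring (hypothetical names, not the repository's real API)."""

from sanic import Blueprint, Sanic
from sanic.response import json

# --- component: reusable code for one entity type (would live under components/) ---
users_bp = Blueprint("users", url_prefix="/api/data")


@users_bp.get("/users")
async def list_users(request):
    # Stub standing in for a repository/database lookup.
    return json([])


# --- base: glue that composes components into one deployable app (under bases/) ---
def create_app() -> Sanic:
    app = Sanic("toy_data_api")
    app.blueprint(users_bp)
    return app
```

A project then only adds the Dockerfile and pyproject.toml that package one such base.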
@@ -120,9 +122,33 @@ surrounding services to run). * Run a specific test e.g.: `poetry run pytest test/bases/renku_data_services/data_api/test_data_connectors.py::test_create_openbis_data_connector` * Also run tests marked with `@pytest.mark.myskip`: `PYTEST_FORCE_RUN_MYSKIPS=1 make tests` +We use [Syrupy](https://github.com/syrupy-project/syrupy) for snapshotting data in tests. + +To update the snapshot data, run the following command in the devcontainer: +```bash +$ poetry run pytest -m "not schemathesis" -n auto --snapshot-update +``` + +### Directly from PyCharm + +From the root folder of the repository, run: + +1. `devcontainer build --workspace-folder .` +2. `devcontainer up --workspace-folder .` +3. `make schemas` +4. `make amalthea_schema` + +> **WARNING:** +> +> Be careful with the kubernetes environment in your shell: with badly set up tests and environment you might try +> to run some tests against your default cluster. + +Then you can run the tests as usual directly from PyCharm by clicking on the green arrow next to a specific test, or a +whole test suite or part of the test hierarchy. + ## Migrations -We use Alembic for migrations and we have a single version table for all schemas. This version table +We use Alembic for migrations, and we have a single version table for all schemas. This version table is used by Alembic to determine what migrations have been applied or not and it resides in the `common` schema. That is why all the Alembic commands include the `--name common` argument. diff --git a/Makefile b/Makefile index 1891cfc20..477b01f84 100644 --- a/Makefile +++ b/Makefile @@ -1,10 +1,10 @@ -AMALTHEA_JS_VERSION ?= 0.13.0 -AMALTHEA_SESSIONS_VERSION ?= 0.13.0 +AMALTHEA_JS_VERSION ?= 0.20.0 +AMALTHEA_SESSIONS_VERSION ?= 0.20.0 CODEGEN_PARAMS := \ --input-file-type openapi \ --output-model-type pydantic_v2.BaseModel \ --use-double-quotes \ - --target-python-version 3.12 \ + --target-python-version 3.13 \ --collapse-root-models \ --field-constraints \ --strict-nullable \ @@ -13,6 +13,32 @@ CODEGEN_PARAMS := \ --set-default-enum-member \ --use-one-literal-as-default \ --use-default +CR_CODEGEN_PARAMS := \ + --input-file-type jsonschema \ + --output-model-type pydantic_v2.BaseModel \ + --use-double-quotes \ + --target-python-version 3.13 \ + --collapse-root-models \ + --field-constraints \ + --strict-nullable \ + --allow-extra-fields \ + --use-default-kwarg + +# A separate set of params without the --collapse-root-models option as +# this causes a bug in the code generator related to list of unions. 
+# https://github.com/koxudaxi/datamodel-code-generator/issues/1937 +SEARCH_CODEGEN_PARAMS := \ + --input-file-type openapi \ + --output-model-type pydantic_v2.BaseModel \ + --use-double-quotes \ + --target-python-version 3.13 \ + --field-constraints \ + --strict-nullable \ + --set-default-enum-member \ + --openapi-scopes schemas paths parameters \ + --set-default-enum-member \ + --use-one-literal-as-default \ + --use-default .PHONY: all all: help @@ -33,8 +59,8 @@ API_SPECS := \ components/renku_data_services/repositories/apispec.py \ components/renku_data_services/notebooks/apispec.py \ components/renku_data_services/platform/apispec.py \ - components/renku_data_services/message_queue/apispec.py \ - components/renku_data_services/data_connectors/apispec.py + components/renku_data_services/data_connectors/apispec.py \ + components/renku_data_services/search/apispec.py components/renku_data_services/crc/apispec.py: components/renku_data_services/crc/api.spec.yaml components/renku_data_services/storage/apispec.py: components/renku_data_services/storage/api.spec.yaml @@ -47,34 +73,12 @@ components/renku_data_services/connected_services/apispec.py: components/renku_d components/renku_data_services/repositories/apispec.py: components/renku_data_services/repositories/api.spec.yaml components/renku_data_services/notebooks/apispec.py: components/renku_data_services/notebooks/api.spec.yaml components/renku_data_services/platform/apispec.py: components/renku_data_services/platform/api.spec.yaml -components/renku_data_services/message_queue/apispec.py: components/renku_data_services/message_queue/api.spec.yaml components/renku_data_services/data_connectors/apispec.py: components/renku_data_services/data_connectors/api.spec.yaml +components/renku_data_services/search/apispec.py: components/renku_data_services/search/api.spec.yaml schemas: ${API_SPECS} ## Generate pydantic classes from apispec yaml files @echo "generated classes based on ApiSpec" -##@ Avro schemas - -.PHONY: download_avro -download_avro: ## Download the latest avro schema files - @echo "Downloading avro schema files" - curl -L -o schemas.tar.gz https://github.com/SwissDataScienceCenter/renku-schema/tarball/main - tar xf schemas.tar.gz --directory=components/renku_data_services/message_queue/schemas/ --strip-components=1 - rm schemas.tar.gz - -.PHONY: check_avro -check_avro: download_avro avro_models ## Download avro schemas, generate models and check if the avro schemas are up to date - @echo "checking if avro schemas are up to date" - git diff --exit-code || (git diff && exit 1) - -.PHONY: avro_models -avro_models: ## Generate message queue classes and code from the avro schemas - @echo "generating message queues classes from avro schemas" - poetry run python components/renku_data_services/message_queue/generate_models.py - -.PHONY: update_avro -update_avro: download_avro avro_models ## Download avro schemas and generate models - ##@ Test and linting .PHONY: style_checks @@ -82,7 +86,7 @@ style_checks: ${API_SPECS} ## Run linting and style checks poetry check poetry run mypy poetry run ruff format --check - poetry run ruff check . + poetry run ruff check poetry run bandit -c pyproject.toml -r . 
poetry poly check poetry poly libs @@ -113,6 +117,43 @@ tests: test_setup main_tests schemathesis_tests collect_coverage ## Run all tes pre_commit_checks: ## Run pre-commit checks poetry run pre-commit run --all-files +##@ Helm/k8s + +.PHONY: k3d_cluster +k3d_cluster: ## Creates a k3d cluster for testing + ./setup-k3d-cluster.sh --reset --deploy-shipwright + +.PHONY: install_amaltheas +install_amaltheas: ## Installs both versions of amalthea. NOTE: It uses the currently active k8s context. + helm repo add renku https://swissdatasciencecenter.github.io/helm-charts + helm repo update + helm upgrade --install amalthea-js renku/amalthea --version $(AMALTHEA_JS_VERSION) + helm upgrade --install amalthea-se renku/amalthea-sessions --version ${AMALTHEA_SESSIONS_VERSION} + +# TODO: Add the version variables from the top of the file here when the charts are fully published +.PHONY: amalthea_schema +amalthea_schema: ## Updates generated pydantic classes from CRDs + curl https://raw.githubusercontent.com/SwissDataScienceCenter/amalthea/${AMALTHEA_SESSIONS_VERSION}/config/crd/bases/amalthea.dev_amaltheasessions.yaml | yq '.spec.versions[0].schema.openAPIV3Schema' | poetry run datamodel-codegen --output components/renku_data_services/notebooks/cr_amalthea_session.py --base-class renku_data_services.notebooks.cr_base.BaseCRD ${CR_CODEGEN_PARAMS} + curl https://raw.githubusercontent.com/SwissDataScienceCenter/amalthea/${AMALTHEA_JS_VERSION}/controller/crds/jupyter_server.yaml | yq '.spec.versions[0].schema.openAPIV3Schema' | poetry run datamodel-codegen --output components/renku_data_services/notebooks/cr_jupyter_server.py --base-class renku_data_services.notebooks.cr_base.BaseCRD ${CR_CODEGEN_PARAMS} + +.PHONY: shipwright_schema +shipwright_schema: ## Updates the Shipwright pydantic classes + curl https://raw.githubusercontent.com/shipwright-io/build/refs/tags/v0.15.2/deploy/crds/shipwright.io_buildruns.yaml | yq '.spec.versions[] | select(.name == "v1beta1") | .schema.openAPIV3Schema' | poetry run datamodel-codegen --output components/renku_data_services/session/cr_shipwright_buildrun.py --base-class renku_data_services.session.cr_base.BaseCRD ${CR_CODEGEN_PARAMS} + +##@ Devcontainer + +.PHONY: devcontainer_up +devcontainer_up: ## Start dev containers + devcontainer up --workspace-folder . + +.PHONY: devcontainer_rebuild +devcontainer_rebuild: ## Rebuild dev container images + devcontainer up --remove-existing-container --workspace-folder . 
+ +.PHONY: devcontainer_exec +devcontainer_exec: devcontainer_up ## Start a shell in the development container + devcontainer exec --container-id renku-data-services_devcontainer-data_service-1 -- bash + ##@ General .PHONY: run @@ -123,6 +164,18 @@ run: ## Run the sanic server debug: ## Debug the sanic server DUMMY_STORES=true poetry run python -Xfrozen_modules=off -m debugpy --listen 0.0.0.0:5678 --wait-for-client -m sanic renku_data_services.data_api.main:create_app --debug --single-process --port 8000 --host 0.0.0.0 +.PHONY: run-tasks +run-tasks: ## Run the data tasks + DUMMY_STORES=true poetry run python bases/renku_data_services/data_tasks/main.py + +.PHONY: lock +lock: ## Update the lock files for all projects from their respective poetry.toml + poetry lock $(ARGS) + poetry -C projects/renku_data_service lock $(ARGS) + poetry -C projects/secrets_storage lock $(ARGS) + poetry -C projects/k8s_watcher lock $(ARGS) + poetry -C projects/renku_data_tasks lock $(ARGS) + # From the operator sdk Makefile # The help target prints out all targets with their descriptions organized # beneath their categories. The categories are represented by '##@' and the @@ -138,30 +191,12 @@ debug: ## Debug the sanic server help: ## Display this help. @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m\033[0m\n"} /^[a-zA-Z_0-9-]+:.*?##/ { printf " \033[36m%-25s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) -##@ Helm/k8s - -.PHONY: k3d_cluster -k3d_cluster: ## Creates a k3d cluster for testing - k3d cluster delete - k3d cluster create --agents 1 --k3s-arg --disable=metrics-server@server:0 - -.PHONY: install_amaltheas -install_amaltheas: ## Installs both version of amalthea in the. NOTE: It uses the currently active k8s context. 
- helm repo add renku https://swissdatasciencecenter.github.io/helm-charts - helm repo update - helm upgrade --install amalthea-js renku/amalthea --version $(AMALTHEA_JS_VERSION) - helm upgrade --install amalthea-se renku/amalthea-sessions --version ${AMALTHEA_SESSIONS_VERSION} - -# TODO: Add the version variables from the top of the file here when the charts are fully published -.PHONY: amalthea_schema -amalthea_schema: ## Updates generates pydantic classes from CRDs - curl https://raw.githubusercontent.com/SwissDataScienceCenter/amalthea/main/config/crd/bases/amalthea.dev_amaltheasessions.yaml | yq '.spec.versions[0].schema.openAPIV3Schema' | poetry run datamodel-codegen --input-file-type jsonschema --output-model-type pydantic_v2.BaseModel --output components/renku_data_services/notebooks/cr_amalthea_session.py --use-double-quotes --target-python-version 3.12 --collapse-root-models --field-constraints --strict-nullable --base-class renku_data_services.notebooks.cr_base.BaseCRD --allow-extra-fields --use-default-kwarg - curl https://raw.githubusercontent.com/SwissDataScienceCenter/amalthea/main/controller/crds/jupyter_server.yaml | yq '.spec.versions[0].schema.openAPIV3Schema' | poetry run datamodel-codegen --input-file-type jsonschema --output-model-type pydantic_v2.BaseModel --output components/renku_data_services/notebooks/cr_jupyter_server.py --use-double-quotes --target-python-version 3.12 --collapse-root-models --field-constraints --strict-nullable --base-class renku_data_services.notebooks.cr_base.BaseCRD --allow-extra-fields --use-default-kwarg - # Pattern rules +API_SPEC_CODEGEN_PARAMS := ${CODEGEN_PARAMS} %/apispec.py: %/api.spec.yaml - poetry run datamodel-codegen --input $< --output $@ --base-class $(subst /,.,$(subst .py,_base.BaseAPISpec,$(subst components/,,$@))) ${CODEGEN_PARAMS} + $(if $(findstring /search/, $(<)), $(eval API_SPEC_CODEGEN_PARAMS=${SEARCH_CODEGEN_PARAMS})) + poetry run datamodel-codegen --input $< --output $@ --base-class $(subst /,.,$(subst .py,_base.BaseAPISpec,$(subst components/,,$@))) ${API_SPEC_CODEGEN_PARAMS} # If the only difference is the timestamp comment line, ignore it by # reverting to the checked in version. As the file timestamps is now # newer than the requirements these steps won't be re-triggered. 
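The pattern rule above regenerates each `apispec.py` from its `api.spec.yaml` with datamodel-codegen, forcing a per-module base class. As a hedged sketch of the shape this produces and how such models are used, consider the following; `ResourceClass` and its fields are invented, and the `extra = "forbid"` behaviour is an assumption, since the real models are generated from each spec.

```python
"""Approximate shape of a generated apispec module (illustration only)."""

from pydantic import BaseModel, ConfigDict, Field


class BaseAPISpec(BaseModel):
    """Stands in for the per-module apispec_base.BaseAPISpec base class."""

    model_config = ConfigDict(extra="forbid")  # assumption: unknown fields are rejected


class ResourceClass(BaseAPISpec):
    """Hypothetical model; real fields come from the OpenAPI spec."""

    name: str = Field(min_length=1)
    cpu: float
    memory: int


# Handlers validate request payloads against the generated models:
rc = ResourceClass.model_validate({"name": "small", "cpu": 0.5, "memory": 1024})
print(rc.model_dump())
```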
diff --git a/bases/renku_data_services/background_jobs/__init__.py b/bases/renku_data_services/background_jobs/__init__.py deleted file mode 100644 index 686ff5c0a..000000000 --- a/bases/renku_data_services/background_jobs/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Scripts for running background jobs.""" diff --git a/bases/renku_data_services/background_jobs/config.py b/bases/renku_data_services/background_jobs/config.py deleted file mode 100644 index dc3d7986e..000000000 --- a/bases/renku_data_services/background_jobs/config.py +++ /dev/null @@ -1,118 +0,0 @@ -"""Configurations for background jobs.""" - -import os -from collections.abc import Callable -from dataclasses import dataclass - -from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine -from sqlalchemy.orm import sessionmaker - -from renku_data_services.authz.authz import Authz -from renku_data_services.authz.config import AuthzConfig -from renku_data_services.data_connectors.db import DataConnectorProjectLinkRepository, DataConnectorRepository -from renku_data_services.data_connectors.migration_utils import DataConnectorMigrationTool -from renku_data_services.errors import errors -from renku_data_services.message_queue.config import RedisConfig -from renku_data_services.message_queue.db import EventRepository -from renku_data_services.message_queue.redis_queue import RedisQueue -from renku_data_services.namespace.db import GroupRepository -from renku_data_services.project.db import ProjectRepository -from renku_data_services.users.db import UserRepo, UsersSync -from renku_data_services.users.kc_api import IKeycloakAPI, KeycloakAPI - - -@dataclass -class SyncConfig: - """Main configuration.""" - - syncer: UsersSync - kc_api: IKeycloakAPI - authz_config: AuthzConfig - group_repo: GroupRepository - event_repo: EventRepository - project_repo: ProjectRepository - data_connector_migration_tool: DataConnectorMigrationTool - session_maker: Callable[..., AsyncSession] - - @classmethod - def from_env(cls, prefix: str = "") -> "SyncConfig": - """Generate a configuration from environment variables.""" - pg_host = os.environ.get(f"{prefix}DB_HOST", "localhost") - pg_user = os.environ.get(f"{prefix}DB_USER", "renku") - pg_port = os.environ.get(f"{prefix}DB_PORT", "5432") - db_name = os.environ.get(f"{prefix}DB_NAME", "renku") - pg_password = os.environ.get(f"{prefix}DB_PASSWORD") - if pg_password is None: - raise errors.ConfigurationError( - message="Please provide a database password in the 'DB_PASSWORD' environment variable." - ) - async_sqlalchemy_url = f"postgresql+asyncpg://{pg_user}:{pg_password}@{pg_host}:{pg_port}/{db_name}" - # NOTE: the pool here is not used to serve HTTP requests, it is only used in background jobs. - # Therefore, we want to consume very few connections and we can wait for an available connection - # much longer than the default 30 seconds. In our tests syncing 15 users times out with the default. 
- engine = create_async_engine(async_sqlalchemy_url, pool_size=4, max_overflow=0, pool_timeout=600) - session_maker: Callable[..., AsyncSession] = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False) # type: ignore[call-overload] - redis = RedisConfig.from_env(prefix) - message_queue = RedisQueue(redis) - - authz_config = AuthzConfig.from_env() - event_repo = EventRepository(session_maker=session_maker, message_queue=message_queue) - group_repo = GroupRepository( - session_maker, - event_repo=event_repo, - group_authz=Authz(authz_config), - message_queue=message_queue, - ) - project_repo = ProjectRepository( - session_maker=session_maker, - message_queue=message_queue, - event_repo=event_repo, - group_repo=group_repo, - authz=Authz(authz_config), - ) - data_connector_repo = DataConnectorRepository( - session_maker=session_maker, - authz=Authz(authz_config), - ) - data_connector_project_link_repo = DataConnectorProjectLinkRepository( - session_maker=session_maker, - authz=Authz(authz_config), - ) - data_connector_migration_tool = DataConnectorMigrationTool( - session_maker=session_maker, - data_connector_repo=data_connector_repo, - data_connector_project_link_repo=data_connector_project_link_repo, - project_repo=project_repo, - authz=Authz(authz_config), - ) - user_repo = UserRepo( - session_maker=session_maker, - message_queue=message_queue, - event_repo=event_repo, - group_repo=group_repo, - encryption_key=None, - authz=Authz(authz_config), - ) - syncer = UsersSync( - session_maker, - message_queue=message_queue, - event_repo=event_repo, - group_repo=group_repo, - user_repo=user_repo, - authz=Authz(authz_config), - ) - keycloak_url = os.environ[f"{prefix}KEYCLOAK_URL"] - client_id = os.environ[f"{prefix}KEYCLOAK_CLIENT_ID"] - client_secret = os.environ[f"{prefix}KEYCLOAK_CLIENT_SECRET"] - realm = os.environ.get(f"{prefix}KEYCLOAK_REALM", "Renku") - kc_api = KeycloakAPI(keycloak_url=keycloak_url, client_id=client_id, client_secret=client_secret, realm=realm) - return cls( - syncer, - kc_api, - authz_config, - group_repo, - event_repo, - project_repo, - data_connector_migration_tool, - session_maker, - ) diff --git a/bases/renku_data_services/background_jobs/core.py b/bases/renku_data_services/background_jobs/core.py deleted file mode 100644 index 7f9800377..000000000 --- a/bases/renku_data_services/background_jobs/core.py +++ /dev/null @@ -1,307 +0,0 @@ -"""Different utility functions for background jobs.""" - -import logging - -from authzed.api.v1 import ( - Consistency, - LookupResourcesRequest, - ObjectReference, - ReadRelationshipsRequest, - Relationship, - RelationshipFilter, - RelationshipUpdate, - SubjectFilter, - SubjectReference, - WriteRelationshipsRequest, -) -from ulid import ULID - -from renku_data_services.authz.authz import Authz, ResourceType, _AuthzConverter, _Relation -from renku_data_services.authz.models import Scope -from renku_data_services.background_jobs.config import SyncConfig -from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId -from renku_data_services.errors import errors -from renku_data_services.message_queue.avro_models.io.renku.events import v2 -from renku_data_services.message_queue.converters import EventConverter -from renku_data_services.namespace.models import NamespaceKind - - -async def generate_user_namespaces(config: SyncConfig) -> None: - """Generate namespaces for users if there are none.""" - await config.group_repo.generate_user_namespaces() - - -async def sync_user_namespaces(config: SyncConfig) 
-> None: - """Lists all user namespaces in the database and adds them to Authzed and the event queue.""" - authz = Authz(config.authz_config) - user_namespaces = config.group_repo._get_user_namespaces() - logging.info("Start syncing user namespaces to the authorization DB and message queue") - num_authz: int = 0 - num_events: int = 0 - num_total: int = 0 - async for user_namespace in user_namespaces: - num_total += 1 - events = EventConverter.to_events(user_namespace, v2.UserAdded) - authz_change = authz._add_user_namespace(user_namespace.namespace) - session = config.session_maker() - tx = session.begin() - await tx.start() - try: - await authz.client.WriteRelationships(authz_change.apply) - num_authz += 1 - for event in events: - await config.event_repo.store_event(session, event) - num_events += 1 - except Exception as err: - # NOTE: We do not rollback the authz changes here because it is OK if something is in Authz DB - # but not in the message queue but not vice-versa. - logging.error(f"Failed to sync user namespace {user_namespace} because {err}") - await tx.rollback() - else: - await tx.commit() - finally: - await session.close() - logging.info(f"Wrote authorization changes for {num_authz}/{num_total} user namespaces") - logging.info(f"Wrote to event queue database for {num_events}/{num_total} user namespaces") - - -async def bootstrap_user_namespaces(config: SyncConfig) -> None: - """Synchronize user namespaces to the authorization database only if none are already present.""" - authz = Authz(config.authz_config) - rels = aiter( - authz.client.ReadRelationships( - ReadRelationshipsRequest( - relationship_filter=RelationshipFilter( - resource_type=ResourceType.user_namespace.value, optional_relation=_Relation.owner.value - ) - ) - ) - ) - num_rels = 0 - for _ in range(5): - if await anext(rels, None) is not None: - num_rels += 1 - if num_rels >= 5: - logging.info( - "Found at least 5 user namespace in the authorization database, " - "will not sync user namespaces to authorization." 
- ) - return - await sync_user_namespaces(config) - - -async def fix_mismatched_project_namespace_ids(config: SyncConfig) -> None: - """Fixes a problem where the project namespace relationship for projects has the wrong group ID.""" - api_user = InternalServiceAdmin(id=ServiceAdminId.migrations) - authz = Authz(config.authz_config) - res = authz.client.ReadRelationships( - ReadRelationshipsRequest( - consistency=Consistency(fully_consistent=True), - relationship_filter=RelationshipFilter( - resource_type=ResourceType.project, - optional_relation=_Relation.project_namespace.value, - optional_subject_filter=SubjectFilter(subject_type=ResourceType.group.value), - ), - ) - ) - async for rel in res: - logging.info(f"Checking project namespace - group relation {rel} for correct group ID") - project_id = rel.relationship.resource.object_id - try: - project = await config.project_repo.get_project(api_user, project_id) - except errors.MissingResourceError: - logging.info(f"Couldn't find project {project_id}, deleting relation") - await authz.client.WriteRelationships( - WriteRelationshipsRequest( - updates=[ - RelationshipUpdate( - operation=RelationshipUpdate.OPERATION_DELETE, - relationship=rel.relationship, - ), - ] - ) - ) - continue - - if project.namespace.kind != NamespaceKind.group: - continue - correct_group_id = project.namespace.underlying_resource_id - authzed_group_id = rel.relationship.subject.object.object_id - if authzed_group_id != correct_group_id: - logging.info( - f"The project namespace ID in Authzed {authzed_group_id} " - f"does not match the expected group ID {correct_group_id}, correcting it..." - ) - await authz.client.WriteRelationships( - WriteRelationshipsRequest( - updates=[ - RelationshipUpdate( - operation=RelationshipUpdate.OPERATION_TOUCH, - relationship=Relationship( - resource=rel.relationship.resource, - relation=rel.relationship.relation, - subject=SubjectReference( - object=ObjectReference( - object_type=ResourceType.group.value, object_id=str(correct_group_id) - ) - ), - ), - ), - RelationshipUpdate( - operation=RelationshipUpdate.OPERATION_DELETE, - relationship=rel.relationship, - ), - ] - ) - ) - - -async def migrate_groups_make_all_public(config: SyncConfig) -> None: - """Update existing groups to make them public.""" - logger = logging.getLogger("background_jobs").getChild(migrate_groups_make_all_public.__name__) - - authz = Authz(config.authz_config) - all_groups = authz.client.ReadRelationships( - ReadRelationshipsRequest( - relationship_filter=RelationshipFilter( - resource_type=ResourceType.group.value, - optional_relation=_Relation.group_platform.value, - ) - ) - ) - all_group_ids: set[str] = set() - async for group in all_groups: - all_group_ids.add(group.relationship.resource.object_id) - logger.info(f"All groups = {len(all_group_ids)}") - logger.info(f"All groups = {all_group_ids}") - - public_groups = authz.client.LookupResources( - LookupResourcesRequest( - resource_object_type=ResourceType.group.value, - permission=Scope.READ.value, - subject=SubjectReference(object=_AuthzConverter.anonymous_user()), - ) - ) - public_group_ids: set[str] = set() - async for group in public_groups: - public_group_ids.add(group.resource_object_id) - logger.info(f"Public groups = {len(public_group_ids)}") - logger.info(f"Public groups = {public_group_ids}") - - groups_to_process = all_group_ids - public_group_ids - logger.info(f"Groups to process = {groups_to_process}") - - all_users = SubjectReference(object=_AuthzConverter.all_users()) - all_anon_users = 
SubjectReference(object=_AuthzConverter.anonymous_users()) - for group_id in groups_to_process: - group_res = _AuthzConverter.group(ULID.from_str(group_id)) - all_users_are_viewers = Relationship( - resource=group_res, - relation=_Relation.public_viewer.value, - subject=all_users, - ) - all_anon_users_are_viewers = Relationship( - resource=group_res, - relation=_Relation.public_viewer.value, - subject=all_anon_users, - ) - authz_change = WriteRelationshipsRequest( - updates=[ - RelationshipUpdate(operation=RelationshipUpdate.OPERATION_TOUCH, relationship=rel) - for rel in [all_users_are_viewers, all_anon_users_are_viewers] - ] - ) - await authz.client.WriteRelationships(authz_change) - logger.info(f"Made group {group_id} public") - - -async def migrate_user_namespaces_make_all_public(config: SyncConfig) -> None: - """Update existing user namespaces to make them public.""" - logger = logging.getLogger("background_jobs").getChild(migrate_user_namespaces_make_all_public.__name__) - - authz = Authz(config.authz_config) - all_user_namespaces = authz.client.ReadRelationships( - ReadRelationshipsRequest( - relationship_filter=RelationshipFilter( - resource_type=ResourceType.user_namespace.value, - optional_relation=_Relation.user_namespace_platform.value, - ) - ) - ) - all_user_namespace_ids: set[str] = set() - async for ns in all_user_namespaces: - all_user_namespace_ids.add(ns.relationship.resource.object_id) - logger.info(f"All user namespaces = {len(all_user_namespace_ids)}") - logger.info(f"All user namespaces = {all_user_namespace_ids}") - - public_user_namespaces = authz.client.LookupResources( - LookupResourcesRequest( - resource_object_type=ResourceType.user_namespace.value, - permission=Scope.READ.value, - subject=SubjectReference(object=_AuthzConverter.anonymous_user()), - ) - ) - public_user_namespace_ids: set[str] = set() - async for ns in public_user_namespaces: - public_user_namespace_ids.add(ns.resource_object_id) - logger.info(f"Public user namespaces = {len(public_user_namespace_ids)}") - logger.info(f"Public user namespaces = {public_user_namespace_ids}") - - namespaces_to_process = all_user_namespace_ids - public_user_namespace_ids - logger.info(f"User namespaces to process = {namespaces_to_process}") - - all_users = SubjectReference(object=_AuthzConverter.all_users()) - all_anon_users = SubjectReference(object=_AuthzConverter.anonymous_users()) - for ns_id in namespaces_to_process: - namespace_res = _AuthzConverter.user_namespace(ULID.from_str(ns_id)) - all_users_are_viewers = Relationship( - resource=namespace_res, - relation=_Relation.public_viewer.value, - subject=all_users, - ) - all_anon_users_are_viewers = Relationship( - resource=namespace_res, - relation=_Relation.public_viewer.value, - subject=all_anon_users, - ) - authz_change = WriteRelationshipsRequest( - updates=[ - RelationshipUpdate(operation=RelationshipUpdate.OPERATION_TOUCH, relationship=rel) - for rel in [all_users_are_viewers, all_anon_users_are_viewers] - ] - ) - await authz.client.WriteRelationships(authz_change) - logger.info(f"Made user namespace {ns_id} public") - - -async def migrate_storages_v2_to_data_connectors(config: SyncConfig) -> list[BaseException]: - """Move storages_v2 to data_connectors.""" - logger = logging.getLogger("background_jobs").getChild(migrate_storages_v2_to_data_connectors.__name__) - - api_user = InternalServiceAdmin(id=ServiceAdminId.migrations) - storages_v2 = await config.data_connector_migration_tool.get_storages_v2(requested_by=api_user) - - if not storages_v2: - 
logger.info("Nothing to do.") - return [] - - logger.info(f"Migrating {len(storages_v2)} cloud storage v2 items to data connectors.") - failed_storages: list[str] = [] - errors: list[BaseException] = [] - for storage in storages_v2: - try: - data_connector = await config.data_connector_migration_tool.migrate_storage_v2( - requested_by=api_user, storage=storage - ) - logger.info(f"Migrated {storage.name} to {data_connector.namespace.slug}/{data_connector.slug}.") - logger.info(f"Deleted storage_v2: {storage.storage_id}") - except Exception as err: - logger.error(f"Failed to migrate {storage.name}.") - logger.error(err) - failed_storages.append(str(storage.storage_id)) - errors.append(err) - - logger.info(f"Migrated {len(storages_v2)-len(failed_storages)}/{len(storages_v2)} data connectors.") - if failed_storages: - logger.error(f"Migration failed for storages: {failed_storages}.") - return errors diff --git a/bases/renku_data_services/background_jobs/main.py b/bases/renku_data_services/background_jobs/main.py deleted file mode 100644 index 9e2e996f7..000000000 --- a/bases/renku_data_services/background_jobs/main.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Script to run a variety of background jobs independently from the data services deployment.""" - -import argparse -import asyncio -import logging - -from renku_data_services.authz.admin_sync import sync_admins_from_keycloak -from renku_data_services.authz.authz import Authz -from renku_data_services.background_jobs.config import SyncConfig -from renku_data_services.background_jobs.core import ( - bootstrap_user_namespaces, - fix_mismatched_project_namespace_ids, - generate_user_namespaces, - migrate_groups_make_all_public, - migrate_storages_v2_to_data_connectors, - migrate_user_namespaces_make_all_public, -) -from renku_data_services.background_jobs.utils import error_handler -from renku_data_services.migrations.core import run_migrations_for_app - -logging.basicConfig(level=logging.INFO) - - -async def short_period_sync() -> None: - """Perform synchronizations and jobs that should occur more often.""" - config = SyncConfig.from_env() - run_migrations_for_app("common") - - await error_handler( - [ - generate_user_namespaces(config), - bootstrap_user_namespaces(config), - config.syncer.events_sync(config.kc_api), - sync_admins_from_keycloak(config.kc_api, Authz(config.authz_config)), - fix_mismatched_project_namespace_ids(config), - migrate_groups_make_all_public(config), - migrate_user_namespaces_make_all_public(config), - migrate_storages_v2_to_data_connectors(config), - ] - ) - - -async def long_period_sync() -> None: - """Perform synchronizations and jobs that can occur more rarely.""" - config = SyncConfig.from_env() - run_migrations_for_app("common") - - await error_handler( - [config.syncer.users_sync(config.kc_api), sync_admins_from_keycloak(config.kc_api, Authz(config.authz_config))] - ) - - -async def main() -> None: - """Synchronize data from Keycloak and the user database.""" - logger = logging.getLogger("background_jobs") - logger.setLevel(logging.INFO) - - parser = argparse.ArgumentParser(prog="Data Service Background Jobs") - subparsers = parser.add_subparsers(help="Background job to run") - - short_period_sync_parser = subparsers.add_parser( - "short_period_sync", help="Perform background jobs that need to occur more often" - ) - short_period_sync_parser.set_defaults(func=short_period_sync) - long_period_sync_parser = subparsers.add_parser( - "long_period_sync", help="Perform background jobs that need to occur at a longer period" 
- ) - long_period_sync_parser.set_defaults(func=long_period_sync) - - args = parser.parse_args() - await args.func() - - -if __name__ == "__main__": - asyncio.run(main()) diff --git a/bases/renku_data_services/background_jobs/utils.py b/bases/renku_data_services/background_jobs/utils.py deleted file mode 100644 index 8b4061979..000000000 --- a/bases/renku_data_services/background_jobs/utils.py +++ /dev/null @@ -1,49 +0,0 @@ -"""Background jobs utilities.""" - -import traceback -from collections.abc import Coroutine -from dataclasses import dataclass - - -@dataclass -class BackgroundJobError(Exception): - """Error raised when an exception happenend in a background job.""" - - message = "Background job failed:" - errors: list[BaseException] - - def __format_errors__(self) -> str: - """Format contained errors for output.""" - error_messages = [] - total_errors = len(self.errors) - for i, error in enumerate(self.errors): - error_messages.append( - f"=== Error {i+1}/{total_errors} ===\n" - + "".join(traceback.TracebackException.from_exception(error).format()) - ) - - return "\n".join(error_messages) - - def __repr__(self) -> str: - """String representation of the error.""" - return f"{self.__class__.__qualname__}: {self.message}\n{self.__format_errors__()}" - - def __str__(self) -> str: - """String representation of the error.""" - return f"{self.__class__.__qualname__}: {self.message}\n{self.__format_errors__()}" - - -async def error_handler(tasks: list[Coroutine[None, None, None | list[BaseException]]]) -> None: - """Run all contained tasks and raise an error at the end if any failed.""" - errors: list[BaseException] = [] - for task in tasks: - try: - result = await task - except BaseException as err: - errors.append(err) - else: - if result is not None: - errors.extend(result) - - if len(errors) > 0: - raise BackgroundJobError(errors=errors) diff --git a/bases/renku_data_services/data_api/__init__.py b/bases/renku_data_services/data_api/__init__.py index 0af6be731..318dc4a63 100644 --- a/bases/renku_data_services/data_api/__init__.py +++ b/bases/renku_data_services/data_api/__init__.py @@ -1 +1,5 @@ """Renku data service.""" + +import renku_data_services.app_config.logging as logging + +logging.configure_logging() diff --git a/bases/renku_data_services/data_api/app.py b/bases/renku_data_services/data_api/app.py index 152bd53f3..fdbdb6e41 100644 --- a/bases/renku_data_services/data_api/app.py +++ b/bases/renku_data_services/data_api/app.py @@ -1,185 +1,255 @@ """Data service app.""" +from collections.abc import Callable +from typing import Any + from sanic import Sanic +from sanic_ext.exceptions import ValidationError +from sanic_ext.extras.validation.validators import VALIDATION_ERROR from ulid import ULID -from renku_data_services.app_config import Config +from renku_data_services import errors from renku_data_services.base_api.error_handler import CustomErrorHandler from renku_data_services.base_api.misc import MiscBP +from renku_data_services.base_models.core import Slug from renku_data_services.connected_services.blueprints import OAuth2ClientsBP, OAuth2ConnectionsBP from renku_data_services.crc import apispec from renku_data_services.crc.blueprints import ( ClassesBP, + ClustersBP, QuotaBP, ResourcePoolsBP, ResourcePoolUsersBP, UserResourcePoolsBP, ) +from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.data_connectors.blueprints import DataConnectorsBP -from renku_data_services.message_queue.blueprints import MessageQueueBP from 
renku_data_services.namespace.blueprints import GroupsBP
 from renku_data_services.notebooks.blueprints import NotebooksBP, NotebooksNewBP
 from renku_data_services.platform.blueprints import PlatformConfigBP
-from renku_data_services.project.blueprints import ProjectsBP
+from renku_data_services.project.blueprints import ProjectsBP, ProjectSessionSecretBP
 from renku_data_services.repositories.blueprints import RepositoriesBP
-from renku_data_services.session.blueprints import EnvironmentsBP, SessionLaunchersBP
+from renku_data_services.search.blueprints import SearchBP
+from renku_data_services.search.reprovision import SearchReprovision
+from renku_data_services.search.solr_user_query import UsernameResolve
+from renku_data_services.session.blueprints import BuildsBP, EnvironmentsBP, SessionLaunchersBP
 from renku_data_services.storage.blueprints import StorageBP, StorageSchemaBP
 from renku_data_services.users.blueprints import KCUsersBP, UserPreferencesBP, UserSecretsBP
 
 
-def register_all_handlers(app: Sanic, config: Config) -> Sanic:
+def str_to_slug(value: str) -> Slug:
+    """Convert a str to a Slug."""
+    try:
+        return Slug(value)
+    except errors.ValidationError as err:
+        raise ValueError("Couldn't parse slug") from err
+
+
+def _patched_validate_body(
+    validator: Callable[[type[Any], dict[str, Any]], Any],
+    model: type[Any],
+    body: dict[str, Any],
+) -> Any:
+    """Validate body method for monkey patching.
+
+    sanic_ext no longer returns contained exceptions as errors; instead it returns a string.
+    This undoes that change.
+    """
+    try:
+        return validator(model, body)
+    except VALIDATION_ERROR as e:
+        raise ValidationError(
+            f"Invalid request body: {model.__name__}. Error: {e}",
+            extra={"exception": e},
+        ) from e
+
+
+def register_all_handlers(app: Sanic, dm: DependencyManager) -> Sanic:
     """Register all handlers on the application."""
+    # WARNING: The regex is not actually used in most cases; instead, the conversion function must raise a ValueError
     app.router.register_pattern("ulid", ULID.from_str, r"^[0-7][0-9A-HJKMNP-TV-Z]{25}$")
-    app.router.register_pattern("renku_slug", str, r"^[a-zA-Z0-9][a-zA-Z0-9\-_.]*$")
+    app.router.register_pattern("renku_slug", str_to_slug, r"^[a-zA-Z0-9][a-zA-Z0-9-_.]*$")
 
     url_prefix = "/api/data"
     resource_pools = ResourcePoolsBP(
         name="resource_pools",
         url_prefix=url_prefix,
-        rp_repo=config.rp_repo,
-        authenticator=config.authenticator,
-        user_repo=config.user_repo,
+        rp_repo=dm.rp_repo,
+        authenticator=dm.authenticator,
+        user_repo=dm.user_repo,
+        cluster_repo=dm.cluster_repo,
     )
-    classes = ClassesBP(name="classes", url_prefix=url_prefix, repo=config.rp_repo, authenticator=config.authenticator)
+    classes = ClassesBP(name="classes", url_prefix=url_prefix, repo=dm.rp_repo, authenticator=dm.authenticator)
     quota = QuotaBP(
         name="quota",
         url_prefix=url_prefix,
-        rp_repo=config.rp_repo,
-        authenticator=config.authenticator,
-        quota_repo=config.quota_repo,
+        rp_repo=dm.rp_repo,
+        authenticator=dm.authenticator,
+        quota_repo=dm.quota_repo,
     )
-    users = KCUsersBP(name="users", url_prefix=url_prefix, repo=config.kc_user_repo, authenticator=config.authenticator)
+    users = KCUsersBP(name="users", url_prefix=url_prefix, repo=dm.kc_user_repo, authenticator=dm.authenticator)
     user_secrets = UserSecretsBP(
         name="user_secrets",
         url_prefix=url_prefix,
-        user_repo=config.kc_user_repo,
-        secret_repo=config.user_secrets_repo,
-        secret_service_public_key=config.secrets_service_public_key,
-        authenticator=config.authenticator,
+        secret_repo=dm.user_secrets_repo,
+        
authenticator=dm.authenticator, ) resource_pools_users = ResourcePoolUsersBP( name="resource_pool_users", url_prefix=url_prefix, - repo=config.user_repo, - authenticator=config.authenticator, - kc_user_repo=config.kc_user_repo, + repo=dm.user_repo, + authenticator=dm.authenticator, + kc_user_repo=dm.kc_user_repo, ) user_resource_pools = UserResourcePoolsBP( name="user_resource_pools", url_prefix=url_prefix, - repo=config.user_repo, - authenticator=config.authenticator, - kc_user_repo=config.kc_user_repo, + repo=dm.user_repo, + authenticator=dm.authenticator, + kc_user_repo=dm.kc_user_repo, ) + clusters = ClustersBP(name="clusters", url_prefix=url_prefix, repo=dm.cluster_repo, authenticator=dm.authenticator) storage = StorageBP( name="storage", url_prefix=url_prefix, - storage_repo=config.storage_repo, - authenticator=config.gitlab_authenticator, + storage_repo=dm.storage_repo, + authenticator=dm.gitlab_authenticator, ) storage_schema = StorageSchemaBP(name="storage_schema", url_prefix=url_prefix) user_preferences = UserPreferencesBP( name="user_preferences", url_prefix=url_prefix, - user_preferences_repo=config.user_preferences_repo, - authenticator=config.authenticator, + user_preferences_repo=dm.user_preferences_repo, + authenticator=dm.authenticator, ) - misc = MiscBP(name="misc", url_prefix=url_prefix, apispec=config.spec, version=config.version) + misc = MiscBP(name="misc", url_prefix=url_prefix, apispec=dm.spec, version=dm.config.version) project = ProjectsBP( name="projects", url_prefix=url_prefix, - project_repo=config.project_repo, - project_member_repo=config.project_member_repo, - authenticator=config.authenticator, - user_repo=config.kc_user_repo, - session_repo=config.session_repo, - data_connector_to_project_link_repo=config.data_connector_to_project_link_repo, + project_repo=dm.project_repo, + project_member_repo=dm.project_member_repo, + authenticator=dm.authenticator, + user_repo=dm.kc_user_repo, + session_repo=dm.session_repo, + data_connector_repo=dm.data_connector_repo, + project_migration_repo=dm.project_migration_repo, + metrics=dm.metrics, + ) + project_session_secrets = ProjectSessionSecretBP( + name="project_session_secrets", + url_prefix=url_prefix, + session_secret_repo=dm.project_session_secret_repo, + authenticator=dm.authenticator, ) group = GroupsBP( name="groups", url_prefix=url_prefix, - authenticator=config.authenticator, - group_repo=config.group_repo, + authenticator=dm.authenticator, + group_repo=dm.group_repo, + metrics=dm.metrics, ) session_environments = EnvironmentsBP( name="session_environments", url_prefix=url_prefix, - session_repo=config.session_repo, - authenticator=config.authenticator, + session_repo=dm.session_repo, + authenticator=dm.authenticator, ) session_launchers = SessionLaunchersBP( name="sessions_launchers", url_prefix=url_prefix, - session_repo=config.session_repo, - authenticator=config.authenticator, + session_repo=dm.session_repo, + authenticator=dm.authenticator, + metrics=dm.metrics, + ) + builds = ( + BuildsBP( + name="builds", + url_prefix=url_prefix, + session_repo=dm.session_repo, + authenticator=dm.authenticator, + ) + if dm.config.builds.enabled + else None ) oauth2_clients = OAuth2ClientsBP( name="oauth2_clients", url_prefix=url_prefix, - connected_services_repo=config.connected_services_repo, - authenticator=config.authenticator, + connected_services_repo=dm.connected_services_repo, + authenticator=dm.authenticator, ) oauth2_connections = OAuth2ConnectionsBP( name="oauth2_connections", url_prefix=url_prefix, - 
connected_services_repo=config.connected_services_repo, - authenticator=config.authenticator, - internal_gitlab_authenticator=config.gitlab_authenticator, + connected_services_repo=dm.connected_services_repo, + authenticator=dm.authenticator, + internal_gitlab_authenticator=dm.gitlab_authenticator, ) repositories = RepositoriesBP( name="repositories", url_prefix=url_prefix, - git_repositories_repo=config.git_repositories_repo, - authenticator=config.authenticator, - internal_gitlab_authenticator=config.gitlab_authenticator, + git_repositories_repo=dm.git_repositories_repo, + authenticator=dm.authenticator, + internal_gitlab_authenticator=dm.gitlab_authenticator, ) notebooks = NotebooksBP( name="notebooks_old", url_prefix=url_prefix, - authenticator=config.authenticator, - nb_config=config.nb_config, - internal_gitlab_authenticator=config.gitlab_authenticator, - git_repo=config.git_repositories_repo, - rp_repo=config.rp_repo, + authenticator=dm.authenticator, + nb_config=dm.config.nb_config, + internal_gitlab_authenticator=dm.gitlab_authenticator, + git_repo=dm.git_repositories_repo, + rp_repo=dm.rp_repo, + user_repo=dm.kc_user_repo, + storage_repo=dm.storage_repo, ) notebooks_new = NotebooksNewBP( name="notebooks", url_prefix=url_prefix, - authenticator=config.authenticator, - nb_config=config.nb_config, - project_repo=config.project_repo, - session_repo=config.session_repo, - storage_repo=config.storage_repo, - rp_repo=config.rp_repo, - data_connector_repo=config.data_connector_repo, - data_connector_project_link_repo=config.data_connector_to_project_link_repo, - data_connector_secret_repo=config.data_connector_secret_repo, - internal_gitlab_authenticator=config.gitlab_authenticator, + authenticator=dm.authenticator, + nb_config=dm.config.nb_config, + project_repo=dm.project_repo, + project_session_secret_repo=dm.project_session_secret_repo, + session_repo=dm.session_repo, + storage_repo=dm.storage_repo, + rp_repo=dm.rp_repo, + user_repo=dm.kc_user_repo, + data_connector_repo=dm.data_connector_repo, + data_connector_secret_repo=dm.data_connector_secret_repo, + cluster_repo=dm.cluster_repo, + internal_gitlab_authenticator=dm.gitlab_authenticator, + metrics=dm.metrics, ) platform_config = PlatformConfigBP( name="platform_config", url_prefix=url_prefix, - platform_repo=config.platform_repo, - authenticator=config.authenticator, + platform_repo=dm.platform_repo, + authenticator=dm.authenticator, ) - message_queue = MessageQueueBP( - name="search", + search = SearchBP( + name="search2", url_prefix=url_prefix, - authenticator=config.authenticator, - session_maker=config.db.async_session_maker, - reprovisioning_repo=config.reprovisioning_repo, - user_repo=config.kc_user_repo, - group_repo=config.group_repo, - project_repo=config.project_repo, - authz=config.authz, + authenticator=dm.authenticator, + username_resolve=UsernameResolve.db(dm.kc_user_repo), + search_reprovision=SearchReprovision( + search_updates_repo=dm.search_updates_repo, + reprovisioning_repo=dm.reprovisioning_repo, + solr_config=dm.config.solr, + user_repo=dm.kc_user_repo, + group_repo=dm.group_repo, + project_repo=dm.project_repo, + data_connector_repo=dm.data_connector_repo, + ), + solr_config=dm.config.solr, + authz=dm.authz, + metrics=dm.metrics, ) data_connectors = DataConnectorsBP( name="data_connectors", url_prefix=url_prefix, - data_connector_repo=config.data_connector_repo, - data_connector_to_project_link_repo=config.data_connector_to_project_link_repo, - data_connector_secret_repo=config.data_connector_secret_repo, - 
authenticator=config.authenticator,
+        data_connector_repo=dm.data_connector_repo,
+        data_connector_secret_repo=dm.data_connector_secret_repo,
+        authenticator=dm.authenticator,
+        metrics=dm.metrics,
     )
     app.blueprint(
         [
@@ -190,11 +260,13 @@ def register_all_handlers(app: Sanic, config: Config) -> Sanic:
             users.blueprint(),
             user_secrets.blueprint(),
             user_resource_pools.blueprint(),
+            clusters.blueprint(),
             storage.blueprint(),
             storage_schema.blueprint(),
             user_preferences.blueprint(),
             misc.blueprint(),
             project.blueprint(),
+            project_session_secrets.blueprint(),
             group.blueprint(),
             session_environments.blueprint(),
             session_launchers.blueprint(),
@@ -204,10 +276,17 @@ def register_all_handlers(app: Sanic, config: Config) -> Sanic:
             notebooks.blueprint(),
             notebooks_new.blueprint(),
             platform_config.blueprint(),
-            message_queue.blueprint(),
+            search.blueprint(),
             data_connectors.blueprint(),
         ]
     )
+    if builds is not None:
+        app.blueprint(builds.blueprint())
+
+    # We need to patch sanic_ext since, as of version 24.12, it only sends a string representation of errors
+    import sanic_ext.extras.validation.setup
+
+    sanic_ext.extras.validation.setup.validate_body = _patched_validate_body
 
     app.error_handler = CustomErrorHandler(apispec)
     app.config.OAS = False
diff --git a/bases/renku_data_services/data_api/config.py b/bases/renku_data_services/data_api/config.py
new file mode 100644
index 000000000..1af9d86d5
--- /dev/null
+++ b/bases/renku_data_services/data_api/config.py
@@ -0,0 +1,79 @@
+"""Configuration for data api."""
+
+import os
+from dataclasses import dataclass
+from typing import Self
+
+from renku_data_services import errors
+from renku_data_services.app_config.config import KeycloakConfig, PosthogConfig, SentryConfig, TrustedProxiesConfig
+from renku_data_services.app_config.logging import Config as LoggingConfig
+from renku_data_services.authz.config import AuthzConfig
+from renku_data_services.db_config.config import DBConfig
+from renku_data_services.notebooks.config import NotebooksConfig
+from renku_data_services.notebooks.config.dynamic import ServerOptionsConfig
+from renku_data_services.secrets.config import PublicSecretsConfig
+from renku_data_services.session.config import BuildsConfig
+from renku_data_services.solr.solr_client import SolrClientConfig
+from renku_data_services.users.config import UserPreferencesConfig
+
+
+@dataclass
+class Config:
+    """Application configuration."""
+
+    dummy_stores: bool
+    k8s_namespace: str
+    k8s_config_root: str
+    db: DBConfig
+    builds: BuildsConfig
+    nb_config: NotebooksConfig
+    secrets: PublicSecretsConfig
+    sentry: SentryConfig
+    posthog: PosthogConfig
+    solr: SolrClientConfig
+    authz_config: AuthzConfig
+    trusted_proxies: TrustedProxiesConfig
+    keycloak: KeycloakConfig | None
+    user_preferences: UserPreferencesConfig
+    server_options: ServerOptionsConfig
+    gitlab_url: str | None
+    log_cfg: LoggingConfig
+    version: str
+
+    @classmethod
+    def from_env(cls, db: DBConfig | None = None) -> Self:
+        """Load config from environment."""
+
+        dummy_stores = os.environ.get("DUMMY_STORES", "false").lower() == "true"
+        if db is None:
+            db = DBConfig.from_env()
+
+        if dummy_stores:
+            keycloak = None
+            gitlab_url = None
+        else:
+            keycloak = KeycloakConfig.from_env()
+            gitlab_url = os.environ.get("GITLAB_URL")
+            if gitlab_url is None:
+                raise errors.ConfigurationError(message="Please provide the gitlab instance URL")
+
+        return cls(
+            version=os.environ.get("VERSION", "0.0.1"),
+            dummy_stores=dummy_stores,
+            k8s_namespace=os.environ.get("K8S_NAMESPACE", "default"),
+            
k8s_config_root=os.environ.get("K8S_CONFIGS_ROOT", "/secrets/kube_configs"), + db=db, + builds=BuildsConfig.from_env(), + nb_config=NotebooksConfig.from_env(db), + secrets=PublicSecretsConfig.from_env(), + sentry=SentryConfig.from_env(), + posthog=PosthogConfig.from_env(), + authz_config=AuthzConfig.from_env(), + solr=SolrClientConfig.from_env(), + trusted_proxies=TrustedProxiesConfig.from_env(), + keycloak=keycloak, + user_preferences=UserPreferencesConfig.from_env(), + server_options=ServerOptionsConfig.from_env(), + gitlab_url=gitlab_url, + log_cfg=LoggingConfig.from_env(), + ) diff --git a/bases/renku_data_services/data_api/dependencies.py b/bases/renku_data_services/data_api/dependencies.py new file mode 100644 index 000000000..4468568f2 --- /dev/null +++ b/bases/renku_data_services/data_api/dependencies.py @@ -0,0 +1,420 @@ +"""Dependency management for data api.""" + +import functools +import os +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any + +from authlib.integrations.httpx_client import AsyncOAuth2Client +from jwt import PyJWKClient +from yaml import safe_load + +import renku_data_services.base_models as base_models +import renku_data_services.connected_services +import renku_data_services.crc +import renku_data_services.data_connectors +import renku_data_services.platform +import renku_data_services.repositories +import renku_data_services.search +import renku_data_services.storage +import renku_data_services.users +from renku_data_services import errors +from renku_data_services.authn.dummy import DummyAuthenticator, DummyUserStore +from renku_data_services.authn.gitlab import GitlabAuthenticator +from renku_data_services.authn.keycloak import KcUserStore, KeycloakAuthenticator +from renku_data_services.authz.authz import Authz +from renku_data_services.connected_services.db import ConnectedServicesRepository +from renku_data_services.crc import models as crc_models +from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository, UserRepository +from renku_data_services.crc.server_options import ( + ServerOptions, + ServerOptionsDefaults, + generate_default_resource_pool, +) +from renku_data_services.data_api.config import Config +from renku_data_services.data_connectors.db import ( + DataConnectorRepository, + DataConnectorSecretRepository, +) +from renku_data_services.git.gitlab import DummyGitlabAPI, GitlabAPI +from renku_data_services.k8s.clients import ( + DummyCoreClient, + DummySchedulingClient, + K8sClusterClientsPool, + K8sCoreClient, + K8sSchedulingClient, +) +from renku_data_services.k8s.config import KubeConfigEnv +from renku_data_services.k8s.quota import QuotaRepository +from renku_data_services.k8s_watcher import K8sDbCache +from renku_data_services.message_queue.db import ReprovisioningRepository +from renku_data_services.metrics.core import StagingMetricsService +from renku_data_services.metrics.db import MetricsRepository +from renku_data_services.namespace.db import GroupRepository +from renku_data_services.notebooks.config import get_clusters +from renku_data_services.notebooks.constants import AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK +from renku_data_services.platform.db import PlatformRepository +from renku_data_services.project.db import ( + ProjectMemberRepository, + ProjectMigrationRepository, + ProjectRepository, + ProjectSessionSecretRepository, +) +from renku_data_services.repositories.db import GitRepositoriesRepository +from renku_data_services.search import query_manual +from 
renku_data_services.search.db import SearchUpdatesRepo +from renku_data_services.search.reprovision import SearchReprovision +from renku_data_services.secrets.db import LowLevelUserSecretsRepo, UserSecretsRepo +from renku_data_services.session.constants import BUILD_RUN_GVK, TASK_RUN_GVK +from renku_data_services.session.db import SessionRepository +from renku_data_services.session.k8s_client import ShipwrightClient +from renku_data_services.storage.db import StorageRepository +from renku_data_services.users.db import UserPreferencesRepository +from renku_data_services.users.db import UserRepo as KcUserRepo +from renku_data_services.users.dummy_kc_api import DummyKeycloakAPI +from renku_data_services.users.kc_api import IKeycloakAPI, KeycloakAPI +from renku_data_services.users.models import UnsavedUserInfo +from renku_data_services.utils.core import merge_api_specs, oidc_discovery + +default_resource_pool = crc_models.ResourcePool( + name="default", + classes=[ + crc_models.ResourceClass( + name="small", + cpu=0.5, + memory=1, + max_storage=20, + gpu=0, + default=True, + ), + crc_models.ResourceClass( + name="large", + cpu=1.0, + memory=2, + max_storage=20, + gpu=0, + default=False, + ), + ], + quota=None, + public=True, + default=True, +) + + +@dataclass +class DependencyManager: + """Configuration for the Data service.""" + + config: Config + + user_store: base_models.UserStore + authenticator: base_models.Authenticator + gitlab_authenticator: base_models.Authenticator + quota_repo: QuotaRepository + gitlab_client: base_models.GitlabAPIProtocol + kc_api: IKeycloakAPI + authz: Authz + user_repo: UserRepository + rp_repo: ResourcePoolRepository + storage_repo: StorageRepository + project_repo: ProjectRepository + project_migration_repo: ProjectMigrationRepository + group_repo: GroupRepository + reprovisioning_repo: ReprovisioningRepository + search_updates_repo: SearchUpdatesRepo + search_reprovisioning: SearchReprovision + session_repo: SessionRepository + user_preferences_repo: UserPreferencesRepository + kc_user_repo: KcUserRepo + low_level_user_secrets_repo: LowLevelUserSecretsRepo + user_secrets_repo: UserSecretsRepo + project_member_repo: ProjectMemberRepository + project_session_secret_repo: ProjectSessionSecretRepository + connected_services_repo: ConnectedServicesRepository + git_repositories_repo: GitRepositoriesRepository + platform_repo: PlatformRepository + data_connector_repo: DataConnectorRepository + data_connector_secret_repo: DataConnectorSecretRepository + cluster_repo: ClusterRepository + metrics_repo: MetricsRepository + metrics: StagingMetricsService + shipwright_client: ShipwrightClient | None + + spec: dict[str, Any] = field(init=False, repr=False, default_factory=dict) + app_name: str = "renku_data_services" + default_resource_pool_file: str | None = None + default_resource_pool: crc_models.ResourcePool = default_resource_pool + async_oauth2_client_class: type[AsyncOAuth2Client] = AsyncOAuth2Client + + @staticmethod + @functools.cache + def load_apispec() -> dict[str, Any]: + """Load apispec with caching. + + Note: loading these files takes quite some time and is repeated for each test. Having + them cached in this method reduces that time significantly. 
+ """ + files = [ + renku_data_services.crc.__file__, + renku_data_services.storage.__file__, + renku_data_services.users.__file__, + renku_data_services.project.__file__, + renku_data_services.namespace.__file__, + renku_data_services.session.__file__, + renku_data_services.connected_services.__file__, + renku_data_services.repositories.__file__, + renku_data_services.notebooks.__file__, + renku_data_services.platform.__file__, + renku_data_services.data_connectors.__file__, + renku_data_services.search.__file__, + ] + + api_specs = [] + + # NOTE: Read spec files required for Swagger + for file in files: + spec_file = Path(file).resolve().parent / "api.spec.yaml" + with open(spec_file) as f: + yaml_content = safe_load(f) + if file == renku_data_services.search.__file__: + qm = query_manual.safe_manual_to_str() + yaml_content["paths"]["/search/query"]["get"]["description"] = qm + + api_specs.append(yaml_content) + + return merge_api_specs(*api_specs) + + def __post_init__(self) -> None: + self.spec = self.load_apispec() + + if self.default_resource_pool_file is not None: + with open(self.default_resource_pool_file) as f: + self.default_resource_pool = crc_models.ResourcePool.from_dict(safe_load(f)) + if ( + self.config.server_options.defaults_path is not None + and self.config.server_options.ui_choices_path is not None + ): + with open(self.config.server_options.ui_choices_path) as f: + options = ServerOptions.model_validate(safe_load(f)) + with open(self.config.server_options.defaults_path) as f: + defaults = ServerOptionsDefaults.model_validate(safe_load(f)) + self.default_resource_pool = generate_default_resource_pool(options, defaults) + + @classmethod + def from_env(cls) -> "DependencyManager": + """Create a config from environment variables.""" + + user_store: base_models.UserStore + authenticator: base_models.Authenticator + gitlab_authenticator: base_models.Authenticator + gitlab_client: base_models.GitlabAPIProtocol + shipwright_client: ShipwrightClient | None = None + + config = Config.from_env() + kc_api: IKeycloakAPI + cluster_repo = ClusterRepository(session_maker=config.db.async_session_maker) + + if config.dummy_stores: + authenticator = DummyAuthenticator() + gitlab_authenticator = DummyAuthenticator() + quota_repo = QuotaRepository( + DummyCoreClient({}, {}), DummySchedulingClient({}), namespace=config.k8s_namespace + ) + user_always_exists = os.environ.get("DUMMY_USERSTORE_USER_ALWAYS_EXISTS", "true").lower() == "true" + user_store = DummyUserStore(user_always_exists=user_always_exists) + gitlab_client = DummyGitlabAPI() + dummy_users = [ + UnsavedUserInfo(id="user1", first_name="user1", last_name="doe", email="user1@doe.com"), + UnsavedUserInfo(id="user2", first_name="user2", last_name="doe", email="user2@doe.com"), + ] + kc_api = DummyKeycloakAPI(users=[i.to_keycloak_dict() for i in dummy_users]) + else: + quota_repo = QuotaRepository(K8sCoreClient(), K8sSchedulingClient(), namespace=config.k8s_namespace) + assert config.keycloak is not None + oidc_disc_data = oidc_discovery(config.keycloak.url, config.keycloak.realm) + jwks_url = oidc_disc_data.get("jwks_uri") + if jwks_url is None: + raise errors.ConfigurationError( + message="The JWKS url for Keycloak cannot be found from the OIDC discovery endpoint." 
+ ) + jwks = PyJWKClient(jwks_url) + if config.keycloak.algorithms is None: + raise errors.ConfigurationError(message="At least one token signature algorithm is required.") + + authenticator = KeycloakAuthenticator(jwks=jwks, algorithms=config.keycloak.algorithms) + assert config.gitlab_url is not None + gitlab_authenticator = GitlabAuthenticator(gitlab_url=config.gitlab_url) + user_store = KcUserStore(keycloak_url=config.keycloak.url, realm=config.keycloak.realm) + gitlab_client = GitlabAPI(gitlab_url=config.gitlab_url) + kc_api = KeycloakAPI( + keycloak_url=config.keycloak.url, + client_id=config.keycloak.client_id, + client_secret=config.keycloak.client_secret, + realm=config.keycloak.realm, + ) + if config.builds.enabled: + # NOTE: we need to get an async client as a sync client can't be used in an async way + # But all the config code is not async, so we need to drop into the running loop, if there is one + kr8s_api = KubeConfigEnv().api() + k8s_db_cache = K8sDbCache(config.db.async_session_maker) + client = K8sClusterClientsPool( + get_clusters=get_clusters( + kube_conf_root_dir=config.k8s_config_root, + namespace=config.k8s_namespace, + api=kr8s_api, + cluster_rp=cluster_repo, + ), + cache=k8s_db_cache, + kinds_to_cache=[AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK, BUILD_RUN_GVK, TASK_RUN_GVK], + ) + shipwright_client = ShipwrightClient( + client=client, + namespace=config.k8s_namespace, + ) + + authz = Authz(config.authz_config) + search_updates_repo = SearchUpdatesRepo(session_maker=config.db.async_session_maker) + group_repo = GroupRepository( + session_maker=config.db.async_session_maker, + group_authz=authz, + search_updates_repo=search_updates_repo, + ) + kc_user_repo = KcUserRepo( + session_maker=config.db.async_session_maker, + group_repo=group_repo, + search_updates_repo=search_updates_repo, + encryption_key=config.secrets.encryption_key, + authz=authz, + ) + + user_repo = UserRepository( + session_maker=config.db.async_session_maker, + quotas_repo=quota_repo, + user_repo=kc_user_repo, + ) + rp_repo = ResourcePoolRepository(session_maker=config.db.async_session_maker, quotas_repo=quota_repo) + storage_repo = StorageRepository( + session_maker=config.db.async_session_maker, + gitlab_client=gitlab_client, + user_repo=kc_user_repo, + secret_service_public_key=config.secrets.public_key, + ) + reprovisioning_repo = ReprovisioningRepository(session_maker=config.db.async_session_maker) + project_repo = ProjectRepository( + session_maker=config.db.async_session_maker, + authz=authz, + group_repo=group_repo, + search_updates_repo=search_updates_repo, + ) + session_repo = SessionRepository( + session_maker=config.db.async_session_maker, + project_authz=authz, + resource_pools=rp_repo, + shipwright_client=shipwright_client, + builds_config=config.builds, + ) + project_migration_repo = ProjectMigrationRepository( + session_maker=config.db.async_session_maker, + authz=authz, + project_repo=project_repo, + session_repo=session_repo, + ) + project_member_repo = ProjectMemberRepository( + session_maker=config.db.async_session_maker, + authz=authz, + ) + project_session_secret_repo = ProjectSessionSecretRepository( + session_maker=config.db.async_session_maker, + authz=authz, + user_repo=kc_user_repo, + secret_service_public_key=config.secrets.public_key, + ) + user_preferences_repo = UserPreferencesRepository( + session_maker=config.db.async_session_maker, + user_preferences_config=config.user_preferences, + ) + low_level_user_secrets_repo = LowLevelUserSecretsRepo( + 
session_maker=config.db.async_session_maker, + ) + user_secrets_repo = UserSecretsRepo( + session_maker=config.db.async_session_maker, + low_level_repo=low_level_user_secrets_repo, + user_repo=kc_user_repo, + secret_service_public_key=config.secrets.public_key, + ) + connected_services_repo = ConnectedServicesRepository( + session_maker=config.db.async_session_maker, + encryption_key=config.secrets.encryption_key, + async_oauth2_client_class=cls.async_oauth2_client_class, + internal_gitlab_url=config.gitlab_url, + ) + git_repositories_repo = GitRepositoriesRepository( + session_maker=config.db.async_session_maker, + connected_services_repo=connected_services_repo, + internal_gitlab_url=config.gitlab_url, + ) + platform_repo = PlatformRepository( + session_maker=config.db.async_session_maker, + ) + data_connector_repo = DataConnectorRepository( + session_maker=config.db.async_session_maker, + authz=authz, + project_repo=project_repo, + group_repo=group_repo, + search_updates_repo=search_updates_repo, + ) + data_connector_secret_repo = DataConnectorSecretRepository( + session_maker=config.db.async_session_maker, + data_connector_repo=data_connector_repo, + user_repo=kc_user_repo, + secret_service_public_key=config.secrets.public_key, + authz=authz, + ) + search_reprovisioning = SearchReprovision( + search_updates_repo=search_updates_repo, + reprovisioning_repo=reprovisioning_repo, + solr_config=config.solr, + user_repo=kc_user_repo, + group_repo=group_repo, + project_repo=project_repo, + data_connector_repo=data_connector_repo, + ) + metrics_repo = MetricsRepository(session_maker=config.db.async_session_maker) + metrics = StagingMetricsService(enabled=config.posthog.enabled, metrics_repo=metrics_repo) + return cls( + config, + authenticator=authenticator, + gitlab_authenticator=gitlab_authenticator, + gitlab_client=gitlab_client, + user_store=user_store, + quota_repo=quota_repo, + kc_api=kc_api, + user_repo=user_repo, + rp_repo=rp_repo, + storage_repo=storage_repo, + reprovisioning_repo=reprovisioning_repo, + search_updates_repo=search_updates_repo, + search_reprovisioning=search_reprovisioning, + project_repo=project_repo, + project_migration_repo=project_migration_repo, + project_member_repo=project_member_repo, + project_session_secret_repo=project_session_secret_repo, + group_repo=group_repo, + session_repo=session_repo, + user_preferences_repo=user_preferences_repo, + kc_user_repo=kc_user_repo, + user_secrets_repo=user_secrets_repo, + connected_services_repo=connected_services_repo, + git_repositories_repo=git_repositories_repo, + platform_repo=platform_repo, + data_connector_repo=data_connector_repo, + data_connector_secret_repo=data_connector_secret_repo, + cluster_repo=cluster_repo, + metrics_repo=metrics_repo, + metrics=metrics, + shipwright_client=shipwright_client, + authz=authz, + low_level_user_secrets_repo=low_level_user_secrets_repo, + ) diff --git a/bases/renku_data_services/data_api/main.py b/bases/renku_data_services/data_api/main.py index cf8d0f6a8..831a9018c 100644 --- a/bases/renku_data_services/data_api/main.py +++ b/bases/renku_data_services/data_api/main.py @@ -7,17 +7,20 @@ import sentry_sdk import uvloop -from sanic import Sanic -from sanic.log import logger +from sanic import Request, Sanic +from sanic.response import BaseHTTPResponse from sanic.worker.loader import AppLoader from sentry_sdk.integrations.asyncio import AsyncioIntegration from sentry_sdk.integrations.grpc import GRPCIntegration from sentry_sdk.integrations.sanic import SanicIntegration, 
_context_enter, _context_exit, _set_transaction
 
-from renku_data_services.app_config import Config
+import renku_data_services.solr.entity_schema as entity_schema
+from renku_data_services.app_config import logging
 from renku_data_services.authz.admin_sync import sync_admins_from_keycloak
+from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId
 from renku_data_services.data_api.app import register_all_handlers
-from renku_data_services.data_api.prometheus import collect_system_metrics, setup_app_metrics, setup_prometheus
+from renku_data_services.data_api.dependencies import DependencyManager
+from renku_data_services.data_api.prometheus import setup_app_metrics, setup_prometheus
 from renku_data_services.errors.errors import (
     ForbiddenError,
     MissingResourceError,
@@ -25,6 +28,7 @@
     ValidationError,
 )
 from renku_data_services.migrations.core import run_migrations_for_app
+from renku_data_services.solr.solr_migrate import SchemaMigrator
 from renku_data_services.storage.rclone import RCloneValidator
 from renku_data_services.utils.middleware import validate_null_byte
 
@@ -32,48 +36,48 @@
     import sentry_sdk._types
 
 
-async def _send_messages(app: Sanic) -> None:
-    config = Config.from_env()
-    while True:
-        try:
-            await config.event_repo.send_pending_events()
-            # we need to collect metrics for this background process separately from the task we add to the
-            # server processes
-            await collect_system_metrics(app, "send_events_worker")
-            await asyncio.sleep(1)
-        except (asyncio.CancelledError, KeyboardInterrupt) as e:
-            logger.warning(f"Exiting: {e}")
-            return
-        except Exception as e:
-            logger.warning(f"Background task failed: {e}")
-            raise
-
-
-def send_pending_events(app_name: str) -> None:
-    """Send pending messages in case sending in a handler failed."""
+logger = logging.getLogger(__name__)
+
+
+async def _solr_reindex(app: Sanic) -> None:
+    """Run a solr reindex of all data.
+
+    This might be required after migrating the solr schema.
+    """
+    dm = DependencyManager.from_env()
+    reprovision = dm.search_reprovisioning
+    admin = InternalServiceAdmin(id=ServiceAdminId.search_reprovision)
+    await reprovision.run_reprovision(admin)
+
+
+def solr_reindex(app_name: str) -> None:
+    """Run a solr reindex."""
     app = Sanic(app_name)
     setup_app_metrics(app)
 
-    logger.info("running events sending loop.")
-
+    logger.info("Running SOLR reindex triggered by a migration")
     asyncio.set_event_loop(uvloop.new_event_loop())
-    asyncio.run(_send_messages(app))
+    asyncio.run(_solr_reindex(app))
 
 
 def create_app() -> Sanic:
     """Create a Sanic application."""
-    config = Config.from_env()
-    app = Sanic(config.app_name)
+    dependency_manager = DependencyManager.from_env()
+    app = Sanic(dependency_manager.app_name, configure_logging=False)
 
     if "COVERAGE_RUN" in environ:
         app.config.TOUCHUP = False
         # NOTE: in single process mode where we usually run schemathesis to get coverage the db migrations
        # specified below with the main_process_start decorator do not run. 
run_migrations_for_app("common") - asyncio.run(config.rp_repo.initialize(config.db.conn_url(async_client=False), config.default_resource_pool)) - asyncio.run(config.kc_user_repo.initialize(config.kc_api)) - asyncio.run(sync_admins_from_keycloak(config.kc_api, config.authz)) - if config.sentry.enabled: + asyncio.run( + dependency_manager.rp_repo.initialize( + dependency_manager.config.db.conn_url(async_client=False), dependency_manager.default_resource_pool + ) + ) + asyncio.run(dependency_manager.kc_user_repo.initialize(dependency_manager.kc_api)) + asyncio.run(sync_admins_from_keycloak(dependency_manager.kc_api, dependency_manager.authz)) + if dependency_manager.config.sentry.enabled: logger.info("enabling sentry") def filter_error( @@ -88,15 +92,15 @@ def filter_error( @app.before_server_start async def setup_sentry(_: Sanic) -> None: sentry_sdk.init( - dsn=config.sentry.dsn, - environment=config.sentry.environment, + dsn=dependency_manager.config.sentry.dsn, + environment=dependency_manager.config.sentry.environment, integrations=[ AsyncioIntegration(), SanicIntegration(unsampled_statuses={404, 403, 401}), GRPCIntegration(), ], - enable_tracing=config.sentry.sample_rate > 0, - traces_sample_rate=config.sentry.sample_rate, + enable_tracing=dependency_manager.config.sentry.sample_rate > 0, + traces_sample_rate=dependency_manager.config.sentry.sample_rate, before_send=filter_error, in_app_include=["renku_data_services"], ) @@ -106,14 +110,17 @@ async def setup_sentry(_: Sanic) -> None: app.signal("http.lifecycle.request")(_context_enter) app.signal("http.lifecycle.response")(_context_exit) app.signal("http.routing.after")(_set_transaction) - if config.trusted_proxies.proxies_count is not None and config.trusted_proxies.proxies_count > 0: - app.config.PROXIES_COUNT = config.trusted_proxies.proxies_count + if ( + dependency_manager.config.trusted_proxies.proxies_count is not None + and dependency_manager.config.trusted_proxies.proxies_count > 0 + ): + app.config.PROXIES_COUNT = dependency_manager.config.trusted_proxies.proxies_count logger.info(f"PROXIES_COUNT = {app.config.PROXIES_COUNT}") - if config.trusted_proxies.real_ip_header: - app.config.REAL_IP_HEADER = config.trusted_proxies.real_ip_header + if dependency_manager.config.trusted_proxies.real_ip_header: + app.config.REAL_IP_HEADER = dependency_manager.config.trusted_proxies.real_ip_header logger.info(f"REAL_IP_HEADER = {app.config.REAL_IP_HEADER}") - app = register_all_handlers(app, config) + app = register_all_handlers(app, dependency_manager) setup_prometheus(app) if environ.get("CORS_ALLOW_ALL_ORIGINS", "false").lower() == "true": @@ -124,11 +131,37 @@ async def setup_sentry(_: Sanic) -> None: app.register_middleware(validate_null_byte, "request") + @app.on_request + async def set_request_id(request: Request) -> None: + logging.set_request_id(str(request.id)) + + @app.middleware("response") + async def set_request_id_header(request: Request, response: BaseHTTPResponse) -> None: + response.headers["X-Request-ID"] = request.id + + @app.middleware("response") + async def handle_head(request: Request, response: BaseHTTPResponse) -> None: + """Make sure HEAD requests return an empty body.""" + if request.method == "HEAD": + response.body = None + @app.main_process_start async def do_migrations(_: Sanic) -> None: logger.info("running migrations") run_migrations_for_app("common") - await config.rp_repo.initialize(config.db.conn_url(async_client=False), config.default_resource_pool) + await dependency_manager.rp_repo.initialize( + 
dependency_manager.config.db.conn_url(async_client=False), dependency_manager.default_resource_pool + ) + + @app.main_process_start + async def do_solr_migrations(app: Sanic) -> None: + logger.info(f"Running SOLR migrations at: {dependency_manager.config.solr}") + migrator = SchemaMigrator(dependency_manager.config.solr) + await migrator.ensure_core() + result = await migrator.migrate(entity_schema.all_migrations) + # starting background tasks can only be done in `main_process_ready` + app.ctx.solr_reindex = result.requires_reindex + logger.info(f"SOLR migration done: {result}") @app.before_server_start async def setup_rclone_validator(app: Sanic) -> None: @@ -138,8 +171,17 @@ async def setup_rclone_validator(app: Sanic) -> None: @app.main_process_ready async def ready(app: Sanic) -> None: """Application ready event handler.""" - logger.info("starting events background job.") - app.manager.manage("SendEvents", send_pending_events, {"app_name": config.app_name}, transient=True) + if getattr(app.ctx, "solr_reindex", False): + logger.info("Starting solr reindex, as required by migrations.") + app.manager.manage("SolrReindex", solr_reindex, {"app_name": app.name}, transient=True) + + @app.before_server_start + async def logging_setup1(app: Sanic) -> None: + logging.configure_logging(dependency_manager.config.log_cfg) + + @app.main_process_ready + async def logging_setup2(app: Sanic) -> None: + logging.configure_logging(dependency_manager.config.log_cfg) return app diff --git a/bases/renku_data_services/data_tasks/__init__.py b/bases/renku_data_services/data_tasks/__init__.py new file mode 100644 index 000000000..99d7778cf --- /dev/null +++ b/bases/renku_data_services/data_tasks/__init__.py @@ -0,0 +1,5 @@ +"""Renku data service tasks.""" + +import renku_data_services.app_config.logging as logging + +logging.configure_logging() diff --git a/bases/renku_data_services/data_tasks/config.py b/bases/renku_data_services/data_tasks/config.py new file mode 100644 index 000000000..498c932a4 --- /dev/null +++ b/bases/renku_data_services/data_tasks/config.py @@ -0,0 +1,85 @@ +"""Data tasks configuration.""" + +from __future__ import annotations + +import os +from dataclasses import dataclass + +from renku_data_services.app_config.config import KeycloakConfig +from renku_data_services.authz.config import AuthzConfig +from renku_data_services.db_config.config import DBConfig +from renku_data_services.solr.solr_client import SolrClientConfig + + +@dataclass +class PosthogConfig: + """Configuration for posthog.""" + + enabled: bool + api_key: str + host: str + environment: str + + @classmethod + def from_env( + cls, + ) -> PosthogConfig: + """Create posthog config from environment variables.""" + enabled = os.environ.get("POSTHOG_ENABLED", "false").lower() == "true" + api_key = os.environ.get("POSTHOG_API_KEY", "") + host = os.environ.get("POSTHOG_HOST", "") + environment = os.environ.get("POSTHOG_ENVIRONMENT", "development") + + return cls(enabled, api_key, host, environment) + + +@dataclass +class Config: + """Configuration for data tasks.""" + + db: DBConfig + solr: SolrClientConfig + posthog: PosthogConfig + authz: AuthzConfig + keycloak: KeycloakConfig | None + dummy_stores: bool + max_retry_wait_seconds: int + main_log_interval_seconds: int + tcp_host: str + tcp_port: int + short_task_period_s: int + long_task_period_s: int + + @classmethod + def from_env(cls) -> Config: + """Creates a config object from environment variables.""" + + dummy_stores = os.environ.get("DUMMY_STORES", "false").lower() == 
"true" + + max_retry = int(os.environ.get("MAX_RETRY_WAIT_SECONDS", "120")) + main_tick = int(os.environ.get("MAIN_LOG_INTERVAL_SECONDS", "300")) + solr_config = SolrClientConfig.from_env() + posthog_config = PosthogConfig.from_env() + tcp_host = os.environ.get("TCP_HOST", "127.0.0.1") + tcp_port = int(os.environ.get("TCP_PORT", "8001")) + + short_task_period = int(os.environ.get("SHORT_TASK_PERIOD_S", 2 * 60)) + long_task_period = int(os.environ.get("LONG_TASK_PERIOD_S", 3 * 60 * 60)) + + authz = AuthzConfig.from_env() + + keycloak = None if dummy_stores else KeycloakConfig.from_env() + return Config( + db=DBConfig.from_env(), + max_retry_wait_seconds=max_retry, + main_log_interval_seconds=main_tick, + solr=solr_config, + posthog=posthog_config, + authz=authz, + keycloak=keycloak, + tcp_host=tcp_host, + tcp_port=tcp_port, + short_task_period_s=short_task_period, + long_task_period_s=long_task_period, + dummy_stores=dummy_stores, + ) diff --git a/bases/renku_data_services/data_tasks/dependencies.py b/bases/renku_data_services/data_tasks/dependencies.py new file mode 100644 index 000000000..402c73b9f --- /dev/null +++ b/bases/renku_data_services/data_tasks/dependencies.py @@ -0,0 +1,87 @@ +"""Dependency management for data tasks.""" + +from dataclasses import dataclass + +from renku_data_services.authz.authz import Authz +from renku_data_services.data_tasks.config import Config +from renku_data_services.metrics.db import MetricsRepository +from renku_data_services.namespace.db import GroupRepository +from renku_data_services.project.db import ProjectRepository +from renku_data_services.search.db import SearchUpdatesRepo +from renku_data_services.users.db import UserRepo, UsersSync +from renku_data_services.users.dummy_kc_api import DummyKeycloakAPI +from renku_data_services.users.kc_api import IKeycloakAPI, KeycloakAPI +from renku_data_services.users.models import UnsavedUserInfo + + +@dataclass +class DependencyManager: + """Configuration for the Data service.""" + + config: Config + search_updates_repo: SearchUpdatesRepo + metrics_repo: MetricsRepository + group_repo: GroupRepository + project_repo: ProjectRepository + authz: Authz + syncer: UsersSync + kc_api: IKeycloakAPI + + @classmethod + def from_env(cls, cfg: Config | None = None) -> "DependencyManager": + """Create a config from environment variables.""" + if cfg is None: + cfg = Config.from_env() + search_updates_repo = SearchUpdatesRepo(cfg.db.async_session_maker) + metrics_repo = MetricsRepository(cfg.db.async_session_maker) + authz = Authz(cfg.authz) + group_repo = GroupRepository( + cfg.db.async_session_maker, + group_authz=authz, + search_updates_repo=search_updates_repo, + ) + project_repo = ProjectRepository( + session_maker=cfg.db.async_session_maker, + group_repo=group_repo, + search_updates_repo=search_updates_repo, + authz=authz, + ) + user_repo = UserRepo( + session_maker=cfg.db.async_session_maker, + group_repo=group_repo, + search_updates_repo=search_updates_repo, + encryption_key=None, + authz=authz, + ) + syncer = UsersSync( + cfg.db.async_session_maker, + group_repo=group_repo, + user_repo=user_repo, + authz=authz, + ) + kc_api: IKeycloakAPI + if cfg.dummy_stores: + dummy_users = [ + UnsavedUserInfo(id="user1", first_name="user1", last_name="doe", email="user1@doe.com"), + UnsavedUserInfo(id="user2", first_name="user2", last_name="doe", email="user2@doe.com"), + ] + kc_api = DummyKeycloakAPI(users=[i.to_keycloak_dict() for i in dummy_users]) + else: + assert cfg.keycloak is not None + kc_api = KeycloakAPI( + 
keycloak_url=cfg.keycloak.url,
+                client_id=cfg.keycloak.client_id,
+                client_secret=cfg.keycloak.client_secret,
+                realm=cfg.keycloak.realm,
+            )
+
+        return cls(
+            config=cfg,
+            search_updates_repo=search_updates_repo,
+            metrics_repo=metrics_repo,
+            group_repo=group_repo,
+            project_repo=project_repo,
+            authz=authz,
+            syncer=syncer,
+            kc_api=kc_api,
+        )
diff --git a/bases/renku_data_services/data_tasks/main.py b/bases/renku_data_services/data_tasks/main.py
new file mode 100644
index 000000000..232d131d9
--- /dev/null
+++ b/bases/renku_data_services/data_tasks/main.py
@@ -0,0 +1,45 @@
+"""The entrypoint for the data tasks application."""
+
+import asyncio
+import logging
+
+import uvloop
+
+from renku_data_services.app_config.logging import getLogger
+from renku_data_services.data_tasks.dependencies import DependencyManager
+from renku_data_services.data_tasks.task_defs import all_tasks
+from renku_data_services.data_tasks.taskman import TaskDefininions, TaskManager
+from renku_data_services.data_tasks.tcp_handler import TcpHandler
+
+logger = getLogger(__name__)
+
+
+async def log_tasks(logger: logging.Logger, tm: TaskManager, interval: int) -> None:
+    """Log the currently running tasks every `interval` seconds."""
+    while interval > 0:
+        await asyncio.sleep(interval)
+        tasks = tm.current_tasks()
+        tasks.sort(key=lambda e: e.name)
+        lines = "\n".join([f"- {e.name}: since {e.started} ({e.restarts} restarts)" for e in tasks])
+        logger.info(f"********* Tasks *********\n{lines}")
+
+
+async def main() -> None:
+    """Data tasks entry point."""
+    dm = DependencyManager.from_env()
+    logger.info(f"Config: {dm.config}")
+
+    tm = TaskManager(dm.config.max_retry_wait_seconds)
+    internal_tasks = TaskDefininions({"_log_tasks": lambda: log_tasks(logger, tm, dm.config.main_log_interval_seconds)})
+    logger.info("Tasks starting...")
+    tm.start_all(all_tasks(dm).merge(internal_tasks))
+
+    logger.info(f"Starting TCP server at {dm.config.tcp_host}:{dm.config.tcp_port}")
+    tcp_handler = TcpHandler(tm)
+    server = await asyncio.start_server(tcp_handler.run, dm.config.tcp_host, dm.config.tcp_port)
+    async with server:
+        await server.serve_forever()
+
+
+if __name__ == "__main__":
+    uvloop.run(main())
diff --git a/components/renku_data_services/message_queue/py.typed b/bases/renku_data_services/data_tasks/py.typed
similarity index 100%
rename from components/renku_data_services/message_queue/py.typed
rename to bases/renku_data_services/data_tasks/py.typed
diff --git a/bases/renku_data_services/data_tasks/task_defs.py b/bases/renku_data_services/data_tasks/task_defs.py
new file mode 100644
index 000000000..b5696dddd
--- /dev/null
+++ b/bases/renku_data_services/data_tasks/task_defs.py
@@ -0,0 +1,406 @@
+"""The task definitions in the form of coroutines."""
+
+import asyncio
+
+from authzed.api.v1 import (
+    Consistency,
+    LookupResourcesRequest,
+    ObjectReference,
+    ReadRelationshipsRequest,
+    Relationship,
+    RelationshipFilter,
+    RelationshipUpdate,
+    SubjectFilter,
+    SubjectReference,
+    WriteRelationshipsRequest,
+)
+from ulid import ULID
+
+import renku_data_services.authz.admin_sync as admin_sync
+import renku_data_services.search.core as search_core
+from renku_data_services import errors
+from renku_data_services.app_config import logging
+from renku_data_services.authz.authz import ResourceType, _AuthzConverter, _Relation
+from renku_data_services.authz.models import Scope
+from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId
+from renku_data_services.data_tasks.dependencies import 
DependencyManager
+from renku_data_services.data_tasks.taskman import TaskDefininions
+from renku_data_services.namespace.models import NamespaceKind
+from renku_data_services.solr.solr_client import DefaultSolrClient
+
+logger = logging.getLogger(__name__)
+
+
+async def update_search(dm: DependencyManager) -> None:
+    """Update SOLR with data from the search staging table."""
+    while True:
+        async with DefaultSolrClient(dm.config.solr) as client:
+            await search_core.update_solr(dm.search_updates_repo, client, 20)
+        await asyncio.sleep(1)
+
+
+async def send_metrics_to_posthog(dm: DependencyManager) -> None:
+    """Send pending product metrics to Posthog."""
+    from posthog import Posthog
+
+    posthog = Posthog(
+        api_key=dm.config.posthog.api_key,
+        host=dm.config.posthog.host,
+        sync_mode=True,
+        super_properties={"environment": dm.config.posthog.environment},
+    )
+
+    while True:
+        try:
+            metrics = dm.metrics_repo.get_unprocessed_metrics()
+
+            processed_ids = []
+            async for metric in metrics:
+                try:
+                    posthog.capture(
+                        distinct_id=metric.anonymous_user_id,
+                        timestamp=metric.timestamp,
+                        event=metric.event,
+                        properties=metric.metadata_ or {},
+                        # This is sent to avoid duplicate events if multiple instances of data service are running.
+                        # Posthog deduplicates events with the same timestamp, distinct_id, event, and uuid fields:
+                        # https://github.com/PostHog/posthog/issues/17211#issuecomment-1723136534
+                        uuid=metric.id.to_uuid4(),
+                    )
+                except Exception as e:
+                    logger.error(f"Failed to process metrics event {metric.id}: {e}")
+                else:
+                    processed_ids.append(metric.id)
+
+            await dm.metrics_repo.delete_processed_metrics(processed_ids)
+        except (asyncio.CancelledError, KeyboardInterrupt) as e:
+            logger.warning(f"Exiting: {e}")
+            return
+        else:
+            # NOTE: Sleep 10 seconds between processing cycles
+            await asyncio.sleep(10)
+
+
+async def generate_user_namespaces(dm: DependencyManager) -> None:
+    """Generate namespaces for users if there are none."""
+    while True:
+        try:
+            await dm.group_repo.generate_user_namespaces()
+        except (asyncio.CancelledError, KeyboardInterrupt) as e:
+            logger.warning(f"Exiting: {e}")
+        else:
+            await asyncio.sleep(dm.config.short_task_period_s)
+
+
+async def sync_user_namespaces(dm: DependencyManager) -> None:
+    """List all user namespaces in the database and add them to Authzed and the event queue."""
+    user_namespaces = dm.group_repo._get_user_namespaces()
+    logger.info("Start syncing user namespaces to the authorization DB and message queue")
+    num_authz: int = 0
+    num_events: int = 0
+    num_total: int = 0
+    async for user_namespace in user_namespaces:
+        num_total += 1
+        authz_change = dm.authz._add_user_namespace(user_namespace.namespace)
+        session = dm.config.db.async_session_maker()
+        tx = session.begin()
+        await tx.start()
+        try:
+            await dm.authz.client.WriteRelationships(authz_change.apply)
+            num_authz += 1
+        except Exception as err:
+            # NOTE: We do not roll back the authz changes here because it is OK for something to be in the Authz DB
+            # but missing from the message queue, but not vice versa. 
+ logger.error(f"Failed to sync user namespace {user_namespace} because {err}") + await tx.rollback() + else: + await tx.commit() + finally: + await session.close() + logger.info(f"Wrote authorization changes for {num_authz}/{num_total} user namespaces") + logger.info(f"Wrote to event queue database for {num_events}/{num_total} user namespaces") + + +async def bootstrap_user_namespaces(dm: DependencyManager) -> None: + """Synchronize user namespaces to the authorization database only if none are already present.""" + while True: + try: + rels = aiter( + dm.authz.client.ReadRelationships( + ReadRelationshipsRequest( + relationship_filter=RelationshipFilter( + resource_type=ResourceType.user_namespace.value, optional_relation=_Relation.owner.value + ) + ) + ) + ) + num_rels = 0 + for _ in range(5): + if await anext(rels, None) is not None: + num_rels += 1 + if num_rels >= 5: + logger.info( + "Found at least 5 user namespace in the authorization database, " + "will not sync user namespaces to authorization." + ) + return + await sync_user_namespaces(dm) + except (asyncio.CancelledError, KeyboardInterrupt) as e: + logger.warning(f"Exiting: {e}") + else: + if dm.config.dummy_stores: + # only run once in tests + return + await asyncio.sleep(dm.config.short_task_period_s) + + +async def fix_mismatched_project_namespace_ids(dm: DependencyManager) -> None: + """Fixes a problem where the project namespace relationship for projects has the wrong group ID.""" + while True: + try: + api_user = InternalServiceAdmin(id=ServiceAdminId.migrations) + res = dm.authz.client.ReadRelationships( + ReadRelationshipsRequest( + consistency=Consistency(fully_consistent=True), + relationship_filter=RelationshipFilter( + resource_type=ResourceType.project, + optional_relation=_Relation.project_namespace.value, + optional_subject_filter=SubjectFilter(subject_type=ResourceType.group.value), + ), + ) + ) + async for rel in res: + logger.info(f"Checking project namespace - group relation {rel} for correct group ID") + project_id = rel.relationship.resource.object_id + try: + project = await dm.project_repo.get_project(api_user, project_id) + except errors.MissingResourceError: + logger.info(f"Couldn't find project {project_id}, deleting relation") + await dm.authz.client.WriteRelationships( + WriteRelationshipsRequest( + updates=[ + RelationshipUpdate( + operation=RelationshipUpdate.OPERATION_DELETE, + relationship=rel.relationship, + ), + ] + ) + ) + continue + + if project.namespace.kind != NamespaceKind.group: + continue + correct_group_id = project.namespace.underlying_resource_id + authzed_group_id = rel.relationship.subject.object.object_id + if authzed_group_id != correct_group_id: + logger.info( + f"The project namespace ID in Authzed {authzed_group_id} " + f"does not match the expected group ID {correct_group_id}, correcting it..." 
+                    )
+                    await dm.authz.client.WriteRelationships(
+                        WriteRelationshipsRequest(
+                            updates=[
+                                RelationshipUpdate(
+                                    operation=RelationshipUpdate.OPERATION_TOUCH,
+                                    relationship=Relationship(
+                                        resource=rel.relationship.resource,
+                                        relation=rel.relationship.relation,
+                                        subject=SubjectReference(
+                                            object=ObjectReference(
+                                                object_type=ResourceType.group.value, object_id=str(correct_group_id)
+                                            )
+                                        ),
+                                    ),
+                                ),
+                                RelationshipUpdate(
+                                    operation=RelationshipUpdate.OPERATION_DELETE,
+                                    relationship=rel.relationship,
+                                ),
+                            ]
+                        )
+                    )
+        except (asyncio.CancelledError, KeyboardInterrupt) as e:
+            logger.warning(f"Exiting: {e}")
+            return
+        else:
+            if dm.config.dummy_stores:
+                # only run once in tests
+                return
+            await asyncio.sleep(dm.config.short_task_period_s)
+
+
+async def migrate_groups_make_all_public(dm: DependencyManager) -> None:
+    """Update existing groups to make them public."""
+    while True:
+        try:
+            all_groups = dm.authz.client.ReadRelationships(
+                ReadRelationshipsRequest(
+                    relationship_filter=RelationshipFilter(
+                        resource_type=ResourceType.group.value,
+                        optional_relation=_Relation.group_platform.value,
+                    )
+                )
+            )
+            all_group_ids: set[str] = set()
+            async for group in all_groups:
+                all_group_ids.add(group.relationship.resource.object_id)
+            logger.info(f"All groups = {len(all_group_ids)}")
+            logger.info(f"All groups = {all_group_ids}")
+
+            public_groups = dm.authz.client.LookupResources(
+                LookupResourcesRequest(
+                    resource_object_type=ResourceType.group.value,
+                    permission=Scope.READ.value,
+                    subject=SubjectReference(object=_AuthzConverter.anonymous_user()),
+                )
+            )
+            public_group_ids: set[str] = set()
+            async for group in public_groups:
+                public_group_ids.add(group.resource_object_id)
+            logger.info(f"Public groups = {len(public_group_ids)}")
+            logger.info(f"Public groups = {public_group_ids}")
+
+            groups_to_process = all_group_ids - public_group_ids
+            logger.info(f"Groups to process = {groups_to_process}")
+
+            all_users = SubjectReference(object=_AuthzConverter.all_users())
+            all_anon_users = SubjectReference(object=_AuthzConverter.anonymous_users())
+            for group_id in groups_to_process:
+                group_res = _AuthzConverter.group(ULID.from_str(group_id))
+                all_users_are_viewers = Relationship(
+                    resource=group_res,
+                    relation=_Relation.public_viewer.value,
+                    subject=all_users,
+                )
+                all_anon_users_are_viewers = Relationship(
+                    resource=group_res,
+                    relation=_Relation.public_viewer.value,
+                    subject=all_anon_users,
+                )
+                authz_change = WriteRelationshipsRequest(
+                    updates=[
+                        RelationshipUpdate(operation=RelationshipUpdate.OPERATION_TOUCH, relationship=rel)
+                        for rel in [all_users_are_viewers, all_anon_users_are_viewers]
+                    ]
+                )
+                await dm.authz.client.WriteRelationships(authz_change)
+                logger.info(f"Made group {group_id} public")
+        except (asyncio.CancelledError, KeyboardInterrupt) as e:
+            logger.warning(f"Exiting: {e}")
+            return
+        else:
+            if dm.config.dummy_stores:
+                # only run once in tests
+                return
+            await asyncio.sleep(dm.config.short_task_period_s)
+
+
+async def migrate_user_namespaces_make_all_public(dm: DependencyManager) -> None:
+    """Update existing user namespaces to make them public."""
+    while True:
+        try:
+            all_user_namespaces = dm.authz.client.ReadRelationships(
+                ReadRelationshipsRequest(
+                    relationship_filter=RelationshipFilter(
+                        resource_type=ResourceType.user_namespace.value,
+                        optional_relation=_Relation.user_namespace_platform.value,
+                    )
+                )
+            )
+            all_user_namespace_ids: set[str] = set()
+            async for ns in all_user_namespaces:
+                all_user_namespace_ids.add(ns.relationship.resource.object_id)
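+            # Same set-difference pattern as the group migration above: everything
+            # minus what anonymous users can already READ (via LookupResources)
+            # is what still needs the public_viewer relationships.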
+            logger.info(f"All user namespaces = {len(all_user_namespace_ids)}")
+            logger.info(f"All user namespaces = {all_user_namespace_ids}")
+
+            public_user_namespaces = dm.authz.client.LookupResources(
+                LookupResourcesRequest(
+                    resource_object_type=ResourceType.user_namespace.value,
+                    permission=Scope.READ.value,
+                    subject=SubjectReference(object=_AuthzConverter.anonymous_user()),
+                )
+            )
+            public_user_namespace_ids: set[str] = set()
+            async for ns in public_user_namespaces:
+                public_user_namespace_ids.add(ns.resource_object_id)
+            logger.info(f"Public user namespaces = {len(public_user_namespace_ids)}")
+            logger.info(f"Public user namespaces = {public_user_namespace_ids}")
+
+            namespaces_to_process = all_user_namespace_ids - public_user_namespace_ids
+            logger.info(f"User namespaces to process = {namespaces_to_process}")
+
+            all_users = SubjectReference(object=_AuthzConverter.all_users())
+            all_anon_users = SubjectReference(object=_AuthzConverter.anonymous_users())
+            for ns_id in namespaces_to_process:
+                namespace_res = _AuthzConverter.user_namespace(ULID.from_str(ns_id))
+                all_users_are_viewers = Relationship(
+                    resource=namespace_res,
+                    relation=_Relation.public_viewer.value,
+                    subject=all_users,
+                )
+                all_anon_users_are_viewers = Relationship(
+                    resource=namespace_res,
+                    relation=_Relation.public_viewer.value,
+                    subject=all_anon_users,
+                )
+                authz_change = WriteRelationshipsRequest(
+                    updates=[
+                        RelationshipUpdate(operation=RelationshipUpdate.OPERATION_TOUCH, relationship=rel)
+                        for rel in [all_users_are_viewers, all_anon_users_are_viewers]
+                    ]
+                )
+                await dm.authz.client.WriteRelationships(authz_change)
+                logger.info(f"Made user namespace {ns_id} public")
+        except (asyncio.CancelledError, KeyboardInterrupt) as e:
+            logger.warning(f"Exiting: {e}")
+            return
+        else:
+            if dm.config.dummy_stores:
+                # only run once in tests
+                return
+            await asyncio.sleep(dm.config.short_task_period_s)
+
+
+async def users_sync(dm: DependencyManager) -> None:
+    """Sync all users from Keycloak."""
+    while True:
+        try:
+            await dm.syncer.users_sync(dm.kc_api)
+        except (asyncio.CancelledError, KeyboardInterrupt) as e:
+            logger.warning(f"Exiting: {e}")
+            return
+        else:
+            await asyncio.sleep(dm.config.long_task_period_s)
+
+
+async def sync_admins_from_keycloak(dm: DependencyManager) -> None:
+    """Sync all admins from Keycloak."""
+    while True:
+        try:
+            await admin_sync.sync_admins_from_keycloak(dm.kc_api, dm.authz)
+        except (asyncio.CancelledError, KeyboardInterrupt) as e:
+            logger.warning(f"Exiting: {e}")
+            return
+        else:
+            await asyncio.sleep(dm.config.long_task_period_s)
+
+
+def all_tasks(dm: DependencyManager) -> TaskDefininions:
+    """A dict of task factories to be managed in main."""
+    # Impl. note: We pass the entire config to the coroutines because, should
+    # such a task fail, it will be restarted, which means the coroutine is
+    # re-created. In this case it is better to also re-create its entire
+    # state. If we passed already-created repositories or other services (and
+    # they are not stateless) we might capture that state and possibly not
+    # recover by re-entering the coroutine.
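+    #
+    # Restart behaviour is implemented in taskman.TaskManager below: after a
+    # failure the wait before the next restart is min(2**restarts,
+    # max_retry_wait_seconds), i.e. it doubles from 1s upwards until it hits
+    # the configured cap.
+    #
+    # Adding a task is then just another factory entry, e.g. (illustrative):
+    #
+    #     "my_task": lambda: my_task(dm),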
+ return TaskDefininions( + { + "update_search": lambda: update_search(dm), + "send_product_metrics": lambda: send_metrics_to_posthog(dm), + "generate_user_namespace": lambda: generate_user_namespaces(dm), + "bootstrap_user_namespaces": lambda: bootstrap_user_namespaces(dm), + "fix_mismatched_project_namespace_ids": lambda: fix_mismatched_project_namespace_ids(dm), + "migrate_groups_make_all_public": lambda: migrate_groups_make_all_public(dm), + "migrate_user_namespaces_make_all_public": lambda: migrate_user_namespaces_make_all_public(dm), + "users_sync": lambda: users_sync(dm), + "sync_admins_from_keycloak": lambda: sync_admins_from_keycloak(dm), + } + ) diff --git a/bases/renku_data_services/data_tasks/taskman.py b/bases/renku_data_services/data_tasks/taskman.py new file mode 100644 index 000000000..24abb48aa --- /dev/null +++ b/bases/renku_data_services/data_tasks/taskman.py @@ -0,0 +1,205 @@ +"""A simple task manager.""" + +from __future__ import annotations + +import asyncio +import math +import sys +from asyncio.tasks import Task +from collections.abc import Callable, Coroutine, Iterator +from dataclasses import dataclass +from datetime import datetime, timedelta +from typing import Any, final + +from renku_data_services.app_config import logging + +logger = logging.getLogger(__name__) + +type TaskFactory = Callable[[], Coroutine[Any, Any, None]] +"""A function creating a coroutine.""" + + +@final +class TaskDefininions: + """Task definitions.""" + + def __init__(self, defs: dict[str, TaskFactory]) -> None: + self.__task_defs = defs + + @classmethod + def single(cls, name: str, tf: TaskFactory) -> TaskDefininions: + """Create a TaskDefinition for the given single task.""" + return TaskDefininions({name: tf}) + + @property + def tasks(self) -> Iterator[tuple[str, TaskFactory]]: + """Return the set of tasks.""" + return iter(self.__task_defs.items()) + + def merge(self, other: TaskDefininions) -> TaskDefininions: + """Create a new definition merging this with other.""" + return TaskDefininions(self.__task_defs | other.__task_defs) + + +@final +@dataclass(frozen=True) +class TaskView: + """Information about a running task.""" + + name: str + started: datetime + restarts: int + + +@final +@dataclass +class _TaskContext: + """Information (internal) about a running task.""" + + name: str + task: Task[None] + started: datetime + restarts: int + + def inc_restarts(self) -> None: + """Increments the restart counter.""" + self.restarts = self.restarts + 1 + + def reset_restarts(self) -> None: + """Resets the restarts counter to 0.""" + self.restarts = 0 + + def running_time(self, ref: datetime | None = None) -> timedelta: + """Return the time the task is running.""" + if ref is None: + ref = datetime.now() + return ref - self.started + + def to_view(self) -> TaskView: + """Convert this into a view object.""" + return TaskView(self.name, self.started, self.restarts) + + +@final +class TaskJoin: + """Used to wait for a task to finish.""" + + def __init__(self, tm: TaskManager, name: str) -> None: + self.__task_manager = tm + self.__task_name = name + + def get_view(self) -> TaskView | None: + """Return the current task view.""" + return self.__task_manager.get_task_view(self.__task_name) + + async def join(self, max_wait: float) -> None: + """Wait for this task to finish execution.""" + tv = self.get_view() + counter: int = 0 + max_count: int = sys.maxsize if max_wait <= 0 else math.ceil(max_wait / 0.1) + while tv is not None: + await asyncio.sleep(0.1) + tv = self.get_view() + counter += 1 + if 
counter >= max_count: + raise TimeoutError(f"Task is still running, after {max_wait}s") + + +@final +class TaskManager: + """Maintains state for currently running tasks associated by their name.""" + + def __init__(self, max_retry_wait_seconds: int) -> None: + self.__running_tasks: dict[str, _TaskContext] = {} + self.__max_retry_wait_seconds = max_retry_wait_seconds + + def start_all(self, task_defs: TaskDefininions, start_time: datetime | None = None) -> None: + """Registers all tasks.""" + if start_time is None: + start_time = datetime.now() + now = start_time + for name, tf in task_defs.tasks: + self.start(name, tf, now) + + def start(self, name: str, tf: TaskFactory, now: datetime | None = None) -> None: + """Start a task associated to the given name.""" + if now is None: + now = datetime.now() + if name in self.__running_tasks: + logger.warning(f"{name}: not starting task, it is already running.") + else: + self.__start(name, tf, now) + + def __start(self, name: str, tf: TaskFactory, now: datetime) -> None: + wt = self.__wrap_task(name, tf) + logger.info(f"{name}: Starting...") + t = asyncio.create_task(wt, name=name) + ctx = _TaskContext(name=name, task=t, started=now, restarts=0) + self.__running_tasks.update({name: ctx}) + t.add_done_callback(lambda tt: self.__remove_running(tt.get_name())) + + def current_tasks(self) -> list[TaskView]: + """Return a list of currently running tasks.""" + return [e.to_view() for e in self.__running_tasks.values()] + + def get_task_view(self, name: str) -> TaskView | None: + """Return information about a currently running task.""" + t = self.__running_tasks.get(name) + if t is not None: + return t.to_view() + else: + return None + + def get_task_join(self, name: str) -> TaskJoin: + """Returns a TaskJoin object for the given task.""" + return TaskJoin(self, name) + + def reset_restarts(self, name: str) -> None: + """Resets the restarts counter to 0.""" + tc = self.__running_tasks.get(name) + if tc is not None: + tc.reset_restarts() + + def cancel(self, name: str) -> TaskJoin | None: + """Cancel the task with the given name. + + Return a `TaskJoin` object if the task is currently running + and requested to cancel, `None` if there is no task with the + given name. + """ + t = self.__running_tasks.get(name) + if t is None: + return None + else: + logger.info(f"{t.name}: cancelling task") + t.task.cancel() + return self.get_task_join(name) + + def __remove_running(self, name: str) -> None: + v = self.__running_tasks.pop(name, None) + if v is None: + logger.warning(f"Task {name} was expected in running state, but is not found.") + else: + logger.debug(f"{name}: removed from running set") + + async def __wrap_task(self, name: str, tf: TaskFactory) -> None: + while True: + try: + await tf() + ctx = self.__running_tasks.get(name) + if ctx is not None: + logger.info(f"{name}: Finished in {ctx.running_time().seconds}s") + break + except Exception as e: + ctx = self.__running_tasks.get(name) + restarts = 0 + if ctx is not None: + restarts = ctx.restarts + ctx.inc_restarts() + + secs = min(pow(2, restarts), self.__max_retry_wait_seconds) + logger.error( + f"{name}: Failed with {e}. Restarting it in {secs} seconds for the {restarts + 1}. 
time.", + exc_info=e, + ) + await asyncio.sleep(secs) diff --git a/bases/renku_data_services/data_tasks/tcp_handler.py b/bases/renku_data_services/data_tasks/tcp_handler.py new file mode 100644 index 000000000..b77837b62 --- /dev/null +++ b/bases/renku_data_services/data_tasks/tcp_handler.py @@ -0,0 +1,63 @@ +"""Handling the tcp connections.""" + +import re +from asyncio.streams import StreamReader, StreamWriter + +from renku_data_services.data_tasks.taskman import TaskManager + + +class TcpHandler: + """Handles the simple tcp connection.""" + + def __init__(self, tm: TaskManager) -> None: + self.__task_manager = tm + + async def _write_line(self, writer: StreamWriter, line: str) -> None: + try: + writer.write(str.encode(f"{line}\r\n")) + await writer.drain() + except Exception: + pass # nosec B110 + + async def _read_line(self, reader: StreamReader) -> tuple[str, list[str]]: + try: + data = await reader.read(100) + msg = data.decode().strip() + parts = re.split("\\s+", msg) + return (parts[0].lower(), parts[1:]) + except Exception: + return ("", []) + + async def run(self, reader: StreamReader, writer: StreamWriter) -> None: + """Handles a tcp connection.""" + await self._write_line(writer, "Hello, write `help` for help.") + + while True: + (cmd, rest) = await self._read_line(reader) + match cmd: + case "help": + await self._write_line( + writer, + ( + "Commands\r\n" + "- help: this help text\r\n" + "- tasks: list tasks\r\n" + "- reset_restarts [name]: reset the restarts counter" + ), + ) + + case "tasks": + for t in self.__task_manager.current_tasks(): + await self._write_line(writer, f"- {t.name}: since {t.started} ({t.restarts} restarts)") + + case "reset_restarts": + if rest != []: + self.__task_manager.reset_restarts(rest[0]) + else: + for t in self.__task_manager.current_tasks(): + self.__task_manager.reset_restarts(t.name) + await self._write_line(writer, "Ok") + + case _: + await self._write_line(writer, "Good Bye.") + break diff --git a/bases/renku_data_services/k8s_cache/__init__.py b/bases/renku_data_services/k8s_cache/__init__.py new file mode 100644 index 000000000..15a5501a1 --- /dev/null +++ b/bases/renku_data_services/k8s_cache/__init__.py @@ -0,0 +1,5 @@ +"""Kubernetes Cache.""" + +import renku_data_services.app_config.logging as logging + +logging.configure_logging() diff --git a/bases/renku_data_services/k8s_cache/config.py b/bases/renku_data_services/k8s_cache/config.py new file mode 100644 index 000000000..a9e4d7f20 --- /dev/null +++ b/bases/renku_data_services/k8s_cache/config.py @@ -0,0 +1,76 @@ +"""K8s cache config.""" + +from dataclasses import dataclass +from typing import Self + +from kubernetes.client.api_client import os + +from renku_data_services.db_config.config import DBConfig + + +@dataclass +class _K8sConfig: + """Defines the k8s client and namespace.""" + + # This is used only for the main/local/default cluster + renku_namespace: str + kube_config_root: str + + @classmethod + def from_env(cls) -> Self: + return cls( + renku_namespace=os.environ.get("KUBERNETES_NAMESPACE", "default"), + kube_config_root=os.environ.get("K8S_CONFIGS_ROOT", "/secrets/kube_configs"), + ) + + +@dataclass +class _MetricsConfig: + """Configuration for metrics.""" + + enabled: bool + + @classmethod + def from_env(cls) -> "_MetricsConfig": + """Create metrics config from environment variables.""" + enabled = os.environ.get("POSTHOG_ENABLED", "false").lower() == "true" + return cls(enabled) + + +@dataclass +class _ImageBuilderConfig: + """Configuration for image builders.""" + + 
enabled: bool + + @classmethod + def from_env(cls) -> "_ImageBuilderConfig": + """Load values from environment variables.""" + enabled = os.environ.get("IMAGE_BUILDERS_ENABLED", "false").lower() == "true" + return cls(enabled=enabled) + + +@dataclass +class Config: + """K8s cache config.""" + + db: DBConfig + k8s: _K8sConfig + metrics: _MetricsConfig + image_builders: _ImageBuilderConfig + + @classmethod + def from_env(cls) -> "Config": + """Create a config from environment variables.""" + db = DBConfig.from_env() + k8s = _K8sConfig.from_env() + metrics = _MetricsConfig.from_env() + + image_builders = _ImageBuilderConfig.from_env() + + return cls( + db=db, + k8s=k8s, + metrics=metrics, + image_builders=image_builders, + ) diff --git a/bases/renku_data_services/k8s_cache/dependencies.py b/bases/renku_data_services/k8s_cache/dependencies.py new file mode 100644 index 000000000..6f934cf34 --- /dev/null +++ b/bases/renku_data_services/k8s_cache/dependencies.py @@ -0,0 +1,80 @@ +"""Dependency management for k8s cache.""" + +from dataclasses import dataclass, field + +from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository +from renku_data_services.k8s.clients import DummyCoreClient, DummySchedulingClient +from renku_data_services.k8s.quota import QuotaRepository +from renku_data_services.k8s_cache.config import Config +from renku_data_services.k8s_watcher.db import K8sDbCache +from renku_data_services.metrics.core import StagingMetricsService +from renku_data_services.metrics.db import MetricsRepository + + +@dataclass +class DependencyManager: + """K8s cache config.""" + + config: Config + + quota_repo: QuotaRepository + _k8s_cache: K8sDbCache | None = None + _metrics_repo: MetricsRepository | None = field(default=None, repr=False, init=False) + _metrics: StagingMetricsService | None = field(default=None, repr=False, init=False) + _rp_repo: ResourcePoolRepository | None = field(default=None, repr=False, init=False) + _cluster_repo: ClusterRepository | None = field(default=None, repr=False, init=False) + + @property + def metrics_repo(self) -> MetricsRepository: + """The DB adapter for metrics.""" + if not self._metrics_repo: + self._metrics_repo = MetricsRepository(session_maker=self.config.db.async_session_maker) + return self._metrics_repo + + @property + def metrics(self) -> StagingMetricsService: + """The metrics service interface.""" + if not self._metrics: + self._metrics = StagingMetricsService(enabled=self.config.metrics.enabled, metrics_repo=self.metrics_repo) + return self._metrics + + @property + def rp_repo(self) -> ResourcePoolRepository: + """The resource pool repository.""" + if not self._rp_repo: + self._rp_repo = ResourcePoolRepository( + session_maker=self.config.db.async_session_maker, quotas_repo=self.quota_repo + ) + return self._rp_repo + + def cluster_repo(self) -> ClusterRepository: + """The resource pool repository.""" + if not self._cluster_repo: + self._cluster_repo = ClusterRepository(session_maker=self.config.db.async_session_maker) + return self._cluster_repo + + @property + def k8s_cache(self) -> K8sDbCache: + """The DB adapter for the k8s cache.""" + if not self._k8s_cache: + self._k8s_cache = K8sDbCache( + session_maker=self.config.db.async_session_maker, + ) + return self._k8s_cache + + @classmethod + def from_env(cls) -> "DependencyManager": + """Create a config from environment variables.""" + config = Config.from_env() + + # NOTE: We only need the QuotaRepository to instantiate the ResourcePoolRepository which is used to get + # the 
resource class and pool information for metrics. We don't need quota information for metrics at all + # so we use the dummy client for quotas here as we don't actually access k8s, just the db. + quota_repo = QuotaRepository( + DummyCoreClient({}, {}), DummySchedulingClient({}), namespace=config.k8s.renku_namespace + ) + + return cls( + config=config, + quota_repo=quota_repo, + ) diff --git a/bases/renku_data_services/k8s_cache/main.py b/bases/renku_data_services/k8s_cache/main.py new file mode 100644 index 000000000..5ce82e3d8 --- /dev/null +++ b/bases/renku_data_services/k8s_cache/main.py @@ -0,0 +1,49 @@ +"""The entrypoint for the k8s cache service.""" + +import asyncio + +import kr8s + +from renku_data_services.app_config import logging +from renku_data_services.k8s.config import get_clusters +from renku_data_services.k8s_cache.dependencies import DependencyManager +from renku_data_services.k8s_watcher import K8sWatcher, k8s_object_handler +from renku_data_services.notebooks.constants import AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK +from renku_data_services.session.constants import BUILD_RUN_GVK, TASK_RUN_GVK + +logger = logging.getLogger(__name__) + + +async def main() -> None: + """K8s cache entrypoint.""" + + dm = DependencyManager.from_env() + + kr8s_api = await kr8s.asyncio.api() + + clusters = await get_clusters( + kube_conf_root_dir=dm.config.k8s.kube_config_root, + namespace=dm.config.k8s.renku_namespace, + api=kr8s_api, + cluster_rp=dm.cluster_repo(), + ) + + kinds = [AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK] + if dm.config.image_builders.enabled: + kinds.extend([BUILD_RUN_GVK, TASK_RUN_GVK]) + watcher = K8sWatcher( + handler=k8s_object_handler(dm.k8s_cache, dm.metrics, rp_repo=dm.rp_repo), + clusters={c.id: c for c in clusters}, + kinds=kinds, + db_cache=dm.k8s_cache, + ) + await watcher.start() + logger.info("started watching resources") + # create file for liveness probe + with open("/tmp/cache_ready", "w") as f: # nosec B108 + f.write("ready") + await watcher.wait() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/components/renku_data_services/message_queue/avro_models/__init__.py b/bases/renku_data_services/k8s_cache/py.typed similarity index 100% rename from components/renku_data_services/message_queue/avro_models/__init__.py rename to bases/renku_data_services/k8s_cache/py.typed diff --git a/bases/renku_data_services/secrets_storage_api/__init__.py b/bases/renku_data_services/secrets_storage_api/__init__.py index 5d7888506..fdc4c0f85 100644 --- a/bases/renku_data_services/secrets_storage_api/__init__.py +++ b/bases/renku_data_services/secrets_storage_api/__init__.py @@ -1 +1,5 @@ """Secrets storage.""" + +import renku_data_services.app_config.logging as logging + +logging.configure_logging() diff --git a/bases/renku_data_services/secrets_storage_api/app.py b/bases/renku_data_services/secrets_storage_api/app.py index c8793a76e..00d44cbcc 100644 --- a/bases/renku_data_services/secrets_storage_api/app.py +++ b/bases/renku_data_services/secrets_storage_api/app.py @@ -6,22 +6,22 @@ from renku_data_services.base_api.misc import MiscBP from renku_data_services.secrets import apispec from renku_data_services.secrets.blueprints import K8sSecretsBP -from renku_data_services.secrets.config import Config +from renku_data_services.secrets_storage_api.dependencies import DependencyManager -def register_all_handlers(app: Sanic, config: Config) -> Sanic: +def register_all_handlers(app: Sanic, dm: DependencyManager) -> Sanic: """Register all handlers on the 
application.""" url_prefix = "/api/secrets" secrets_storage = K8sSecretsBP( name="secrets_storage_api", url_prefix=url_prefix, - user_secrets_repo=config.user_secrets_repo, - authenticator=config.authenticator, - secret_service_private_key=config.secrets_service_private_key, - previous_secret_service_private_key=config.previous_secrets_service_private_key, - core_client=config.core_client, + user_secrets_repo=dm.user_secrets_repo, + authenticator=dm.authenticator, + secret_service_private_key=dm.config.secrets.private_key, + previous_secret_service_private_key=dm.config.secrets.previous_private_key, + core_client=dm.core_client, ) - misc = MiscBP(name="misc", url_prefix=url_prefix, apispec=config.spec, version=config.version) + misc = MiscBP(name="misc", url_prefix=url_prefix, apispec=dm.config.spec, version=dm.config.version) app.blueprint([secrets_storage.blueprint(), misc.blueprint()]) app.error_handler = CustomErrorHandler(apispec) diff --git a/bases/renku_data_services/secrets_storage_api/config.py b/bases/renku_data_services/secrets_storage_api/config.py new file mode 100644 index 000000000..64ee50176 --- /dev/null +++ b/bases/renku_data_services/secrets_storage_api/config.py @@ -0,0 +1,54 @@ +"""Secrets storage configuration.""" + +import os +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Self + +from yaml import safe_load + +import renku_data_services.secrets +from renku_data_services.app_config import logging +from renku_data_services.app_config.config import KeycloakConfig +from renku_data_services.db_config.config import DBConfig +from renku_data_services.secrets.config import PrivateSecretsConfig + + +@dataclass +class Config: + """Main config for secrets service.""" + + db: DBConfig + secrets: PrivateSecretsConfig + keycloak: KeycloakConfig | None + app_name: str = "secrets_storage" + version: str = "0.0.1" + dummy_stores: bool = False + spec: dict[str, Any] = field(init=False, default_factory=dict) + log_cfg: logging.Config = field(default_factory=logging.Config.from_env) + + def __post_init__(self) -> None: + spec_file = Path(renku_data_services.secrets.__file__).resolve().parent / "api.spec.yaml" + with open(spec_file) as f: + self.spec = safe_load(f) + + @classmethod + def from_env(cls) -> Self: + """Load values from environment.""" + dummy_stores = os.environ.get("DUMMY_STORES", "false").lower() == "true" + db = DBConfig.from_env() + secrets_config = PrivateSecretsConfig.from_env() + version = os.environ.get("VERSION", "0.0.1") + keycloak = None + if not dummy_stores: + keycloak = KeycloakConfig.from_env() + log_cfg = logging.Config.from_env() + + return cls( + db=db, + secrets=secrets_config, + version=version, + keycloak=keycloak, + dummy_stores=dummy_stores, + log_cfg=log_cfg, + ) diff --git a/bases/renku_data_services/secrets_storage_api/dependencies.py b/bases/renku_data_services/secrets_storage_api/dependencies.py new file mode 100644 index 000000000..dd3c684c5 --- /dev/null +++ b/bases/renku_data_services/secrets_storage_api/dependencies.py @@ -0,0 +1,64 @@ +"""Dependencies management of secrets storage.""" + +from dataclasses import dataclass, field + +from jwt import PyJWKClient + +from renku_data_services import base_models, errors +from renku_data_services.authn.dummy import DummyAuthenticator +from renku_data_services.authn.keycloak import KeycloakAuthenticator +from renku_data_services.k8s.client_interfaces import K8sCoreClientInterface +from renku_data_services.k8s.clients import DummyCoreClient, K8sCoreClient 
+from renku_data_services.secrets.db import LowLevelUserSecretsRepo +from renku_data_services.secrets_storage_api.config import Config +from renku_data_services.utils.core import oidc_discovery + + +@dataclass +class DependencyManager: + """Dependencies for secrets service.""" + + authenticator: base_models.Authenticator + config: Config + core_client: K8sCoreClientInterface + _user_secrets_repo: LowLevelUserSecretsRepo | None = field(default=None, repr=False, init=False) + + @property + def user_secrets_repo(self) -> LowLevelUserSecretsRepo: + """The DB adapter for users.""" + if not self._user_secrets_repo: + self._user_secrets_repo = LowLevelUserSecretsRepo( + session_maker=self.config.db.async_session_maker, + ) + return self._user_secrets_repo + + @classmethod + def from_env(cls) -> "DependencyManager": + """Create a config from environment variables.""" + authenticator: base_models.Authenticator + core_client: K8sCoreClientInterface + config = Config.from_env() + + if config.dummy_stores: + authenticator = DummyAuthenticator() + core_client = DummyCoreClient({}, {}) + else: + assert config.keycloak is not None + oidc_disc_data = oidc_discovery(config.keycloak.url, config.keycloak.realm) + jwks_url = oidc_disc_data.get("jwks_uri") + if jwks_url is None: + raise errors.ConfigurationError( + message="The JWKS url for Keycloak cannot be found from the OIDC discovery endpoint." + ) + jwks = PyJWKClient(jwks_url) + if config.keycloak.algorithms is None: + raise errors.ConfigurationError(message="At least one token signature algorithm is required.") + + authenticator = KeycloakAuthenticator(jwks=jwks, algorithms=config.keycloak.algorithms) + core_client = K8sCoreClient() + + return cls( + config=config, + authenticator=authenticator, + core_client=core_client, + ) diff --git a/bases/renku_data_services/secrets_storage_api/main.py b/bases/renku_data_services/secrets_storage_api/main.py index 69ed1de9d..29234f8df 100644 --- a/bases/renku_data_services/secrets_storage_api/main.py +++ b/bases/renku_data_services/secrets_storage_api/main.py @@ -6,33 +6,48 @@ from typing import Any from prometheus_sanic import monitor -from sanic import Sanic +from sanic import Request, Sanic +from sanic.response import BaseHTTPResponse from sanic.worker.loader import AppLoader +from renku_data_services.app_config import logging from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId -from renku_data_services.secrets.config import Config from renku_data_services.secrets.core import rotate_encryption_keys from renku_data_services.secrets_storage_api.app import register_all_handlers +from renku_data_services.secrets_storage_api.dependencies import DependencyManager def create_app() -> Sanic: """Create a Sanic application.""" - config = Config.from_env() - app = Sanic(config.app_name) + dm = DependencyManager.from_env() + app = Sanic(dm.config.app_name) if "COVERAGE_RUN" in environ: app.config.TOUCHUP = False - app = register_all_handlers(app, config) + app = register_all_handlers(app, dm) + + @app.on_request + async def set_request_id(request: Request) -> None: + logging.set_request_id(str(request.id)) + + @app.middleware("response") + async def set_request_id_header(request: Request, response: BaseHTTPResponse) -> None: + response.headers["X-Request-ID"] = request.id @app.main_process_start def main_process_start(app: Sanic) -> None: app.shared_ctx.rotation_lock = Lock() + logging.configure_logging(dm.config.log_cfg) + + @app.before_server_start + async def logging_setup1(app: Sanic) 
-> None: + logging.configure_logging(dm.config.log_cfg) # Setup prometheus monitor(app, endpoint_type="url", multiprocess_mode="all", is_middleware=True).expose_endpoint() async def rotate_encryption_key_listener(app: Sanic) -> None: """Rotate RSA private key.""" - if config.previous_secrets_service_private_key is None: + if dm.config.secrets.previous_private_key is None: return lock = app.shared_ctx.rotation_lock.acquire(block=False) @@ -43,9 +58,9 @@ async def rotate_encryption_key_listener(app: Sanic) -> None: try: await rotate_encryption_keys( InternalServiceAdmin(id=ServiceAdminId.secrets_rotation), - config.secrets_service_private_key, - config.previous_secrets_service_private_key, - config.user_secrets_repo, + dm.config.secrets.private_key, + dm.config.secrets.previous_private_key, + dm.user_secrets_repo, ) finally: app.shared_ctx.rotation_lock.release() diff --git a/chartpress.yaml b/chartpress.yaml index 2dde9dfd3..f7c2886bc 100644 --- a/chartpress.yaml +++ b/chartpress.yaml @@ -15,11 +15,15 @@ charts: contextPath: . dockerfilePath: projects/renku_data_service/Dockerfile valuesPath: dataService.image - data-service-background-jobs: + data-service-data-tasks: contextPath: . - dockerfilePath: projects/background_jobs/Dockerfile - valuesPath: dataService.backgroundJobs.image + dockerfilePath: projects/renku_data_tasks/Dockerfile + valuesPath: dataService.dataTasks.image secrets-storage: contextPath: . dockerfilePath: projects/secrets_storage/Dockerfile valuesPath: secretsStorage.image + data-service-k8s-watcher: + contextPath: . + dockerfilePath: projects/k8s_watcher/Dockerfile + valuesPath: dataService.k8sWatcher.image diff --git a/components/renku_data_services/app_config/__init__.py b/components/renku_data_services/app_config/__init__.py index 584e69b35..d2f88bdfa 100644 --- a/components/renku_data_services/app_config/__init__.py +++ b/components/renku_data_services/app_config/__init__.py @@ -1,3 +1 @@ """Configuration parsing utilities.""" - -from renku_data_services.app_config.config import Config # noqa: F401 diff --git a/components/renku_data_services/app_config/config.py b/components/renku_data_services/app_config/config.py index fba76ef17..3d31afcc5 100644 --- a/components/renku_data_services/app_config/config.py +++ b/components/renku_data_services/app_config/config.py @@ -9,95 +9,43 @@ instantiated multiple times without creating multiple database connections. 
""" -import functools import os -import secrets -from dataclasses import dataclass, field -from pathlib import Path -from typing import Any, Optional +from dataclasses import dataclass -from authlib.integrations.httpx_client import AsyncOAuth2Client -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.asymmetric import rsa -from cryptography.hazmat.primitives.asymmetric.types import PublicKeyTypes -from jwt import PyJWKClient -from yaml import safe_load - -import renku_data_services.base_models as base_models -import renku_data_services.connected_services -import renku_data_services.crc -import renku_data_services.data_connectors -import renku_data_services.platform -import renku_data_services.repositories -import renku_data_services.storage -import renku_data_services.users from renku_data_services import errors -from renku_data_services.authn.dummy import DummyAuthenticator, DummyUserStore -from renku_data_services.authn.gitlab import GitlabAuthenticator -from renku_data_services.authn.keycloak import KcUserStore, KeycloakAuthenticator -from renku_data_services.authz.authz import Authz -from renku_data_services.authz.config import AuthzConfig -from renku_data_services.connected_services.db import ConnectedServicesRepository -from renku_data_services.crc import models -from renku_data_services.crc.db import ResourcePoolRepository, UserRepository -from renku_data_services.data_api.server_options import ( - ServerOptions, - ServerOptionsDefaults, - generate_default_resource_pool, -) -from renku_data_services.data_connectors.db import ( - DataConnectorProjectLinkRepository, - DataConnectorRepository, - DataConnectorSecretRepository, -) -from renku_data_services.db_config import DBConfig -from renku_data_services.git.gitlab import DummyGitlabAPI, GitlabAPI -from renku_data_services.k8s.clients import DummyCoreClient, DummySchedulingClient, K8sCoreClient, K8sSchedulingClient -from renku_data_services.k8s.quota import QuotaRepository -from renku_data_services.message_queue.config import RedisConfig -from renku_data_services.message_queue.db import EventRepository, ReprovisioningRepository -from renku_data_services.message_queue.interface import IMessageQueue -from renku_data_services.message_queue.redis_queue import RedisQueue -from renku_data_services.namespace.db import GroupRepository -from renku_data_services.notebooks.config import NotebooksConfig -from renku_data_services.platform.db import PlatformRepository -from renku_data_services.project.db import ProjectMemberRepository, ProjectRepository -from renku_data_services.repositories.db import GitRepositoriesRepository -from renku_data_services.secrets.db import UserSecretsRepo -from renku_data_services.session.db import SessionRepository -from renku_data_services.storage.db import StorageRepository -from renku_data_services.users.config import UserPreferencesConfig -from renku_data_services.users.db import UserPreferencesRepository -from renku_data_services.users.db import UserRepo as KcUserRepo -from renku_data_services.users.dummy_kc_api import DummyKeycloakAPI -from renku_data_services.users.kc_api import IKeycloakAPI, KeycloakAPI -from renku_data_services.users.models import UnsavedUserInfo -from renku_data_services.utils.core import merge_api_specs, oidc_discovery -default_resource_pool = models.ResourcePool( - name="default", - classes=[ - models.ResourceClass( - name="small", - cpu=0.5, - memory=1, - max_storage=20, - gpu=0, - default=True, - ), - models.ResourceClass( - name="large", - 
cpu=1.0, - memory=2, - max_storage=20, - gpu=0, - default=False, - ), - ], - quota=None, - public=True, - default=True, -) + +@dataclass +class KeycloakConfig: + """Configuration values for keycloak.""" + + url: str + realm: str + client_id: str + client_secret: str + algorithms: list[str] | None + + @classmethod + def from_env(cls) -> "KeycloakConfig": + """Load config from environment values.""" + url = os.environ.get("KEYCLOAK_URL") + if url is None: + raise errors.ConfigurationError(message="The Keycloak URL has to be specified.") + url = url.rstrip("/") + realm = os.environ.get("KEYCLOAK_REALM", "Renku") + client_id = os.environ["KEYCLOAK_CLIENT_ID"] + client_secret = os.environ["KEYCLOAK_CLIENT_SECRET"] + algorithms = os.environ.get("KEYCLOAK_TOKEN_SIGNATURE_ALGS") + algorithms_lst = None + if algorithms is not None: + algorithms_lst = [i.strip() for i in algorithms.split(",")] + return cls( + url=url, + realm=realm, + client_id=client_id, + client_secret=client_secret, + algorithms=algorithms_lst, + ) @dataclass @@ -110,472 +58,40 @@ class SentryConfig: sample_rate: float = 0.2 @classmethod - def from_env(cls, prefix: str = "") -> "SentryConfig": + def from_env(cls) -> "SentryConfig": """Create a config from environment variables.""" - enabled = os.environ.get(f"{prefix}SENTRY_ENABLED", "false").lower() == "true" - dsn = os.environ.get(f"{prefix}SENTRY_DSN", "") - environment = os.environ.get(f"{prefix}SENTRY_ENVIRONMENT", "") - sample_rate = float(os.environ.get(f"{prefix}SENTRY_SAMPLE_RATE", "0.2")) + enabled = os.environ.get("SENTRY_ENABLED", "false").lower() == "true" + dsn = os.environ.get("SENTRY_DSN", "") + environment = os.environ.get("SENTRY_ENVIRONMENT", "") + sample_rate = float(os.environ.get("SENTRY_SAMPLE_RATE", "0.2")) return cls(enabled, dsn=dsn, environment=environment, sample_rate=sample_rate) @dataclass -class TrustedProxiesConfig: - """Configuration for trusted reverse proxies.""" +class PosthogConfig: + """Configuration for posthog.""" - proxies_count: int | None = None - real_ip_header: str | None = None + enabled: bool @classmethod - def from_env(cls, prefix: str = "") -> "TrustedProxiesConfig": - """Create a config from environment variables.""" - proxies_count = int(os.environ.get(f"{prefix}PROXIES_COUNT") or "0") - real_ip_header = os.environ.get(f"{prefix}REAL_IP_HEADER") - return cls(proxies_count=proxies_count or None, real_ip_header=real_ip_header or None) + def from_env(cls) -> "PosthogConfig": + """Create posthog config from environment variables.""" + enabled = os.environ.get("POSTHOG_ENABLED", "false").lower() == "true" + return cls(enabled) -@dataclass -class Config: - """Configuration for the Data service.""" - - user_store: base_models.UserStore - authenticator: base_models.Authenticator - gitlab_authenticator: base_models.Authenticator - quota_repo: QuotaRepository - user_preferences_config: UserPreferencesConfig - db: DBConfig - redis: RedisConfig - sentry: SentryConfig - trusted_proxies: TrustedProxiesConfig - gitlab_client: base_models.GitlabAPIProtocol - kc_api: IKeycloakAPI - message_queue: IMessageQueue - gitlab_url: str | None - nb_config: NotebooksConfig - - secrets_service_public_key: rsa.RSAPublicKey - """The public key of the secrets service, used to encrypt user secrets that only it can decrypt.""" - encryption_key: bytes = field(repr=False) - """The encryption key to encrypt user keys at rest in the database.""" - - authz_config: AuthzConfig = field(default_factory=lambda: AuthzConfig.from_env()) - spec: dict[str, Any] = 
field(init=False, repr=False, default_factory=dict) - version: str = "0.0.1" - app_name: str = "renku_data_services" - default_resource_pool_file: Optional[str] = None - default_resource_pool: models.ResourcePool = default_resource_pool - server_options_file: Optional[str] = None - server_defaults_file: Optional[str] = None - async_oauth2_client_class: type[AsyncOAuth2Client] = AsyncOAuth2Client - _user_repo: UserRepository | None = field(default=None, repr=False, init=False) - _rp_repo: ResourcePoolRepository | None = field(default=None, repr=False, init=False) - _storage_repo: StorageRepository | None = field(default=None, repr=False, init=False) - _project_repo: ProjectRepository | None = field(default=None, repr=False, init=False) - _group_repo: GroupRepository | None = field(default=None, repr=False, init=False) - _event_repo: EventRepository | None = field(default=None, repr=False, init=False) - _reprovisioning_repo: ReprovisioningRepository | None = field(default=None, repr=False, init=False) - _session_repo: SessionRepository | None = field(default=None, repr=False, init=False) - _user_preferences_repo: UserPreferencesRepository | None = field(default=None, repr=False, init=False) - _kc_user_repo: KcUserRepo | None = field(default=None, repr=False, init=False) - _user_secrets_repo: UserSecretsRepo | None = field(default=None, repr=False, init=False) - _project_member_repo: ProjectMemberRepository | None = field(default=None, repr=False, init=False) - _connected_services_repo: ConnectedServicesRepository | None = field(default=None, repr=False, init=False) - _git_repositories_repo: GitRepositoriesRepository | None = field(default=None, repr=False, init=False) - _platform_repo: PlatformRepository | None = field(default=None, repr=False, init=False) - _data_connector_repo: DataConnectorRepository | None = field(default=None, repr=False, init=False) - _data_connector_to_project_link_repo: DataConnectorProjectLinkRepository | None = field( - default=None, repr=False, init=False - ) - _data_connector_secret_repo: DataConnectorSecretRepository | None = field(default=None, repr=False, init=False) - - @staticmethod - @functools.cache - def load_apispec() -> dict[str, Any]: - """Load apispec with caching. - - Note: loading these files takes quite some time and is repeated for each test. Having - them cached in this method reduces that time significantly. 
- """ - # NOTE: Read spec files required for Swagger - spec_file = Path(renku_data_services.crc.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - crc_spec = safe_load(f) - - spec_file = Path(renku_data_services.storage.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - storage_spec = safe_load(f) - - spec_file = Path(renku_data_services.users.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - users = safe_load(f) - - spec_file = Path(renku_data_services.project.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - projects = safe_load(f) - - spec_file = Path(renku_data_services.namespace.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - groups = safe_load(f) - - spec_file = Path(renku_data_services.session.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - sessions = safe_load(f) - - spec_file = Path(renku_data_services.connected_services.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - connected_services = safe_load(f) - - spec_file = Path(renku_data_services.repositories.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - repositories = safe_load(f) - - spec_file = Path(renku_data_services.notebooks.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - repositories = safe_load(f) - - spec_file = Path(renku_data_services.platform.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - platform = safe_load(f) - - spec_file = Path(renku_data_services.message_queue.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - search = safe_load(f) - - spec_file = Path(renku_data_services.data_connectors.__file__).resolve().parent / "api.spec.yaml" - with open(spec_file) as f: - data_connectors = safe_load(f) - - return merge_api_specs( - crc_spec, - storage_spec, - users, - projects, - groups, - sessions, - connected_services, - repositories, - platform, - search, - data_connectors, - ) - def __post_init__(self) -> None: - self.spec = self.load_apispec() - - if self.default_resource_pool_file is not None: - with open(self.default_resource_pool_file) as f: - self.default_resource_pool = models.ResourcePool.from_dict(safe_load(f)) - if self.server_defaults_file is not None and self.server_options_file is not None: - with open(self.server_options_file) as f: - options = ServerOptions.model_validate(safe_load(f)) - with open(self.server_defaults_file) as f: - defaults = ServerOptionsDefaults.model_validate(safe_load(f)) - self.default_resource_pool = generate_default_resource_pool(options, defaults) - - self.authz = Authz(self.authz_config) - - @property - def user_repo(self) -> UserRepository: - """The DB adapter for users.""" - if not self._user_repo: - self._user_repo = UserRepository( - session_maker=self.db.async_session_maker, quotas_repo=self.quota_repo, user_repo=self.kc_user_repo - ) - return self._user_repo - - @property - def rp_repo(self) -> ResourcePoolRepository: - """The DB adapter for resource pools.""" - if not self._rp_repo: - self._rp_repo = ResourcePoolRepository( - session_maker=self.db.async_session_maker, quotas_repo=self.quota_repo - ) - return self._rp_repo - - @property - def storage_repo(self) -> StorageRepository: - """The DB adapter for V1 cloud storage configs.""" - if not self._storage_repo: - self._storage_repo = StorageRepository( - session_maker=self.db.async_session_maker, - gitlab_client=self.gitlab_client, - 
user_repo=self.kc_user_repo, - secret_service_public_key=self.secrets_service_public_key, - ) - return self._storage_repo - - @property - def event_repo(self) -> EventRepository: - """The DB adapter for cloud event configs.""" - if not self._event_repo: - self._event_repo = EventRepository( - session_maker=self.db.async_session_maker, message_queue=self.message_queue - ) - return self._event_repo - - @property - def reprovisioning_repo(self) -> ReprovisioningRepository: - """The DB adapter for reprovisioning.""" - if not self._reprovisioning_repo: - self._reprovisioning_repo = ReprovisioningRepository(session_maker=self.db.async_session_maker) - return self._reprovisioning_repo - - @property - def project_repo(self) -> ProjectRepository: - """The DB adapter for Renku native projects.""" - if not self._project_repo: - self._project_repo = ProjectRepository( - session_maker=self.db.async_session_maker, - authz=self.authz, - message_queue=self.message_queue, - event_repo=self.event_repo, - group_repo=self.group_repo, - ) - return self._project_repo - - @property - def project_member_repo(self) -> ProjectMemberRepository: - """The DB adapter for Renku native projects members.""" - if not self._project_member_repo: - self._project_member_repo = ProjectMemberRepository( - session_maker=self.db.async_session_maker, - authz=self.authz, - event_repo=self.event_repo, - message_queue=self.message_queue, - ) - return self._project_member_repo - - @property - def group_repo(self) -> GroupRepository: - """The DB adapter for Renku groups.""" - if not self._group_repo: - self._group_repo = GroupRepository( - session_maker=self.db.async_session_maker, - event_repo=self.event_repo, - group_authz=self.authz, - message_queue=self.message_queue, - ) - return self._group_repo - - @property - def session_repo(self) -> SessionRepository: - """The DB adapter for sessions.""" - if not self._session_repo: - self._session_repo = SessionRepository( - session_maker=self.db.async_session_maker, project_authz=self.authz, resource_pools=self.rp_repo - ) - return self._session_repo - - @property - def user_preferences_repo(self) -> UserPreferencesRepository: - """The DB adapter for user preferences.""" - if not self._user_preferences_repo: - self._user_preferences_repo = UserPreferencesRepository( - session_maker=self.db.async_session_maker, - user_preferences_config=self.user_preferences_config, - ) - return self._user_preferences_repo - - @property - def kc_user_repo(self) -> KcUserRepo: - """The DB adapter for users.""" - if not self._kc_user_repo: - self._kc_user_repo = KcUserRepo( - session_maker=self.db.async_session_maker, - message_queue=self.message_queue, - event_repo=self.event_repo, - group_repo=self.group_repo, - encryption_key=self.encryption_key, - authz=self.authz, - ) - return self._kc_user_repo - - @property - def user_secrets_repo(self) -> UserSecretsRepo: - """The DB adapter for user secrets storage.""" - if not self._user_secrets_repo: - self._user_secrets_repo = UserSecretsRepo( - session_maker=self.db.async_session_maker, - ) - return self._user_secrets_repo - - @property - def connected_services_repo(self) -> ConnectedServicesRepository: - """The DB adapter for connected services.""" - if not self._connected_services_repo: - self._connected_services_repo = ConnectedServicesRepository( - session_maker=self.db.async_session_maker, - encryption_key=self.encryption_key, - async_oauth2_client_class=self.async_oauth2_client_class, - internal_gitlab_url=self.gitlab_url, - ) - return 
self._connected_services_repo - - @property - def git_repositories_repo(self) -> GitRepositoriesRepository: - """The DB adapter for repositories.""" - if not self._git_repositories_repo: - self._git_repositories_repo = GitRepositoriesRepository( - session_maker=self.db.async_session_maker, - connected_services_repo=self.connected_services_repo, - internal_gitlab_url=self.gitlab_url, - ) - return self._git_repositories_repo - - @property - def platform_repo(self) -> PlatformRepository: - """The DB adapter for the platform configuration.""" - if not self._platform_repo: - self._platform_repo = PlatformRepository( - session_maker=self.db.async_session_maker, - ) - return self._platform_repo - - @property - def data_connector_repo(self) -> DataConnectorRepository: - """The DB adapter for data connectors.""" - if not self._data_connector_repo: - self._data_connector_repo = DataConnectorRepository( - session_maker=self.db.async_session_maker, authz=self.authz - ) - return self._data_connector_repo - - @property - def data_connector_to_project_link_repo(self) -> DataConnectorProjectLinkRepository: - """The DB adapter for data connector to project links.""" - if not self._data_connector_to_project_link_repo: - self._data_connector_to_project_link_repo = DataConnectorProjectLinkRepository( - session_maker=self.db.async_session_maker, authz=self.authz - ) - return self._data_connector_to_project_link_repo +@dataclass +class TrustedProxiesConfig: + """Configuration for trusted reverse proxies.""" - @property - def data_connector_secret_repo(self) -> DataConnectorSecretRepository: - """The DB adapter for data connector secrets.""" - if not self._data_connector_secret_repo: - self._data_connector_secret_repo = DataConnectorSecretRepository( - session_maker=self.db.async_session_maker, - data_connector_repo=self.data_connector_repo, - user_repo=self.kc_user_repo, - secret_service_public_key=self.secrets_service_public_key, - authz=self.authz, - ) - return self._data_connector_secret_repo + proxies_count: int | None = None + real_ip_header: str | None = None @classmethod - def from_env(cls, prefix: str = "") -> "Config": + def from_env(cls) -> "TrustedProxiesConfig": """Create a config from environment variables.""" - - user_store: base_models.UserStore - authenticator: base_models.Authenticator - gitlab_authenticator: base_models.Authenticator - gitlab_client: base_models.GitlabAPIProtocol - user_preferences_config: UserPreferencesConfig - version = os.environ.get(f"{prefix}VERSION", "0.0.1") - server_options_file = os.environ.get("NB_SERVER_OPTIONS__UI_CHOICES_PATH") - server_defaults_file = os.environ.get("NB_SERVER_OPTIONS__DEFAULTS_PATH") - k8s_namespace = os.environ.get("K8S_NAMESPACE", "default") - max_pinned_projects = int(os.environ.get(f"{prefix}MAX_PINNED_PROJECTS", "10")) - user_preferences_config = UserPreferencesConfig(max_pinned_projects=max_pinned_projects) - db = DBConfig.from_env(prefix) - kc_api: IKeycloakAPI - secrets_service_public_key: PublicKeyTypes - gitlab_url: str | None - - if os.environ.get(f"{prefix}DUMMY_STORES", "false").lower() == "true": - encryption_key = secrets.token_bytes(32) - secrets_service_public_key_path = os.getenv(f"{prefix}SECRETS_SERVICE_PUBLIC_KEY_PATH") - if secrets_service_public_key_path is not None: - secrets_service_public_key = serialization.load_pem_public_key( - Path(secrets_service_public_key_path).read_bytes() - ) - else: - private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) - secrets_service_public_key = 
private_key.public_key() - - authenticator = DummyAuthenticator() - gitlab_authenticator = DummyAuthenticator() - quota_repo = QuotaRepository(DummyCoreClient({}, {}), DummySchedulingClient({}), namespace=k8s_namespace) - user_always_exists = os.environ.get("DUMMY_USERSTORE_USER_ALWAYS_EXISTS", "true").lower() == "true" - user_store = DummyUserStore(user_always_exists=user_always_exists) - gitlab_client = DummyGitlabAPI() - dummy_users = [ - UnsavedUserInfo(id="user1", first_name="user1", last_name="doe", email="user1@doe.com"), - UnsavedUserInfo(id="user2", first_name="user2", last_name="doe", email="user2@doe.com"), - ] - kc_api = DummyKeycloakAPI(users=[i._to_keycloak_dict() for i in dummy_users]) - redis = RedisConfig.fake() - gitlab_url = None - else: - encryption_key_path = os.getenv(f"{prefix}ENCRYPTION_KEY_PATH", "/encryption-key") - encryption_key = Path(encryption_key_path).read_bytes() - secrets_service_public_key_path = os.getenv( - f"{prefix}SECRETS_SERVICE_PUBLIC_KEY_PATH", "/secret_service_public_key" - ) - secrets_service_public_key = serialization.load_pem_public_key( - Path(secrets_service_public_key_path).read_bytes() - ) - quota_repo = QuotaRepository(K8sCoreClient(), K8sSchedulingClient(), namespace=k8s_namespace) - keycloak_url = os.environ.get(f"{prefix}KEYCLOAK_URL") - if keycloak_url is None: - raise errors.ConfigurationError(message="The Keycloak URL has to be specified.") - keycloak_url = keycloak_url.rstrip("/") - keycloak_realm = os.environ.get(f"{prefix}KEYCLOAK_REALM", "Renku") - oidc_disc_data = oidc_discovery(keycloak_url, keycloak_realm) - jwks_url = oidc_disc_data.get("jwks_uri") - if jwks_url is None: - raise errors.ConfigurationError( - message="The JWKS url for Keycloak cannot be found from the OIDC discovery endpoint." 
- ) - algorithms = os.environ.get(f"{prefix}KEYCLOAK_TOKEN_SIGNATURE_ALGS") - if algorithms is None: - raise errors.ConfigurationError(message="At least one token signature algorithm is required.") - algorithms_lst = [i.strip() for i in algorithms.split(",")] - jwks = PyJWKClient(jwks_url) - authenticator = KeycloakAuthenticator(jwks=jwks, algorithms=algorithms_lst) - gitlab_url = os.environ.get(f"{prefix}GITLAB_URL") - if gitlab_url is None: - raise errors.ConfigurationError(message="Please provide the gitlab instance URL") - gitlab_authenticator = GitlabAuthenticator(gitlab_url=gitlab_url) - user_store = KcUserStore(keycloak_url=keycloak_url, realm=keycloak_realm) - gitlab_client = GitlabAPI(gitlab_url=gitlab_url) - client_id = os.environ[f"{prefix}KEYCLOAK_CLIENT_ID"] - client_secret = os.environ[f"{prefix}KEYCLOAK_CLIENT_SECRET"] - kc_api = KeycloakAPI( - keycloak_url=keycloak_url, - client_id=client_id, - client_secret=client_secret, - realm=keycloak_realm, - ) - redis = RedisConfig.from_env(prefix) - - if not isinstance(secrets_service_public_key, rsa.RSAPublicKey): - raise errors.ConfigurationError(message="Secret service public key is not an RSAPublicKey") - - sentry = SentryConfig.from_env(prefix) - trusted_proxies = TrustedProxiesConfig.from_env(prefix) - message_queue = RedisQueue(redis) - nb_config = NotebooksConfig.from_env(db) - - return cls( - version=version, - authenticator=authenticator, - gitlab_authenticator=gitlab_authenticator, - gitlab_client=gitlab_client, - user_store=user_store, - quota_repo=quota_repo, - sentry=sentry, - trusted_proxies=trusted_proxies, - server_defaults_file=server_defaults_file, - server_options_file=server_options_file, - user_preferences_config=user_preferences_config, - db=db, - redis=redis, - kc_api=kc_api, - message_queue=message_queue, - encryption_key=encryption_key, - secrets_service_public_key=secrets_service_public_key, - gitlab_url=gitlab_url, - nb_config=nb_config, - ) + proxies_count = int(os.environ.get("PROXIES_COUNT") or "0") + real_ip_header = os.environ.get("REAL_IP_HEADER") + return cls(proxies_count=proxies_count or None, real_ip_header=real_ip_header or None) diff --git a/components/renku_data_services/app_config/logging.py b/components/renku_data_services/app_config/logging.py new file mode 100644 index 000000000..bd0255e9a --- /dev/null +++ b/components/renku_data_services/app_config/logging.py @@ -0,0 +1,335 @@ +"""Logging configuration. + +This is a central place for configuring the logging library, so that +all log messages have the same format. The intention is to use it like +described in the manual of python logging: + +Define a module based logger like this: + +``` python +import renku_data_services.app_config.logging as logging + +logger = logging.getLogger(__name__) +``` + +In order to make sure, our loggers are always below +`renku_data_services`, it is recommended to use the `getLogger` +function of this module. It will only delegate to the logging library +making sure the logger name is prefixed correctly. + +Additionally, with `set_request_id` a request id can be provided that +will be injected into every log record. This id is managed by a +ContextVar to be retained correctly in async contexts. + +Before accessing loggers, run the `configure_logging()` method to +configure loggers appropriately. 
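+
+A minimal entrypoint sketch (illustrative; `Config` and `configure_logging`
+are defined below in this module):
+
+``` python
+import renku_data_services.app_config.logging as logging
+
+logging.configure_logging(logging.Config.from_env())
+logger = logging.getLogger(__name__)
+logger.info("service started")
+```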
+
+"""
+
+from __future__ import annotations
+
+import contextvars
+import json
+import logging
+import os
+from dataclasses import dataclass, field
+from datetime import datetime
+from enum import StrEnum
+from logging import Logger
+from typing import Final, cast, final
+
+from renku_data_services.errors.errors import ConfigurationError
+
+__app_root_logger: Final[str] = "renku_data_services"
+
+_request_var: contextvars.ContextVar[str] = contextvars.ContextVar("request_id")
+
+
+def getLogger(name: str) -> Logger:
+    """Return a logger with the name prefixed with our app name, if not already done."""
+    if name.startswith(__app_root_logger + "."):
+        return logging.getLogger(name)
+    else:
+        return logging.getLogger(f"{__app_root_logger}.{name}")
+
+
+def set_request_id(rid: str | None) -> None:
+    """Provide the request_id as a context-sensitive global variable.
+
+    The id will be used in subsequent logging statements.
+    """
+    if rid is None:
+        _request_var.set("")
+    else:
+        _request_var.set(rid)
+
+
+class _RenkuLogFormatter(logging.Formatter):
+    """Custom formatter.
+
+    It is used to encapsulate the formatting options and to use
+    datetime instead of struct_time.
+    """
+
+    def __init__(self) -> None:
+        super().__init__(
+            fmt=(
+                "%(asctime)s [%(levelname)s] %(process)d/%(threadName)s "
+                "%(name)s (%(filename)s:%(lineno)d) [%(request_id)s] - %(message)s"
+            ),
+            datefmt="%Y-%m-%dT%H:%M:%S.%f%z",
+        )
+
+    def formatTime(self, record: logging.LogRecord, datefmt: str | None = None) -> str:
+        """Overridden to format the time string for the %(asctime)s interpolation."""
+        ct = datetime.fromtimestamp(record.created)
+        return ct.strftime(cast(str, self.datefmt))
+
+
+class _RenkuJsonFormatter(_RenkuLogFormatter):
+    """Formatter to produce JSON log messages."""
+
+    fields: Final[set[str]] = set(
+        [
+            "name",
+            "levelno",
+            "pathname",
+            "module",
+            "filename",
+            "lineno",
+        ]
+    )
+    default_fields: Final[set[str]] = set(fields).union(set(["exc_info", "stack_info", "asctime", "message", "msg"]))
+
+    def format(self, record: logging.LogRecord) -> str:
+        """Format the log record."""
+        super().format(record)
+        return json.dumps(self._to_dict(record))
+
+    def _to_dict(self, record: logging.LogRecord) -> dict:
+        base = {field: getattr(record, field, None) for field in self.fields}
+        extra = {key: value for key, value in record.__dict__.items() if key not in self.default_fields}
+        info = {}
+        if record.exc_info:
+            info["exc_info"] = self.formatException(record.exc_info)
+        if record.stack_info:
+            info["stack_info"] = self.formatStack(record.stack_info)
+        return {
+            "timestamp": self.formatTime(record, self.datefmt),
+            "level": record.levelname,
+            "message": record.getMessage(),
+            **base,
+            **info,
+            **extra,
+        }
+
+
+class LogFormatStyle(StrEnum):
+    """Supported log formats."""
+
+    plain = "plain"
+    json = "json"
+
+    def to_formatter(self) -> logging.Formatter:
+        """Return the formatter instance corresponding to this format style."""
+        match self:
+            case LogFormatStyle.plain:
+                return _RenkuLogFormatter()
+            case LogFormatStyle.json:
+                return _RenkuJsonFormatter()
+
+    @classmethod
+    def from_env(cls, prefix: str = "", default: str = "plain") -> LogFormatStyle:
+        """Read the format style from the env var `LOG_FORMAT_STYLE`."""
+        str_value = os.environ.get(f"{prefix}LOG_FORMAT_STYLE", default).lower()
+        match str_value:
+            case "plain":
+                return LogFormatStyle.plain
+            case "json":
+                return LogFormatStyle.json
+            case _:
+                return LogFormatStyle.plain
+
+
+@final
+class _Utils:
+    @classmethod
+    def get_numeric_level(cls, level_name: str) -> int:
+        ln = logging.getLevelNamesMapping().get(level_name.upper())
+        if ln is None:
+            raise ConfigurationError(message=f"Logging config problem: level name '{level_name}' is not known.")
+        return ln
+
+    @classmethod
+    def _logger_list_from_env(cls, level: int, prefix: str) -> set[str]:
+        level_name = logging._levelToName.get(level)
+        if level_name is None:
+            return set()
+
+        key = f"{prefix}{level_name.upper()}_LOGGING"
+        value = os.environ.get(key, "").strip()
+        if value == "":
+            return set()
+
+        return set([n.strip() for n in value.split(",")])
+
+    @classmethod
+    def logger_levels_from_env(cls, prefix: str) -> dict[int, set[str]]:
+        config = {}
+        for level in list(logging._levelToName.keys()):
+            logger_names = cls._logger_list_from_env(level, prefix)
+            if logger_names != set():
+                config.update({level: logger_names})
+
+        return config
+
+    @classmethod
+    def get_all_loggers(cls) -> list[logging.Logger]:
+        """Return the current snapshot of all loggers, including the root logger."""
+        all_loggers = [log for log in logging.Logger.manager.loggerDict.values() if isinstance(log, logging.Logger)]
+        all_loggers.append(logging.root)
+        return all_loggers
+
+
+@dataclass
+class Config:
+    """Configuration for logging."""
+
+    format_style: LogFormatStyle = LogFormatStyle.plain
+    root_level: int = logging.WARNING
+    app_level: int = logging.INFO
+    override_levels: dict[int, set[str]] = field(default_factory=dict)
+
+    def update_override_levels(self, others: dict[int, set[str]]) -> None:
+        """Applies the given override levels to this config."""
+        other_loggers: set[str] = set()
+        for x in others.values():
+            other_loggers = other_loggers.union(x)
+        self.remove_override_loggers(other_loggers)
+        for level, names in others.items():
+            cur_names = self.override_levels.get(level) or set()
+            cur_names = names.union(cur_names)
+            self.override_levels.update({level: cur_names})
+
+    def remove_override_loggers(self, loggers: set[str]) -> None:
+        """Removes the given loggers from the override levels config."""
+        next_levels = {}
+        for level, names in self.override_levels.items():
+            next_names = names.difference(loggers)
+            if next_names != set():
+                next_levels.update({level: next_names})
+        self.override_levels = next_levels
+
+    @classmethod
+    def from_env(cls, prefix: str = "") -> Config:
+        """Return a config obtained from environment variables."""
+        default = cls()
+        match os.environ.get(f"{prefix}LOG_ROOT_LEVEL"):
+            case None:
+                root_level = default.root_level
+            case lvl:
+                root_level = _Utils.get_numeric_level(lvl)
+        match os.environ.get(f"{prefix}LOG_APP_LEVEL"):
+            case None:
+                app_level = default.app_level
+            case lvl:
+                app_level = _Utils.get_numeric_level(lvl)
+        format_style = LogFormatStyle.from_env(prefix, default.format_style.value)
+        levels = _Utils.logger_levels_from_env(prefix)
+        return Config(format_style, root_level, app_level, levels)
+
+
+class _RequestIdFilter(logging.Filter):
+    """Hack the request id into the log record."""
+
+    def filter(self, record: logging.LogRecord) -> bool:
+        rid = _request_var.get(None) or "-"
+        record.request_id = rid
+        return True
+
+
+def configure_logging(cfg: Config | None = None) -> None:
+    """Configures the logging library.
+
+    This should run before using a logger. It sets all loggers to
+    WARNING, except for our code that will log at INFO. Our code is
+    identified by the app root logger `renku_data_services`. All our
+    loggers should therefore be children of this logger.
+
+    Levels for individual loggers can be overridden using the
+    `override_levels` argument. It is a map from logging level to a
+    list of logger names. The default reads it from environment
+    variables like `DEBUG_LOGGING=logger.name.one,logger.name.two`.
+    The pattern is `{LEVEL}_LOGGING`; the value is a comma-separated
+    list of logger names that will be configured to a minimum level
+    of `{LEVEL}`.
+
+    """
+    if cfg is None:
+        cfg = Config.from_env()
+
+    # To have a uniform format *everywhere*, there is only one
+    # handler. It is added to the root logger. However, imported
+    # modules may change this configuration at any time (and they do).
+    # This tries to remove all existing handlers on a best-effort basis.
+    for ll in _Utils.get_all_loggers():
+        ll.setLevel(logging.NOTSET)
+        for hdl in ll.handlers:
+            ll.removeHandler(hdl)
+
+    handler = logging.StreamHandler()
+    handler.setFormatter(cfg.format_style.to_formatter())
+    handler.addFilter(_RequestIdFilter())
+    logging.root.setLevel(cfg.root_level)
+    logging.root.addHandler(handler)
+    logging.getLogger(__app_root_logger).setLevel(cfg.app_level)
+
+    # This is for backwards compatibility; ideally these are
+    # defined as env vars in the specific process.
+    logging.getLogger("sanic").setLevel(logging.INFO)
+    logging.getLogger("alembic").setLevel(logging.INFO)
+
+    logger = getLogger(__name__)
+
+    # override the minimum level for specific loggers
+    for level, names in cfg.override_levels.items():
+        for name in names:
+            logger.info(f"Set threshold level: {name} -> {logging.getLevelName(level)}")
+            logging.getLogger(name).setLevel(level)
+
+
+def print_logger_setting(msg: str | None = None, show_all: bool = False) -> None:
+    """Prints the current logger settings.
+
+    It intentionally uses `print` to survive a messed-up logger
+    config. It prints all loggers that have an explicitly set level.
+    Others, like those with a `NOT_SET` level and the 'PlaceHolder'
+    loggers, are not printed.
+
+    """
+    l_root = logging.Logger.root
+    output = ["=" * 65]
+    if msg is not None:
+        output.append(msg.center(65, "-"))
+
+    output.append(f"Total logger entries: {len(logging.Logger.manager.loggerDict)}")
+    output.append(
+        f" * {l_root} (self.level={logging.getLevelName(l_root.level)}, handlers={len(logging.Logger.root.handlers)})"
+    )
+    for name in logging.Logger.manager.loggerDict:
+        ll = logging.Logger.manager.loggerDict[name]
+        match ll:
+            case logging.Logger() as logger:
+                level_name = logging.getLevelName(logger.level)
+                eff_level_name = logging.getLevelName(ll.getEffectiveLevel())
+                show_item = logger.level != logging.NOTSET
+                handlers = logger.handlers
+            case logging.PlaceHolder():
+                level_name = "{NOT_SET}"
+                eff_level_name = "{PlaceHolder}"
+                show_item = False
+                handlers = []
+
+        if show_all or show_item:
+            output.append(f" * Logger({name} @{eff_level_name}, self.level={level_name}, handlers={len(handlers)})")
+    output.append("=" * 65)
+    print("\n".join(output))
diff --git a/components/renku_data_services/authn/keycloak.py b/components/renku_data_services/authn/keycloak.py
index 7e615ef2d..2f082929a 100644
--- a/components/renku_data_services/authn/keycloak.py
+++ b/components/renku_data_services/authn/keycloak.py
@@ -70,10 +70,10 @@ def _validate(self, token: str) -> dict[str, Any]:
             # NOTE: the above errors are subclasses of `InvalidToken` below but they will result from keycloak
             # misconfiguration most often rather than from the user having done something so we surface them.
raise - except jwt.InvalidTokenError: - raise errors.UnauthorizedError( - message="Your credentials are invalid or expired, please log in again.", quiet=True - ) + except jwt.InvalidTokenError as err: + raise errors.InvalidTokenError( + message="Your credentials are invalid or expired, please log in again." + ) from err async def authenticate( self, access_token: str, request: Request @@ -93,8 +93,8 @@ async def authenticate( email = parsed.get("email") if id is None or email is None: raise errors.UnauthorizedError( - message="Your credentials are invalid or expired, please log in again.", quiet=True - ) + message="Your credentials are invalid or expired, please log in again." + ) from None user = base_models.AuthenticatedAPIUser( is_admin=is_admin, id=id, diff --git a/components/renku_data_services/authz/authz.py b/components/renku_data_services/authz/authz.py index 30f3f996f..1f9c0462b 100644 --- a/components/renku_data_services/authz/authz.py +++ b/components/renku_data_services/authz/authz.py @@ -30,11 +30,11 @@ ZedToken, ) from authzed.api.v1.permission_service_pb2 import LOOKUP_PERMISSIONSHIP_HAS_PERMISSION -from sanic.log import logger from sqlalchemy.ext.asyncio import AsyncSession from ulid import ULID from renku_data_services import base_models +from renku_data_services.app_config import logging from renku_data_services.authz.config import AuthzConfig from renku_data_services.authz.models import ( Change, @@ -45,25 +45,30 @@ Scope, Visibility, ) -from renku_data_services.base_models.core import InternalServiceAdmin +from renku_data_services.base_models.core import InternalServiceAdmin, ResourceType from renku_data_services.data_connectors.models import ( DataConnector, DataConnectorToProjectLink, DataConnectorUpdate, DeletedDataConnector, + GlobalDataConnector, ) from renku_data_services.errors import errors from renku_data_services.namespace.models import ( DeletedGroup, Group, + GroupNamespace, GroupUpdate, Namespace, NamespaceKind, - NamespaceUpdate, + ProjectNamespace, + UserNamespace, ) from renku_data_services.project.models import DeletedProject, Project, ProjectUpdate from renku_data_services.users.models import DeletedUser, UserInfo, UserInfoUpdate +logger = logging.getLogger(__name__) + _P = ParamSpec("_P") @@ -88,6 +93,7 @@ def authz(self) -> "Authz": | UserInfo | DeletedUser | DataConnector + | GlobalDataConnector | DataConnectorUpdate | DeletedDataConnector | DataConnectorToProjectLink @@ -118,18 +124,18 @@ def extend(self, other: "_AuthzChange") -> None: class _Relation(StrEnum): """Relations for Authzed.""" - owner: str = "owner" - editor: str = "editor" - viewer: str = "viewer" - public_viewer: str = "public_viewer" - admin: str = "admin" - project_platform: str = "project_platform" - group_platform: str = "group_platform" - user_namespace_platform: str = "user_namespace_platform" - project_namespace: str = "project_namespace" - data_connector_platform: str = "data_connector_platform" - data_connector_namespace: str = "data_connector_namespace" - linked_to: str = "linked_to" + owner = "owner" + editor = "editor" + viewer = "viewer" + public_viewer = "public_viewer" + admin = "admin" + project_platform = "project_platform" + group_platform = "group_platform" + user_namespace_platform = "user_namespace_platform" + project_namespace = "project_namespace" + data_connector_platform = "data_connector_platform" + data_connector_namespace = "data_connector_namespace" + linked_to = "linked_to" @classmethod def from_role(cls, role: Role) -> "_Relation": @@ -153,28 +159,16 
@@ def to_role(self) -> Role: raise errors.ProgrammingError(message=f"Cannot map relation {self} to any role") -class ResourceType(StrEnum): - """All possible resources stored in Authzed.""" - - project: str = "project" - user: str = "user" - anonymous_user: str = "anonymous_user" - platform: str = "platform" - group: str = "group" - user_namespace: str = "user_namespace" - data_connector: str = "data_connector" - - class AuthzOperation(StrEnum): """The type of change that requires authorization database update.""" - create: str = "create" - delete: str = "delete" - update: str = "update" - update_or_insert: str = "update_or_insert" - insert_many: str = "insert_many" - create_link: str = "create_link" - delete_link: str = "delete_link" + create = "create" + delete = "delete" + update = "update" + update_or_insert = "update_or_insert" + insert_many = "insert_many" + create_link = "create_link" + delete_link = "delete_link" class _AuthzConverter: @@ -210,10 +204,12 @@ def all_users() -> ObjectReference: @staticmethod def group(id: ULID) -> ObjectReference: + """The id should be the id of the GroupORM object in the DB.""" return ObjectReference(object_type=ResourceType.group, object_id=str(id)) @staticmethod def user_namespace(id: ULID) -> ObjectReference: + """The id should be the id of the NamespaceORM object in the DB.""" return ObjectReference(object_type=ResourceType.user_namespace, object_id=str(id)) @staticmethod @@ -276,6 +272,7 @@ async def decorated_function( | DeletedGroup | Namespace | DataConnector + | GlobalDataConnector | DeletedDataConnector | None ) = None @@ -287,7 +284,7 @@ async def decorated_function( case ResourceType.user_namespace if isinstance(potential_resource, Namespace): resource = potential_resource case ResourceType.data_connector if isinstance( - potential_resource, (DataConnector, DeletedDataConnector) + potential_resource, (DataConnector, GlobalDataConnector, DeletedDataConnector) ): resource = potential_resource case _: @@ -428,7 +425,7 @@ async def has_permissions( pair.HasField("item") and pair.item.permissionship == CheckPermissionResponse.PERMISSIONSHIP_HAS_PERMISSION, ) - for item, pair in zip(items, response.pairs) + for item, pair in zip(items, response.pairs, strict=True) ] async def resources_with_permission( @@ -600,121 +597,125 @@ def authz_change( ]: """A decorator that updates the authorization database for different types of operations.""" - def _extract_user_from_args(*args: _P.args, **kwargs: _P.kwargs) -> base_models.APIUser: - if len(args) == 0: - user_kwarg = kwargs.get("user") - requested_by_kwarg = kwargs.get("requested_by") - if isinstance(user_kwarg, base_models.APIUser) and isinstance(requested_by_kwarg, base_models.APIUser): + def decorator( + f: Callable[Concatenate[_WithAuthz, _P], Awaitable[_AuthzChangeFuncResult]], + ) -> Callable[Concatenate[_WithAuthz, _P], Awaitable[_AuthzChangeFuncResult]]: + def _extract_user_from_args(*args: _P.args, **kwargs: _P.kwargs) -> base_models.APIUser: + if len(args) == 0: + user_kwarg = kwargs.get("user") + requested_by_kwarg = kwargs.get("requested_by") + if isinstance(user_kwarg, base_models.APIUser) and isinstance( + requested_by_kwarg, base_models.APIUser + ): + raise errors.ProgrammingError( + message="The decorator for authorization database changes found two APIUser parameters in" + " the 'user' and 'requested_by' keyword arguments but expected only one of them to be " + "present." 
+ ) + potential_user = user_kwarg if isinstance(user_kwarg, base_models.APIUser) else requested_by_kwarg + else: + potential_user = args[0] + if not isinstance(potential_user, base_models.APIUser): raise errors.ProgrammingError( - message="The decorator for authorization database changes found two APIUser parameters in the " - "'user' and 'requested_by' keyword arguments but expected only one of them to be present." + message="The decorator for authorization database changes could not find APIUser in the " + f"function arguments, the type of the argument that was found is {type(potential_user)}." ) - potential_user = user_kwarg if isinstance(user_kwarg, base_models.APIUser) else requested_by_kwarg - else: - potential_user = args[0] - if not isinstance(potential_user, base_models.APIUser): - raise errors.ProgrammingError( - message="The decorator for authorization database changes could not find APIUser in the function " - f"arguments, the type of the argument that was found is {type(potential_user)}." - ) - return potential_user - - async def _get_authz_change( - db_repo: _WithAuthz, - operation: AuthzOperation, - resource: ResourceType, - result: _AuthzChangeFuncResult, - *func_args: _P.args, - **func_kwargs: _P.kwargs, - ) -> _AuthzChange: - authz_change = _AuthzChange() - match operation, resource: - case AuthzOperation.create, ResourceType.project if isinstance(result, Project): - authz_change = db_repo.authz._add_project(result) - case AuthzOperation.delete, ResourceType.project if isinstance(result, DeletedProject): - user = _extract_user_from_args(*func_args, **func_kwargs) - authz_change = await db_repo.authz._remove_project(user, result) - case AuthzOperation.delete, ResourceType.project if result is None: - # NOTE: This means that the project does not exist in the first place so nothing was deleted - pass - case AuthzOperation.update, ResourceType.project if isinstance(result, ProjectUpdate): - authz_change = _AuthzChange() - if result.old.visibility != result.new.visibility: + return potential_user + + async def _get_authz_change( + db_repo: _WithAuthz, + operation: AuthzOperation, + resource: ResourceType, + result: _AuthzChangeFuncResult, + *func_args: _P.args, + **func_kwargs: _P.kwargs, + ) -> _AuthzChange: + authz_change = _AuthzChange() + match operation, resource: + case AuthzOperation.create, ResourceType.project if isinstance(result, Project): + authz_change = db_repo.authz._add_project(result) + case AuthzOperation.delete, ResourceType.project if isinstance(result, DeletedProject): user = _extract_user_from_args(*func_args, **func_kwargs) - authz_change.extend(await db_repo.authz._update_project_visibility(user, result.new)) - if result.old.namespace.id != result.new.namespace.id: + authz_change = await db_repo.authz._remove_project(user, result) + case AuthzOperation.delete, ResourceType.project if result is None: + # NOTE: This means that the project does not exist in the first place so nothing was deleted + pass + case AuthzOperation.update, ResourceType.project if isinstance(result, ProjectUpdate): + authz_change = _AuthzChange() + if result.old.visibility != result.new.visibility: + user = _extract_user_from_args(*func_args, **func_kwargs) + authz_change.extend(await db_repo.authz._update_project_visibility(user, result.new)) + if result.old.namespace.id != result.new.namespace.id: + user = _extract_user_from_args(*func_args, **func_kwargs) + authz_change.extend(await db_repo.authz._update_project_namespace(user, result.new)) + case AuthzOperation.create, 
ResourceType.group if isinstance(result, Group): + authz_change = db_repo.authz._add_group(result) + case AuthzOperation.delete, ResourceType.group if isinstance(result, DeletedGroup): user = _extract_user_from_args(*func_args, **func_kwargs) - authz_change.extend(await db_repo.authz._update_project_namespace(user, result.new)) - case AuthzOperation.create, ResourceType.group if isinstance(result, Group): - authz_change = db_repo.authz._add_group(result) - case AuthzOperation.delete, ResourceType.group if isinstance(result, DeletedGroup): - user = _extract_user_from_args(*func_args, **func_kwargs) - authz_change = await db_repo.authz._remove_group(user, result) - case AuthzOperation.delete, ResourceType.group if result is None: - # NOTE: This means that the group does not exist in the first place so nothing was deleted - pass - case AuthzOperation.update_or_insert, ResourceType.user if isinstance(result, UserInfoUpdate): - if result.old is None: - authz_change = db_repo.authz._add_user_namespace(result.new.namespace) - case AuthzOperation.delete, ResourceType.user if isinstance(result, DeletedUser): - user = _extract_user_from_args(*func_args, **func_kwargs) - authz_change = await db_repo.authz._remove_user_namespace(result.id) - authz_change.extend(await db_repo.authz._remove_user(user, result)) - case AuthzOperation.delete, ResourceType.user if result is None: - # NOTE: This means that the user does not exist in the first place so nothing was deleted - pass - case AuthzOperation.insert_many, ResourceType.user_namespace if isinstance(result, list): - for res in result: - if not isinstance(res, UserInfo): - raise errors.ProgrammingError( - message="Expected list of UserInfo when generating authorization " - f"database updates for inserting namespaces but found {type(res)}" - ) - authz_change.extend(db_repo.authz._add_user_namespace(res.namespace)) - case AuthzOperation.create, ResourceType.data_connector if isinstance(result, DataConnector): - authz_change = db_repo.authz._add_data_connector(result) - case AuthzOperation.delete, ResourceType.data_connector if result is None: - # NOTE: This means that the data connector does not exist in the first place so nothing was deleted - pass - case AuthzOperation.delete, ResourceType.data_connector if isinstance(result, DeletedDataConnector): - user = _extract_user_from_args(*func_args, **func_kwargs) - authz_change = await db_repo.authz._remove_data_connector(user, result) - case AuthzOperation.update, ResourceType.data_connector if isinstance(result, DataConnectorUpdate): - authz_change = _AuthzChange() - if result.old.visibility != result.new.visibility: + authz_change = await db_repo.authz._remove_group(user, result) + case AuthzOperation.delete, ResourceType.group if result is None: + # NOTE: This means that the group does not exist in the first place so nothing was deleted + pass + case AuthzOperation.update_or_insert, ResourceType.user if isinstance(result, UserInfoUpdate): + if result.old is None: + authz_change = db_repo.authz._add_user_namespace(result.new.namespace) + case AuthzOperation.delete, ResourceType.user if isinstance(result, DeletedUser): user = _extract_user_from_args(*func_args, **func_kwargs) - authz_change.extend(await db_repo.authz._update_data_connector_visibility(user, result.new)) - if result.old.namespace.id != result.new.namespace.id: + authz_change = await db_repo.authz._remove_user_namespace(result.id) + authz_change.extend(await db_repo.authz._remove_user(user, result)) + case AuthzOperation.delete, 
ResourceType.user if result is None: + # NOTE: This means that the user does not exist in the first place so nothing was deleted + pass + case AuthzOperation.insert_many, ResourceType.user_namespace if isinstance(result, list): + for res in result: + if not isinstance(res, UserInfo): + raise errors.ProgrammingError( + message="Expected list of UserInfo when generating authorization " + f"database updates for inserting namespaces but found {type(res)}" + ) + authz_change.extend(db_repo.authz._add_user_namespace(res.namespace)) + case AuthzOperation.create, ResourceType.data_connector if isinstance(result, DataConnector): + authz_change = db_repo.authz._add_data_connector(result) + case AuthzOperation.create, ResourceType.data_connector if isinstance(result, GlobalDataConnector): + authz_change = db_repo.authz._add_global_data_connector(result) + case AuthzOperation.delete, ResourceType.data_connector if result is None: + # NOTE: This means that the dc does not exist in the first place so nothing was deleted + pass + case AuthzOperation.delete, ResourceType.data_connector if isinstance(result, DeletedDataConnector): user = _extract_user_from_args(*func_args, **func_kwargs) - authz_change.extend(await db_repo.authz._update_data_connector_namespace(user, result.new)) - case AuthzOperation.create_link, ResourceType.data_connector if isinstance( - result, DataConnectorToProjectLink - ): - user = _extract_user_from_args(*func_args, **func_kwargs) - authz_change = await db_repo.authz._add_data_connector_to_project_link(user, result) - case AuthzOperation.delete_link, ResourceType.data_connector if result is None: - # NOTE: This means that the link does not exist in the first place so nothing was deleted - pass - case AuthzOperation.delete_link, ResourceType.data_connector if isinstance( - result, DataConnectorToProjectLink - ): - user = _extract_user_from_args(*func_args, **func_kwargs) - authz_change = await db_repo.authz._remove_data_connector_to_project_link(user, result) - case _: - resource_id: str | ULID | None = "unknown" - if isinstance(result, (Project, Namespace, Group, DataConnector)): - resource_id = result.id - elif isinstance(result, (ProjectUpdate, NamespaceUpdate, GroupUpdate, DataConnectorUpdate)): - resource_id = result.new.id - raise errors.ProgrammingError( - message=f"Encountered an unknown authorization operation {op} on resource {resource} " - f"with ID {resource_id} when updating the authorization database", - ) - return authz_change + authz_change = await db_repo.authz._remove_data_connector(user, result) + case AuthzOperation.update, ResourceType.data_connector if isinstance(result, DataConnectorUpdate): + authz_change = _AuthzChange() + if result.old.visibility != result.new.visibility: + user = _extract_user_from_args(*func_args, **func_kwargs) + authz_change.extend(await db_repo.authz._update_data_connector_visibility(user, result.new)) + if result.old.namespace != result.new.namespace: + user = _extract_user_from_args(*func_args, **func_kwargs) + if isinstance(result.new, GlobalDataConnector): + raise errors.ValidationError( + message=f"Updating the namespace of a global data connector is not supported ('{result.new.id}')" # noqa E501 + ) + authz_change.extend(await db_repo.authz._update_data_connector_namespace(user, result.new)) + case AuthzOperation.delete_link, ResourceType.data_connector if result is None: + # NOTE: This means that the link does not exist in the first place so nothing was deleted + pass + case AuthzOperation.delete_link, 
ResourceType.data_connector if isinstance( + result, DataConnectorToProjectLink + ): + user = _extract_user_from_args(*func_args, **func_kwargs) + authz_change = await db_repo.authz._remove_data_connector_to_project_link(user, result) + case _: + resource_id: str | ULID | None = "unknown" + if isinstance(result, (Project, Namespace, Group, DataConnector)): + resource_id = result.id + elif isinstance(result, (ProjectUpdate, GroupUpdate, DataConnectorUpdate)): + resource_id = result.new.id + raise errors.ProgrammingError( + message=f"Encountered an unknown authorization operation {op} on resource {resource} " + f"with ID {resource_id} when updating the authorization database", + ) + return authz_change - def decorator( - f: Callable[Concatenate[_WithAuthz, _P], Awaitable[_AuthzChangeFuncResult]], - ) -> Callable[Concatenate[_WithAuthz, _P], Awaitable[_AuthzChangeFuncResult]]: @wraps(f) async def decorated_function( db_repo: _WithAuthz, *args: _P.args, **kwargs: _P.kwargs @@ -1570,25 +1571,67 @@ async def _remove_user_namespace(self, user_id: str, zed_token: ZedToken | None def _add_data_connector(self, data_connector: DataConnector) -> _AuthzChange: """Create the new data connector and associated resources and relations in the DB.""" - creator = SubjectReference(object=_AuthzConverter.user(data_connector.created_by)) data_connector_res = _AuthzConverter.data_connector(data_connector.id) - creator_is_owner = Relationship(resource=data_connector_res, relation=_Relation.owner.value, subject=creator) + match data_connector.namespace: + case ProjectNamespace(): + owned_by = _AuthzConverter.project(data_connector.namespace.underlying_resource_id) + case UserNamespace(): + owned_by = _AuthzConverter.user_namespace(data_connector.namespace.id) + case GroupNamespace(): + owned_by = _AuthzConverter.group(data_connector.namespace.underlying_resource_id) + case _: + raise errors.ProgrammingError( + message="Tried to match unexpected data connector namespace kind", quiet=True + ) + owner = Relationship( + resource=data_connector_res, + relation=_Relation.data_connector_namespace, + subject=SubjectReference(object=owned_by), + ) all_users = SubjectReference(object=_AuthzConverter.all_users()) all_anon_users = SubjectReference(object=_AuthzConverter.anonymous_users()) - data_connector_namespace = SubjectReference( - object=_AuthzConverter.user_namespace(data_connector.namespace.id) - if data_connector.namespace.kind == NamespaceKind.user - else _AuthzConverter.group(cast(ULID, data_connector.namespace.underlying_resource_id)) - ) data_connector_in_platform = Relationship( resource=data_connector_res, relation=_Relation.data_connector_platform, subject=SubjectReference(object=self._platform), ) - data_connector_in_namespace = Relationship( - resource=data_connector_res, relation=_Relation.data_connector_namespace, subject=data_connector_namespace + relationships = [owner, data_connector_in_platform] + if data_connector.visibility == Visibility.PUBLIC: + all_users_are_viewers = Relationship( + resource=data_connector_res, + relation=_Relation.public_viewer.value, + subject=all_users, + ) + all_anon_users_are_viewers = Relationship( + resource=data_connector_res, + relation=_Relation.public_viewer.value, + subject=all_anon_users, + ) + relationships.extend([all_users_are_viewers, all_anon_users_are_viewers]) + apply = WriteRelationshipsRequest( + updates=[ + RelationshipUpdate(operation=RelationshipUpdate.OPERATION_TOUCH, relationship=i) for i in relationships + ] + ) + undo = WriteRelationshipsRequest( + 
updates=[ + RelationshipUpdate(operation=RelationshipUpdate.OPERATION_DELETE, relationship=i) for i in relationships + ] + ) + return _AuthzChange(apply=apply, undo=undo) + + def _add_global_data_connector(self, data_connector: GlobalDataConnector) -> _AuthzChange: + """Create the new global data connector and associated resources and relations in the DB.""" + data_connector_res = _AuthzConverter.data_connector(data_connector.id) + + all_users = SubjectReference(object=_AuthzConverter.all_users()) + all_anon_users = SubjectReference(object=_AuthzConverter.anonymous_users()) + data_connector_in_platform = Relationship( + resource=data_connector_res, + relation=_Relation.data_connector_platform, + subject=SubjectReference(object=self._platform), ) - relationships = [creator_is_owner, data_connector_in_platform, data_connector_in_namespace] + relationships = [data_connector_in_platform] if data_connector.visibility == Visibility.PUBLIC: all_users_are_viewers = Relationship( resource=data_connector_res, @@ -1671,7 +1714,11 @@ async def _remove_user( # NOTE changing visibility is the same access level as removal @_is_allowed_on_resource(Scope.DELETE, ResourceType.data_connector) async def _update_data_connector_visibility( - self, user: base_models.APIUser, data_connector: DataConnector, *, zed_token: ZedToken | None = None + self, + user: base_models.APIUser, + data_connector: DataConnector | GlobalDataConnector, + *, + zed_token: ZedToken | None = None, ) -> _AuthzChange: """Update the visibility of the data connector in the authorization database.""" data_connector_id_str = str(data_connector.id) @@ -1761,7 +1808,11 @@ async def _update_data_connector_visibility( # NOTE changing namespace is the same access level as removal @_is_allowed_on_resource(Scope.DELETE, ResourceType.data_connector) async def _update_data_connector_namespace( - self, user: base_models.APIUser, data_connector: DataConnector, *, zed_token: ZedToken | None = None + self, + user: base_models.APIUser, + data_connector: DataConnector, + *, + zed_token: ZedToken | None = None, ) -> _AuthzChange: """Update the namespace of the data connector in the authorization database.""" consistency = Consistency(at_least_as_fresh=zed_token) if zed_token else Consistency(fully_consistent=True) @@ -1784,13 +1835,24 @@ async def _update_data_connector_namespace( message=f"The data connector with ID {data_connector.id} whose namespace is being updated " "does not currently have a namespace." ) - if current_namespace.relationship.subject.object.object_id == data_connector.namespace.id: + match data_connector.namespace: + case UserNamespace(): + new_namespace_owner = _AuthzConverter.user_namespace(data_connector.namespace.id) + new_namespace_id = data_connector.namespace.id + case GroupNamespace(): + new_namespace_owner = _AuthzConverter.group(data_connector.namespace.underlying_resource_id) + new_namespace_id = data_connector.namespace.underlying_resource_id + case ProjectNamespace(): + new_namespace_owner = _AuthzConverter.project(data_connector.namespace.underlying_resource_id) + new_namespace_id = data_connector.namespace.underlying_resource_id + case x: + raise errors.ProgrammingError( + message=f"Received unknown namespace kind {x} when updating namespace of a data connector." 
+ ) + + if current_namespace.relationship.subject.object.object_id == new_namespace_id: return _AuthzChange() - new_namespace_sub = ( - SubjectReference(object=_AuthzConverter.group(data_connector.namespace.id)) - if data_connector.namespace.kind == NamespaceKind.group - else SubjectReference(object=_AuthzConverter.user_namespace(data_connector.namespace.id)) - ) + new_namespace_sub = SubjectReference(object=new_namespace_owner) old_namespace_sub = ( SubjectReference( object=_AuthzConverter.group(ULID.from_str(current_namespace.relationship.subject.object.object_id)) @@ -1824,47 +1886,6 @@ async def _update_data_connector_namespace( ) return _AuthzChange(apply=apply_change, undo=undo_change) - async def _add_data_connector_to_project_link( - self, user: base_models.APIUser, link: DataConnectorToProjectLink - ) -> _AuthzChange: - """Links a data connector to a project.""" - # NOTE: we manually check for permissions here since it is not trivially expressed through decorators - allowed_from = await self.has_permission( - user, ResourceType.data_connector, link.data_connector_id, Scope.ADD_LINK - ) - if not allowed_from: - raise errors.MissingResourceError( - message=f"The user with ID {user.id} cannot perform operation {Scope.ADD_LINK} " - f"on {ResourceType.data_connector.value} " - f"with ID {link.data_connector_id} or the resource does not exist." - ) - allowed_to = await self.has_permission(user, ResourceType.project, link.project_id, Scope.WRITE) - if not allowed_to: - raise errors.MissingResourceError( - message=f"The user with ID {user.id} cannot perform operation {Scope.WRITE} " - f"on {ResourceType.project.value} " - f"with ID {link.project_id} or the resource does not exist." - ) - - data_connector_res = _AuthzConverter.data_connector(link.data_connector_id) - project_subject = SubjectReference(object=_AuthzConverter.project(link.project_id)) - relationship = Relationship( - resource=data_connector_res, - relation=_Relation.linked_to.value, - subject=project_subject, - ) - apply = WriteRelationshipsRequest( - updates=[RelationshipUpdate(operation=RelationshipUpdate.OPERATION_TOUCH, relationship=relationship)] - ) - undo = WriteRelationshipsRequest( - updates=[RelationshipUpdate(operation=RelationshipUpdate.OPERATION_DELETE, relationship=relationship)] - ) - change = _AuthzChange( - apply=apply, - undo=undo, - ) - return change - async def _remove_data_connector_to_project_link( self, user: base_models.APIUser, link: DataConnectorToProjectLink ) -> _AuthzChange: diff --git a/components/renku_data_services/authz/config.py b/components/renku_data_services/authz/config.py index 02ef4517c..b91ef0c70 100644 --- a/components/renku_data_services/authz/config.py +++ b/components/renku_data_services/authz/config.py @@ -17,12 +17,12 @@ class AuthzConfig: no_tls_connection: bool = False # If set to true it means the communication to authzed is unencrypted @classmethod - def from_env(cls, prefix: str = "") -> "AuthzConfig": + def from_env(cls) -> "AuthzConfig": """Create a configuration from environment variables.""" - host = os.environ[f"{prefix}AUTHZ_DB_HOST"] - grpc_port = os.environ.get(f"{prefix}AUTHZ_DB_GRPC_PORT", "50051") - key = os.environ[f"{prefix}AUTHZ_DB_KEY"] - no_tls_connection = os.environ.get(f"{prefix}AUTHZ_DB_NO_TLS_CONNECTION", "false").lower() == "true" + host = os.environ["AUTHZ_DB_HOST"] + grpc_port = os.environ.get("AUTHZ_DB_GRPC_PORT", "50051") + key = os.environ["AUTHZ_DB_KEY"] + no_tls_connection = os.environ.get("AUTHZ_DB_NO_TLS_CONNECTION", "false").lower() == "true" 
return cls(host, int(grpc_port), key, no_tls_connection) def authz_client(self) -> SyncClient: diff --git a/components/renku_data_services/authz/models.py b/components/renku_data_services/authz/models.py index cfd93536a..f26541227 100644 --- a/components/renku_data_services/authz/models.py +++ b/components/renku_data_services/authz/models.py @@ -10,15 +10,15 @@ from renku_data_services.namespace.apispec import GroupRole if TYPE_CHECKING: - from renku_data_services.authz.authz import ResourceType + from renku_data_services.base_models.core import ResourceType class Role(Enum): """Membership role.""" - OWNER: str = "owner" - VIEWER: str = "viewer" - EDITOR: str = "editor" + OWNER = "owner" + VIEWER = "viewer" + EDITOR = "editor" @classmethod def from_group_role(cls, role: GroupRole) -> "Role": @@ -36,11 +36,11 @@ def from_group_role(cls, role: GroupRole) -> "Role": def to_group_role(self) -> GroupRole: """Convert a group role into an authorization role.""" match self: - case self.OWNER: + case Role.OWNER: return GroupRole.owner - case self.EDITOR: + case Role.EDITOR: return GroupRole.editor - case self.VIEWER: + case Role.VIEWER: return GroupRole.viewer case _: raise errors.ProgrammingError(message=f"Could not convert role {self} into a group role") @@ -49,13 +49,18 @@ def to_group_role(self) -> GroupRole: class Scope(Enum): """Types of permissions - i.e. scope.""" - READ: str = "read" - WRITE: str = "write" - DELETE: str = "delete" - CHANGE_MEMBERSHIP: str = "change_membership" - READ_CHILDREN: str = "read_children" - ADD_LINK: str = "add_link" - IS_ADMIN: str = "is_admin" + READ = "read" + WRITE = "write" + DELETE = "delete" + CHANGE_MEMBERSHIP = "change_membership" + READ_CHILDREN = "read_children" + ADD_LINK = "add_link" + IS_ADMIN = "is_admin" + NON_PUBLIC_READ = "non_public_read" + EXCLUSIVE_MEMBER = "exclusive_member" + EXCLUSIVE_EDITOR = "exclusive_editor" + EXCLUSIVE_OWNER = "exclusive_owner" + DIRECT_MEMBER = "direct_member" @dataclass @@ -80,9 +85,9 @@ class Member(UnsavedMember): class Change(Enum): """The type of change executed on a specific resource.""" - UPDATE: str = "update" - ADD: str = "add" - REMOVE: str = "remove" + UPDATE = "update" + ADD = "add" + REMOVE = "remove" @dataclass @@ -96,8 +101,8 @@ class MembershipChange: class Visibility(Enum): """The visibility of a resource.""" - PUBLIC: str = "public" - PRIVATE: str = "private" + PUBLIC = "public" + PRIVATE = "private" @dataclass diff --git a/components/renku_data_services/authz/schemas.py b/components/renku_data_services/authz/schemas.py index 93470c855..b12f95063 100644 --- a/components/renku_data_services/authz/schemas.py +++ b/components/renku_data_services/authz/schemas.py @@ -22,7 +22,8 @@ ) from ulid import ULID -from renku_data_services.authz.authz import ResourceType, _AuthzConverter, _Relation +from renku_data_services.authz.authz import _AuthzConverter, _Relation +from renku_data_services.base_models.core import ResourceType from renku_data_services.errors import errors @@ -440,3 +441,237 @@ def generate_v4(public_project_ids: Iterable[str]) -> AuthzSchemaMigration: ) return AuthzSchemaMigration(up=up, down=down) + + +_v5: str = """\ +definition user {} + +definition group { + relation group_platform: platform + relation owner: user + relation editor: user + relation viewer: user + relation public_viewer: user:* | anonymous_user:* + permission read = public_viewer + read_children + permission read_children = viewer + write + permission write = editor + delete + permission change_membership = delete + 
permission delete = owner + group_platform->is_admin + permission non_public_read = owner + editor + viewer - public_viewer +} + +definition user_namespace { + relation user_namespace_platform: platform + relation owner: user + relation public_viewer: user:* | anonymous_user:* + permission read = public_viewer + read_children + permission read_children = delete + permission write = delete + permission delete = owner + user_namespace_platform->is_admin + permission non_public_read = owner - public_viewer +} + +definition anonymous_user {} + +definition platform { + relation admin: user + permission is_admin = admin +} + +definition project { + relation project_platform: platform + relation project_namespace: user_namespace | group + relation owner: user + relation editor: user + relation viewer: user + relation public_viewer: user:* | anonymous_user:* + permission read = public_viewer + viewer + write + project_namespace->read_children + permission read_linked_resources = viewer + editor + owner + project_platform->is_admin + permission write = editor + delete + project_namespace->write + permission change_membership = delete + permission delete = owner + project_platform->is_admin + project_namespace->delete + permission non_public_read = owner + editor + viewer + project_namespace->read_children - public_viewer +} + +definition data_connector { + relation data_connector_platform: platform + relation data_connector_namespace: user_namespace | group + relation linked_to: project + relation owner: user + relation editor: user + relation viewer: user + relation public_viewer: user:* | anonymous_user:* + permission read = public_viewer + viewer + write + \ + data_connector_namespace->read_children + read_from_linked_resource + permission read_from_linked_resource = linked_to->read_linked_resources + permission write = editor + delete + data_connector_namespace->write + permission change_membership = delete + permission delete = owner + data_connector_platform->is_admin + data_connector_namespace->delete + permission add_link = write + public_viewer + permission non_public_read = owner + editor + viewer + data_connector_namespace->read_children - public_viewer +}""" + +v5 = AuthzSchemaMigration( + up=[WriteSchemaRequest(schema=_v5)], + down=[WriteSchemaRequest(schema=_v4)], +) + +_v6: str = """\ +definition user {} + +definition group { + relation group_platform: platform + relation owner: user + relation editor: user + relation viewer: user + relation public_viewer: user:* | anonymous_user:* + permission read = public_viewer + read_children + permission read_children = viewer + write + permission write = editor + delete + permission change_membership = delete + permission delete = owner + group_platform->is_admin + permission non_public_read = owner + editor + viewer - public_viewer +} + +definition user_namespace { + relation user_namespace_platform: platform + relation owner: user + relation public_viewer: user:* | anonymous_user:* + permission read = public_viewer + read_children + permission read_children = delete + permission write = delete + permission delete = owner + user_namespace_platform->is_admin + permission non_public_read = owner - public_viewer +} + +definition anonymous_user {} + +definition platform { + relation admin: user + permission is_admin = admin +} + +definition project { + relation project_platform: platform + relation project_namespace: user_namespace | group + relation owner: user + relation editor: user + relation viewer: user + relation public_viewer: user:* | 
anonymous_user:* + permission read = public_viewer + read_children + permission read_children = viewer + write + project_namespace->read_children + permission write = editor + delete + project_namespace->write + permission change_membership = delete + permission delete = owner + project_platform->is_admin + project_namespace->delete + permission non_public_read = owner + editor + viewer + project_namespace->read_children - public_viewer +} + +definition data_connector { + relation data_connector_platform: platform + relation data_connector_namespace: user_namespace | group | project + relation linked_to: project + relation owner: user + relation editor: user + relation viewer: user + relation public_viewer: user:* | anonymous_user:* + permission read = public_viewer + viewer + write + data_connector_namespace->read_children + permission write = editor + delete + data_connector_namespace->write + permission change_membership = delete + permission delete = owner + data_connector_platform->is_admin + data_connector_namespace->delete + permission non_public_read = owner + editor + viewer + data_connector_namespace->read_children - public_viewer +}""" + +v6 = AuthzSchemaMigration( + up=[WriteSchemaRequest(schema=_v6)], + down=[WriteSchemaRequest(schema=_v5)], +) + +_v7 = """\ +definition user {} + +definition group { + relation group_platform: platform + relation owner: user + relation editor: user + relation viewer: user + relation public_viewer: user:* | anonymous_user:* + permission read = public_viewer + read_children + permission read_children = viewer + write + permission write = editor + delete + permission change_membership = delete + permission delete = owner + group_platform->is_admin + permission non_public_read = owner + editor + viewer - public_viewer + permission exclusive_owner = owner + permission exclusive_editor = editor + permission exclusive_member = viewer + editor + owner + permission direct_member = owner + editor + viewer +} + +definition user_namespace { + relation user_namespace_platform: platform + relation owner: user + relation public_viewer: user:* | anonymous_user:* + permission read = public_viewer + read_children + permission read_children = delete + permission write = delete + permission delete = owner + user_namespace_platform->is_admin + permission non_public_read = owner - public_viewer + permission exclusive_owner = owner + permission exclusive_member = owner + permission direct_member = owner +} + +definition anonymous_user {} + +definition platform { + relation admin: user + permission is_admin = admin +} + +definition project { + relation project_platform: platform + relation project_namespace: user_namespace | group + relation owner: user + relation editor: user + relation viewer: user + relation public_viewer: user:* | anonymous_user:* + permission read = public_viewer + read_children + permission read_children = viewer + write + project_namespace->read_children + permission write = editor + delete + project_namespace->write + permission change_membership = delete + permission delete = owner + project_platform->is_admin + project_namespace->delete + permission non_public_read = owner + editor + viewer + project_namespace->read_children - public_viewer + permission exclusive_owner = owner + project_namespace->exclusive_owner + permission exclusive_editor = editor + project_namespace->exclusive_editor + permission exclusive_member = owner + editor + viewer + project_namespace->exclusive_member + permission direct_member = owner + editor + viewer +} + 
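+// The `exclusive_*` permissions record the path through which a role is
+// granted; they exist solely to support reverse lookups (LookupResources).
+// See the docstring following this schema.
+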
+definition data_connector {
+    relation data_connector_platform: platform
+    relation data_connector_namespace: user_namespace | group | project
+    relation linked_to: project
+    relation owner: user
+    relation editor: user
+    relation viewer: user
+    relation public_viewer: user:* | anonymous_user:*
+    permission read = public_viewer + viewer + write + data_connector_namespace->read_children
+    permission write = editor + delete + data_connector_namespace->write
+    permission change_membership = delete
+    permission delete = owner + data_connector_platform->is_admin + data_connector_namespace->delete
+    permission non_public_read = owner + editor + viewer + data_connector_namespace->read_children - public_viewer
+    permission exclusive_owner = owner + data_connector_namespace->exclusive_owner
+    permission exclusive_editor = editor + data_connector_namespace->exclusive_editor
+    permission exclusive_member = owner + editor + viewer + data_connector_namespace->exclusive_member
+    permission direct_member = owner + editor + viewer
+}"""
+"""This adds three permissions starting with `exclusive_` that identify the path through which a role is granted.
+
+They are used for reverse lookups (LookupResources) to determine the
+objects of which a specific user is an owner, editor or member.
+"""
+
+v7 = AuthzSchemaMigration(
+    up=[WriteSchemaRequest(schema=_v7)],
+    down=[WriteSchemaRequest(schema=_v6)],
+)
diff --git a/components/renku_data_services/base_api/error_handler.py b/components/renku_data_services/base_api/error_handler.py
index 660e7f726..27d84fa6e 100644
--- a/components/renku_data_services/base_api/error_handler.py
+++ b/components/renku_data_services/base_api/error_handler.py
@@ -29,7 +29,7 @@ class BaseError(Protocol):
 
 
 class BaseErrorResponse(Protocol):
-    """Porotocol for the error response class of an apispec module."""
+    """Protocol for the error response class of an apispec module."""
 
     error: BaseError
 
diff --git a/components/renku_data_services/base_api/misc.py b/components/renku_data_services/base_api/misc.py
index 906e9087b..63e53e606 100644
--- a/components/renku_data_services/base_api/misc.py
+++ b/components/renku_data_services/base_api/misc.py
@@ -91,8 +91,8 @@ def decorator(
     async def decorated_function(request: Request, *args: _P.args, **kwargs: _P.kwargs) -> _T:
         try:
             return await validate(query=query)(f)(request, *args, **kwargs)
-        except KeyError:
-            raise errors.ValidationError(message="Failed to validate the query parameters")
+        except KeyError as err:
+            raise errors.ValidationError(message="Failed to validate the query parameters") from err
 
     return decorated_function
 
diff --git a/components/renku_data_services/base_api/pagination.py b/components/renku_data_services/base_api/pagination.py
index 7438283a3..1274f6397 100644
--- a/components/renku_data_services/base_api/pagination.py
+++ b/components/renku_data_services/base_api/pagination.py
@@ -3,10 +3,12 @@
 from collections.abc import Callable, Coroutine, Sequence
 from functools import wraps
 from math import ceil
-from typing import Any, Concatenate, NamedTuple, ParamSpec, cast
+from typing import Any, Concatenate, NamedTuple, ParamSpec, TypeVar, cast
 
 from sanic import Request, json
 from sanic.response import JSONResponse
+from sqlalchemy import Select
+from sqlalchemy.ext.asyncio import AsyncSession
 
 from renku_data_services import errors
 
@@ -72,16 +74,18 @@ async def decorated_function(request: Request, *args: _P.args, **kwargs: _P.kwar
         page_parameter = cast(int | str, query_args.get("page", default_page_number))
         try:
             page = int(page_parameter)
-        except ValueError:
-            raise errors.ValidationError(message=f"Invalid value for parameter 'page': {page_parameter}")
+        except ValueError as err:
+            raise errors.ValidationError(message=f"Invalid value for parameter 'page': {page_parameter}") from err
 
         if page < 1:
             raise errors.ValidationError(message="Parameter 'page' must be a natural number")
         per_page_parameter = cast(int | str, query_args.get("per_page", default_number_of_elements_per_page))
         try:
             per_page = int(per_page_parameter)
-        except ValueError:
-            raise errors.ValidationError(message=f"Invalid value for parameter 'per_page': {per_page_parameter}")
+        except ValueError as err:
+            raise errors.ValidationError(
+                message=f"Invalid value for parameter 'per_page': {per_page_parameter}"
+            ) from err
 
         if per_page < 1 or per_page > 100:
             raise errors.ValidationError(message="Parameter 'per_page' must be between 1 and 100")
@@ -94,3 +98,36 @@ async def decorated_function(request: Request, *args: _P.args, **kwargs: _P.kwar
         return json(items, headers=pagination.as_header())
 
     return decorated_function
+
+
+_T = TypeVar("_T")
+
+
+async def paginate_queries(
+    req: PaginationRequest, session: AsyncSession, stmts: list[tuple[Select[tuple[_T]], int]]
+) -> list[_T]:
+    """Paginate several different queries as if they were part of a single table."""
+    # NOTE: We ignore the possibility that a count for a statement is not accurate. I.e. the count
+    # says that the statement should return 10 items but the statement truly returns 8 or vice-versa.
+    # To fully account for edge cases of inaccuracy in the expected number of results
+    # we would have to run every query passed in - even though the offset is so high that we would only need
+    # to run 1 or 2 queries out of a large list.
+    output: list[_T] = []
+    max_offset = 0
+    stmt_offset = 0
+    offset_discount = 0
+    for stmt, stmt_cnt in stmts:
+        max_offset += stmt_cnt
+        if req.offset >= max_offset:
+            offset_discount += stmt_cnt
+            continue
+        # The requested offset only applies to the first statement that contributes
+        # results; every statement after it must be read from its beginning.
+        stmt_offset = req.offset - offset_discount if req.offset > 0 and len(output) == 0 else 0
+        res_scalar = await session.scalars(stmt.offset(stmt_offset).limit(req.per_page))
+        res = res_scalar.all()
+        num_required = req.per_page - len(output)
+        if num_required >= len(res):
+            output.extend(res)
+        else:
+            output.extend(res[:num_required])
+            return output
+    return output
diff --git a/components/renku_data_services/base_models/__init__.py b/components/renku_data_services/base_models/__init__.py
index 203a4f0ab..0291438bb 100644
--- a/components/renku_data_services/base_models/__init__.py
+++ b/components/renku_data_services/base_models/__init__.py
@@ -1,3 +1,3 @@
 """Common models for all services."""
 
-from renku_data_services.base_models.core import *  # noqa: F401, F403
+from renku_data_services.base_models.core import *  # noqa: F403
diff --git a/components/renku_data_services/base_models/core.py b/components/renku_data_services/base_models/core.py
index d5c50f648..5d45e8cf9 100644
--- a/components/renku_data_services/base_models/core.py
+++ b/components/renku_data_services/base_models/core.py
@@ -1,11 +1,13 @@
 """Base models shared by services."""
 
+from __future__ import annotations
+
 import re
 import unicodedata
 from dataclasses import dataclass, field
 from datetime import datetime
 from enum import Enum, StrEnum
-from typing import ClassVar, NewType, Optional, Protocol, Self, TypeVar
+from typing import ClassVar, Never, NewType, Optional, Protocol, Self, TypeVar, overload
 
 from sanic import Request
 
@@ -14,9 +16,14 @@
 
 @dataclass(kw_only=True, frozen=True)
 class APIUser:
-    """The model for a user of the API, used for authentication."""
+    """The model for a user of the API, used for authentication.
+
+    The id field represents the sub claim from Keycloak - i.e. the Keycloak user id.
+    In the case of anonymous users, the id is the random id generated by the gateway, or
+    by the data services if the request bypassed the gateway.
+    """
 
-    id: str | None = None  # the sub claim in the access token - i.e. the Keycloak user ID
+    id: str | None = None
     access_token: str | None = field(repr=False, default=None)
     refresh_token: str | None = field(repr=False, default=None)
     full_name: str | None = None
@@ -28,13 +35,13 @@ class APIUser:
 
     @property
     def is_authenticated(self) -> bool:
-        """Indicates whether the user has successfully logged in."""
-        return self.id is not None
+        """Indicates whether the user has successfully logged in; anonymous users are not authenticated."""
+        return self.id is not None and not self.is_anonymous
 
     @property
     def is_anonymous(self) -> bool:
         """Indicates whether the user is anonymous."""
-        return isinstance(self, AnonymousAPIUser)
+        return isinstance(self, AnonymousAPIUser) or self.access_token is None
 
     def get_full_name(self) -> str | None:
         """Generate the closest thing to a full name if the full name field is not set."""
@@ -75,6 +82,8 @@ class ServiceAdminId(StrEnum):
     migrations = "migrations"
     secrets_rotation = "secrets_rotation"
+    k8s_watcher = "k8s_watcher"
+    search_reprovision = "search_reprovision"
 
 
 @dataclass(kw_only=True, frozen=True)
@@ -119,7 +128,7 @@ async def filter_projects_by_access_level(
 class UserStore(Protocol):
     """The interface through which Keycloak or a similar application can be accessed."""
 
-    async def get_user_by_id(self, id: str, access_token: str) -> Optional["User"]:
+    async def get_user_by_id(self, id: str, access_token: str) -> Optional[User]:
         """Get a user by their unique Keycloak user ID."""
         ...
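
The combined effect of the two property changes above can be seen in a
small sketch (illustrative only; the semantics follow directly from the
hunk above):

``` python
from renku_data_services.base_models.core import APIUser

# A Keycloak-backed user has both an id (the "sub" claim) and an access token.
logged_in = APIUser(id="keycloak-user-id", access_token="<jwt>")
assert logged_in.is_authenticated and not logged_in.is_anonymous

# An id alone is no longer enough: without an access token the user is
# now treated as anonymous, and anonymous users are not authenticated.
id_only = APIUser(id="gateway-generated-id")
assert id_only.is_anonymous and not id_only.is_authenticated
```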
@@ -133,7 +142,7 @@ class User:
     no_default_access: bool = False

     @classmethod
-    def from_dict(cls, data: dict) -> "User":
+    def from_dict(cls, data: dict) -> User:
         """Create the model from a plain dictionary."""
         return cls(**data)

@@ -154,7 +163,7 @@ class Slug:
     def __init__(self, value: str) -> None:
         if not re.match(self._regex, value):
             raise errors.ValidationError(message=f"The slug {value} does not match the regex {self._regex}")
-        object.__setattr__(self, "value", value.lower())
+        object.__setattr__(self, "value", value)

     @classmethod
     def from_name(cls, name: str) -> Self:
@@ -163,7 +172,7 @@ def from_name(cls, name: str) -> Self:
         no_space = re.sub(r"\s+", "-", lower_case)
         normalized = unicodedata.normalize("NFKD", no_space).encode("ascii", "ignore").decode("utf-8")
         valid_chars_pattern = [r"\w", ".", "_", "-"]
-        no_invalid_characters = re.sub(f'[^{"".join(valid_chars_pattern)}]', "-", normalized)
+        no_invalid_characters = re.sub(f"[^{''.join(valid_chars_pattern)}]", "-", normalized)
         no_duplicates = re.sub(r"([._-])[._-]+", r"\1", no_invalid_characters)
         valid_start = re.sub(r"^[._-]", "", no_duplicates)
         valid_end = re.sub(r"[._-]$", "", valid_start)
@@ -192,13 +201,190 @@ def from_user(cls, email: str | None, first_name: str | None, last_name: str | N
             slug = slug[:80]
         return cls.from_name(slug)

-    def __true_div__(self, other: "Slug") -> str:
-        """Joins two slugs into a path fraction without dashes at the beginning or end."""
-        if type(self) is not type(other):
+    def __str__(self) -> str:
+        return self.value
+
+    def __repr__(self) -> str:
+        return self.value
+
+
+class NamespaceSlug(Slug):
+    """The slug for a group or user namespace."""
+
+
+class ProjectSlug(Slug):
+    """The slug for a project."""
+
+
+class DataConnectorSlug(Slug):
+    """The slug for a data connector."""
+
+
+class __NamespaceCommonMixin:
+    def __repr__(self) -> str:
+        return "/".join([i.value for i in self.to_list()])
+
+    def __getitem__(self, ind: int) -> Slug:
+        return self.to_list()[ind]
+
+    def __len__(self) -> int:
+        return len(self.to_list())
+
+    def to_list(self) -> list[Slug]:
+        raise NotImplementedError
+
+    def serialize(self) -> str:
+        return "/".join([i.value for i in self.to_list()])
+
+
+@dataclass(frozen=True, eq=True, repr=False)
+class NamespacePath(__NamespaceCommonMixin):
+    """The slug that makes up the path to a user or group namespace in Renku."""
+
+    __match_args__ = ("first",)
+    first: NamespaceSlug
+
+    @overload
+    def __truediv__(self, other: ProjectSlug) -> ProjectPath: ...
+    @overload
+    def __truediv__(self, other: DataConnectorSlug) -> DataConnectorPath: ...
+
+    def __truediv__(self, other: ProjectSlug | DataConnectorSlug) -> ProjectPath | DataConnectorPath:
+        """Create new entity path with an extra slug."""
+        if isinstance(other, ProjectSlug):
+            return ProjectPath(self.first, other)
+        elif isinstance(other, DataConnectorSlug):
+            return DataConnectorPath(self.first, other)
+        else:
+            raise errors.ProgrammingError(message=f"A path for a namespace cannot be further joined with {other}")
+
+    def to_list(self) -> list[Slug]:
+        """Convert to list of slugs."""
+        return [self.first]
+
+    def parent(self) -> Never:
+        """The parent path."""
+        raise errors.ProgrammingError(message="A namespace path has no parent")
+
+    def last(self) -> NamespaceSlug:
+        """Return the last slug in the path."""
+        return self.first
+
+    @classmethod
+    def from_strings(cls, *slugs: str) -> Self:
+        """Convert a string to a namespace path."""
+        if len(slugs) != 1:
+            raise errors.ValidationError(message=f"One slug string is needed to create a namespace path, got {slugs}.")
+        return cls(NamespaceSlug(slugs[0]))
+
+
+@dataclass(frozen=True, eq=True, repr=False)
+class ProjectPath(__NamespaceCommonMixin):
+    """The collection of slugs that makes up the path to a project in Renku."""
+
+    __match_args__ = ("first", "second")
+    first: NamespaceSlug
+    second: ProjectSlug
+
+    def __truediv__(self, other: DataConnectorSlug) -> DataConnectorInProjectPath:
+        """Create new entity path with an extra slug."""
+        if not isinstance(other, DataConnectorSlug):
             raise errors.ValidationError(
-                message=f"A path can be constructed only from 2 slugs, but the 'divisor' is of type {type(other)}"
+                message=f"A project path can only be joined with a data connector slug, but got {other}"
             )
-        return self.value + "/" + other.value
+        return DataConnectorInProjectPath(self.first, self.second, other)
+
+    def to_list(self) -> list[Slug]:
+        """Convert to list of slugs."""
+        return [self.first, self.second]
+
+    def parent(self) -> NamespacePath:
+        """The parent path."""
+        return NamespacePath(self.first)
+
+    def last(self) -> ProjectSlug:
+        """Return the last slug in the path."""
+        return self.second
+
+    @classmethod
+    def from_strings(cls, *slugs: str) -> Self:
+        """Convert strings to a project path."""
+        if len(slugs) != 2:
+            raise errors.ValidationError(message=f"Two slug strings are needed to create a project path, got {slugs}.")
+        return cls(NamespaceSlug(slugs[0]), ProjectSlug(slugs[1]))
+
+
+@dataclass(frozen=True, eq=True, repr=False)
+class DataConnectorPath(__NamespaceCommonMixin):
+    """The collection of slugs that makes up the path to a data connector in a user or group in Renku."""
+
+    __match_args__ = ("first", "second")
+    first: NamespaceSlug
+    second: DataConnectorSlug
+
+    def __truediv__(self, other: Never) -> Never:
+        """Create new entity path with an extra slug."""
+        raise errors.ProgrammingError(
+            message="A path for a data connector in a user or group cannot be further joined with more slugs"
+        )
+
+    def to_list(self) -> list[Slug]:
+        """Convert to list of slugs."""
+        return [self.first, self.second]
+
+    def parent(self) -> NamespacePath:
+        """The parent path."""
+        return NamespacePath(self.first)
+
+    def last(self) -> DataConnectorSlug:
+        """Return the last slug in the path."""
+        return self.second
+
+    @classmethod
+    def from_strings(cls, *slugs: str) -> Self:
+        """Convert strings to a data connector path."""
+        if len(slugs) != 2:
+            raise errors.ValidationError(
+                message=f"Two slug strings are needed to create a data connector path, got {slugs}."
+            )
+        return cls(NamespaceSlug(slugs[0]), DataConnectorSlug(slugs[1]))
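# --- Editor's example (not part of the patch): an illustrative sketch of what
# Slug.from_name produces, based on the regex steps above (lowercasing,
# whitespace-to-dash, stripping invalid characters); the exact output is an
# assumption, since parts of from_name sit outside this hunk.
from renku_data_services.base_models.core import Slug

slug = Slug.from_name("My Cool Project!")
# expected: "my-cool-project" (spaces -> dashes, lowercased, "!" stripped)
print(slug.value)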
+
+
+@dataclass(frozen=True, eq=True, repr=False)
+class DataConnectorInProjectPath(__NamespaceCommonMixin):
+    """The collection of slugs that makes up the path to a data connector in a project in Renku."""
+
+    __match_args__ = ("first", "second", "third")
+    first: NamespaceSlug
+    second: ProjectSlug
+    third: DataConnectorSlug
+
+    def __truediv__(self, other: Never) -> Never:
+        """Create new entity path with an extra slug."""
+        raise errors.ProgrammingError(
+            message="A path for a data connector in a project cannot be further joined with more slugs"
+        )
+
+    def to_list(self) -> list[Slug]:
+        """Convert to list of slugs."""
+        return [self.first, self.second, self.third]
+
+    def parent(self) -> ProjectPath:
+        """The parent path."""
+        return ProjectPath(self.first, self.second)
+
+    def last(self) -> DataConnectorSlug:
+        """Return the last slug in the path."""
+        return self.third
+
+    @classmethod
+    def from_strings(cls, *slugs: str) -> Self:
+        """Convert strings to a data connector path."""
+        if len(slugs) != 3:
+            raise errors.ValidationError(
+                message=f"Three slug strings are needed to create a data connector in project path, got {slugs}."
+            )
+        return cls(NamespaceSlug(slugs[0]), ProjectSlug(slugs[1]), DataConnectorSlug(slugs[2]))


 AnyAPIUser = TypeVar("AnyAPIUser", bound=APIUser, covariant=True)
@@ -221,3 +407,15 @@ async def authenticate(self, access_token: str, request: Request) -> AnyAPIUser:

 RESET: ResetType = ResetType(object())
 """The single instance of the ResetType, can be compared to similar to None, i.e. `if value is RESET`"""
+
+
+class ResourceType(StrEnum):
+    """All possible resources stored in Authzed."""
+
+    project = "project"
+    user = "user"
+    anonymous_user = "anonymous_user"
+    platform = "platform"
+    group = "group"
+    user_namespace = "user_namespace"
+    data_connector = "data_connector"
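# --- Editor's example (not part of the patch): an illustrative sketch of the
# typed path algebra defined above - the "/" operator builds progressively
# longer entity paths, and the overloads reject invalid combinations.
from renku_data_services.base_models.core import (
    DataConnectorSlug,
    NamespacePath,
    ProjectSlug,
)

ns = NamespacePath.from_strings("my-group")
project = ns / ProjectSlug("my-project")        # -> ProjectPath
dc = project / DataConnectorSlug("my-data")     # -> DataConnectorInProjectPath
assert dc.serialize() == "my-group/my-project/my-data"
assert dc.parent().serialize() == "my-group/my-project"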
diff --git a/components/renku_data_services/base_models/metrics.py b/components/renku_data_services/base_models/metrics.py
new file mode 100644
index 000000000..fbf4cb16d
--- /dev/null
+++ b/components/renku_data_services/base_models/metrics.py
@@ -0,0 +1,103 @@
+"""Protocol for metrics service."""
+
+from enum import StrEnum
+from typing import Protocol
+
+from renku_data_services.base_models.core import APIUser
+
+
+class MetricsEvent(StrEnum):
+    """The different types of metrics events."""
+
+    code_repo_linked_to_project = "code_repo_linked_to_project"
+    data_connector_created = "data_connector_created"
+    data_connector_linked = "data_connector_linked"
+    group_created = "group_created"
+    group_member_added = "group_member_added"
+    project_created = "project_created"
+    project_member_added = "project_member_added"
+    search_queried = "search_queried"
+    session_hibernated = "session_hibernated"
+    session_launcher_created = "session_launcher_created"
+    session_resumed = "session_resumed"
+    session_started = "session_started"
+    session_stopped = "session_stopped"
+    user_requested_session_launch = "user_requested_session_launch"
+    user_requested_session_resume = "user_requested_session_resume"
+
+
+type MetricsMetadata = dict[str, str | int | bool]
+
+
+class MetricsService(Protocol):
+    """Protocol for sending product metrics."""
+
+    async def session_started(self, user: APIUser, metadata: MetricsMetadata) -> None:
+        """Send session start event to metrics."""
+        ...
+
+    async def session_resumed(self, user: APIUser, metadata: MetricsMetadata) -> None:
+        """Send session resumed event to metrics."""
+        ...
+
+    async def session_hibernated(self, user: APIUser, metadata: MetricsMetadata) -> None:
+        """Send session paused event to metrics."""
+        ...
+
+    async def session_stopped(self, user: APIUser, metadata: MetricsMetadata) -> None:
+        """Send session stopped event to metrics."""
+        ...
+
+    async def session_launcher_created(
+        self, user: APIUser, environment_kind: str, environment_image_source: str
+    ) -> None:
+        """Send session launcher created event to metrics."""
+        ...
+
+    async def project_created(self, user: APIUser, metadata: MetricsMetadata) -> None:
+        """Send project created event to metrics."""
+        ...
+
+    async def code_repo_linked_to_project(self, user: APIUser) -> None:
+        """Send code linked to project event to metrics."""
+        ...
+
+    async def data_connector_created(self, user: APIUser) -> None:
+        """Send data connector created event to metrics."""
+        ...
+
+    async def data_connector_linked(self, user: APIUser) -> None:
+        """Send data connector linked event to metrics."""
+        ...
+
+    async def project_member_added(self, user: APIUser) -> None:
+        """Send project member added event to metrics."""
+        ...
+
+    async def group_created(self, user: APIUser) -> None:
+        """Send group created event to metrics."""
+        ...
+
+    async def group_member_added(self, user: APIUser) -> None:
+        """Send group member added event to metrics."""
+        ...
+
+    async def search_queried(self, user: APIUser) -> None:
+        """Send search queried event to metrics."""
+        ...
+
+    async def user_requested_session_launch(self, user: APIUser, metadata: MetricsMetadata) -> None:
+        """Send event about user requesting session launch."""
+        ...
+
+    async def user_requested_session_resume(self, user: APIUser, metadata: MetricsMetadata) -> None:
+        """Send event about user requesting session resume."""
+        ...
+
+
+class ProjectCreationType(StrEnum):
+    """The different types of project creation metrics."""
+
+    new = "new"
+    migrated = "migrated"
+    copied = "copied"
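# --- Editor's example (not part of the patch): a minimal sketch showing how a
# caller can depend on the structural MetricsService protocol. LoggingMetrics
# is a hypothetical implementation and only two protocol methods are shown for
# brevity.
class LoggingMetrics:
    async def project_created(self, user: APIUser, metadata: MetricsMetadata) -> None:
        print(f"project_created by {user.id}: {metadata}")

    async def group_created(self, user: APIUser) -> None:
        print(f"group_created by {user.id}")

async def record_new_project(metrics: MetricsService, user: APIUser) -> None:
    # Any object with matching method signatures satisfies the protocol;
    # no inheritance from MetricsService is needed.
    await metrics.project_created(user, {"project_creation_kind": "new"})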
diff --git a/components/renku_data_services/base_models/nel.py b/components/renku_data_services/base_models/nel.py
new file mode 100644
index 000000000..93594d75d
--- /dev/null
+++ b/components/renku_data_services/base_models/nel.py
@@ -0,0 +1,107 @@
+"""Non empty list."""
+
+from __future__ import annotations
+
+from collections.abc import Callable, Iterable, Iterator, Sequence
+from dataclasses import dataclass
+from dataclasses import field as data_field
+from typing import Never, overload
+
+
+@dataclass
+class Nel[A](Sequence[A]):
+    """A non empty list."""
+
+    value: A
+    more_values: Sequence[A] = data_field(default_factory=list)
+
+    @classmethod
+    def of(cls, el: A, *args: A) -> Nel[A]:
+        """Constructor using varargs."""
+        return Nel(value=el, more_values=list(args))
+
+    @classmethod
+    def unsafe_from_list(cls, els: Sequence[A]) -> Nel[A]:
+        """Creates a non-empty list from a list, failing if the argument is empty."""
+        return Nel(els[0], els[1:])
+
+    @classmethod
+    def from_list(cls, els: Sequence[A]) -> Nel[A] | None:
+        """Creates a non-empty list from a list."""
+        if els == []:
+            return None
+        else:
+            return cls.unsafe_from_list(els)
+
+    def __iter__(self) -> Iterator[A]:
+        return _NelIterator(self.value, self.more_values)
+
+    @overload
+    def __getitem__(self, key: int) -> A: ...
+    @overload
+    def __getitem__(self, key: slice[int]) -> Never: ...
+
+    def __getitem__(self, key: int | slice[int]) -> A | Sequence[A]:
+        if isinstance(key, slice):
+            raise NotImplementedError("slicing non-empty lists is not supported")
+        if key == 0:
+            return self.value
+        else:
+            return self.more_values[key - 1]
+
+    def __len__(self) -> int:
+        return len(self.more_values) + 1
+
+    def append(self, other: Iterable[A]) -> Nel[A]:
+        """Append other to this list."""
+        if not other:
+            return self
+        else:
+            remain = [*self.more_values, *other]
+            return Nel(self.value, remain)
+
+    def to_list(self) -> list[A]:
+        """Convert to a list."""
+        lst = [self.value]
+        lst.extend(self.more_values)
+        return lst
+
+    def to_set(self) -> set[A]:
+        """Convert to a set."""
+        return set(self.more_values) | {self.value}
+
+    def mk_string(self, sep: str, f: Callable[[A], str] = str) -> str:
+        """Create a str from all elements mapped over f."""
+        return sep.join([f(x) for x in self])
+
+    def map[B](self, f: Callable[[A], B]) -> Nel[B]:
+        """Maps `f` over this list."""
+        head = f(self.value)
+        rest = [f(x) for x in self.more_values]
+        return Nel(head, rest)
+
+
+class _NelIterator[A](Iterator[A]):
+    """Iterator for non empty lists."""
+
+    def __init__(self, head: A, tail: Sequence[A]) -> None:
+        self._head = head
+        self._tail = tail
+        self._tail_len = len(tail)
+        self._index = 0
+
+    def __iter__(self) -> Iterator[A]:
+        return self
+
+    def __next__(self) -> A:
+        if self._index == 0:
+            self._index += 1
+            return self._head
+        else:
+            idx = self._index - 1
+            if idx < self._tail_len:
+                item = self._tail[idx]
+                self._index += 1
+                return item
+            else:
+                raise StopIteration
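# --- Editor's example (not part of the patch): an illustrative sketch of the
# Nel data structure above - emptiness is ruled out by construction, so
# first-element access never fails.
from renku_data_services.base_models.nel import Nel

names = Nel.of("alpha", "beta", "gamma")
assert len(names) == 3
assert names[0] == "alpha"
assert names.mk_string(", ") == "alpha, beta, gamma"
assert Nel.from_list([]) is None                # empty input has no Nel representation
assert names.map(str.upper).to_list() == ["ALPHA", "BETA", "GAMMA"]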
diff --git a/components/renku_data_services/base_models/validation.py b/components/renku_data_services/base_models/validation.py
index 5d4cf267f..5d0bc00d0 100644
--- a/components/renku_data_services/base_models/validation.py
+++ b/components/renku_data_services/base_models/validation.py
@@ -15,10 +15,14 @@ def validate_and_dump(
     model: type[BaseModel],
     data: Any,
     exclude_none: bool = True,
+    **kwargs: Any,
 ) -> Any:
-    """Validate and dump with a pydantic model, ensuring proper validation errors."""
+    """Validate and dump with a pydantic model, ensuring proper validation errors.
+
+    kwargs are passed on to the pydantic model `model_dump` method.
+    """
     try:
-        body = model.model_validate(data).model_dump(exclude_none=exclude_none, mode="json")
+        body = model.model_validate(data).model_dump(exclude_none=exclude_none, mode="json", **kwargs)
     except PydanticValidationError as err:
         parts = [".".join(str(i) for i in field["loc"]) + ": " + field["msg"] for field in err.errors()]
         message = (
@@ -36,11 +40,12 @@ def validated_json(
     content_type: str = "application/json",
     dumps: Callable[..., str] | None = None,
     exclude_none: bool = True,
+    model_dump_kwargs: dict[str, Any] | None = None,
     **kwargs: Any,
 ) -> JSONResponse:
     """Creates a JSON response with data validation.

     If the input data fails validation, an HTTP status code 500 will be raised.
""" - body = validate_and_dump(model, data, exclude_none) + body = validate_and_dump(model, data, exclude_none, **(model_dump_kwargs or {})) return json(body, status=status, headers=headers, content_type=content_type, dumps=dumps, **kwargs) diff --git a/components/renku_data_services/connected_services/apispec.py b/components/renku_data_services/connected_services/apispec.py index 871c08e57..bac27d378 100644 --- a/components/renku_data_services/connected_services/apispec.py +++ b/components/renku_data_services/connected_services/apispec.py @@ -1,6 +1,6 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2024-08-22T08:15:52+00:00 +# timestamp: 2025-03-19T10:21:11+00:00 from __future__ import annotations @@ -52,11 +52,13 @@ class PaginationRequest(BaseAPISpec): class Error(BaseAPISpec): - code: int = Field(..., example=1404, gt=0) + code: int = Field(..., examples=[1404], gt=0) detail: Optional[str] = Field( - None, example="A more detailed optional message showing what the problem was" + None, examples=["A more detailed optional message showing what the problem was"] + ) + message: str = Field( + ..., examples=["Something went wrong - please try again later"] ) - message: str = Field(..., example="Something went wrong - please try again later") class ErrorResponse(BaseAPISpec): @@ -88,35 +90,35 @@ class Provider(BaseAPISpec): id: str = Field( ..., description='ID of a OAuth2 provider, e.g. "gitlab.com".', - example="some-id", + examples=["some-id"], ) kind: ProviderKind app_slug: str = Field( ..., description="URL-friendly name of the application. This field only applies to\nGitHub Applications. The slug is provided by GitHub when\nsetting up a GitHub App.\n", - example="my-application", + examples=["my-application"], ) client_id: str = Field( ..., description="Client ID or Application ID value. This is provided by\nthe Resource Server when setting up a new OAuth2 Client.\n", - example="some-client-id", + examples=["some-client-id"], ) client_secret: Optional[str] = Field( None, description="Client secret provided by the Resource Server when setting\nup a new OAuth2 Client.\n", - example="some-client-secret", + examples=["some-client-secret"], ) - display_name: str = Field(..., example="my oauth2 application") - scope: str = Field(..., example="api") + display_name: str = Field(..., examples=["my oauth2 application"]) + scope: str = Field(..., examples=["api"]) url: str = Field( ..., description='The base URL of the OAuth2 Resource Server, e.g. "https://gitlab.com".\n', - example="https://example.org", + examples=["https://example.org"], ) use_pkce: bool = Field( ..., description="Whether or not to use PKCE during authorization flows.\n", - example=False, + examples=[False], ) @@ -127,35 +129,35 @@ class ProviderPost(BaseAPISpec): id: str = Field( ..., description='ID of a OAuth2 provider, e.g. "gitlab.com".', - example="some-id", + examples=["some-id"], ) kind: ProviderKind app_slug: Optional[str] = Field( None, description="URL-friendly name of the application. This field only applies to\nGitHub Applications. The slug is provided by GitHub when\nsetting up a GitHub App.\n", - example="my-application", + examples=["my-application"], ) client_id: str = Field( ..., description="Client ID or Application ID value. 
This is provided by\nthe Resource Server when setting up a new OAuth2 Client.\n", - example="some-client-id", + examples=["some-client-id"], ) client_secret: Optional[str] = Field( None, description="Client secret provided by the Resource Server when setting\nup a new OAuth2 Client.\n", - example="some-client-secret", + examples=["some-client-secret"], ) - display_name: str = Field(..., example="my oauth2 application") - scope: str = Field(..., example="api") + display_name: str = Field(..., examples=["my oauth2 application"]) + scope: str = Field(..., examples=["api"]) url: str = Field( ..., description='The base URL of the OAuth2 Resource Server, e.g. "https://gitlab.com".\n', - example="https://example.org", + examples=["https://example.org"], ) use_pkce: Optional[bool] = Field( None, description="Whether or not to use PKCE during authorization flows.\n", - example=False, + examples=[False], ) @@ -167,29 +169,29 @@ class ProviderPatch(BaseAPISpec): app_slug: Optional[str] = Field( None, description="URL-friendly name of the application. This field only applies to\nGitHub Applications. The slug is provided by GitHub when\nsetting up a GitHub App.\n", - example="my-application", + examples=["my-application"], ) client_id: Optional[str] = Field( None, description="Client ID or Application ID value. This is provided by\nthe Resource Server when setting up a new OAuth2 Client.\n", - example="some-client-id", + examples=["some-client-id"], ) client_secret: Optional[str] = Field( None, description="Client secret provided by the Resource Server when setting\nup a new OAuth2 Client.\n", - example="some-client-secret", + examples=["some-client-secret"], ) - display_name: Optional[str] = Field(None, example="my oauth2 application") - scope: Optional[str] = Field(None, example="api") + display_name: Optional[str] = Field(None, examples=["my oauth2 application"]) + scope: Optional[str] = Field(None, examples=["api"]) url: Optional[str] = Field( None, description='The base URL of the OAuth2 Resource Server, e.g. "https://gitlab.com".\n', - example="https://example.org", + examples=["https://example.org"], ) use_pkce: Optional[bool] = Field( None, description="Whether or not to use PKCE during authorization flows.\n", - example=False, + examples=[False], ) @@ -207,7 +209,7 @@ class Connection(BaseAPISpec): provider_id: str = Field( ..., description='ID of a OAuth2 provider, e.g. "gitlab.com".', - example="some-id", + examples=["some-id"], ) status: ConnectionStatus @@ -216,11 +218,11 @@ class ConnectedAccount(BaseAPISpec): model_config = ConfigDict( extra="forbid", ) - username: str = Field(..., example="some-username") + username: str = Field(..., examples=["some-username"]) web_url: str = Field( ..., description="A URL which can be opened in a browser, i.e. 
a web page.", - example="https://example.org", + examples=["https://example.org"], ) diff --git a/components/renku_data_services/connected_services/blueprints.py b/components/renku_data_services/connected_services/blueprints.py index 4cc156d20..69f431379 100644 --- a/components/renku_data_services/connected_services/blueprints.py +++ b/components/renku_data_services/connected_services/blueprints.py @@ -5,12 +5,12 @@ from urllib.parse import unquote, urlparse, urlunparse from sanic import HTTPResponse, Request, json, redirect -from sanic.log import logger from sanic.response import JSONResponse from sanic_ext import validate from ulid import ULID import renku_data_services.base_models as base_models +from renku_data_services.app_config import logging from renku_data_services.base_api.auth import authenticate, only_admins, only_authenticated from renku_data_services.base_api.blueprint import BlueprintFactoryResponse, CustomBlueprint from renku_data_services.base_api.misc import validate_query @@ -21,6 +21,8 @@ from renku_data_services.connected_services.core import validate_oauth2_client_patch from renku_data_services.connected_services.db import ConnectedServicesRepository +logger = logging.getLogger(__name__) + @dataclass(kw_only=True) class OAuth2ClientsBP(CustomBlueprint): diff --git a/components/renku_data_services/connected_services/db.py b/components/renku_data_services/connected_services/db.py index 929afc76b..c123ab6a8 100644 --- a/components/renku_data_services/connected_services/db.py +++ b/components/renku_data_services/connected_services/db.py @@ -7,8 +7,7 @@ from urllib.parse import urljoin from authlib.integrations.base_client import InvalidTokenError -from authlib.integrations.httpx_client import AsyncOAuth2Client -from sanic.log import logger +from authlib.integrations.httpx_client import AsyncOAuth2Client, OAuthError from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import selectinload @@ -16,6 +15,7 @@ import renku_data_services.base_models as base_models from renku_data_services import errors +from renku_data_services.app_config import logging from renku_data_services.base_api.pagination import PaginationRequest from renku_data_services.connected_services import apispec, models from renku_data_services.connected_services import orm as schemas @@ -28,6 +28,8 @@ from renku_data_services.connected_services.utils import generate_code_verifier from renku_data_services.utils.cryptography import decrypt_string, encrypt_string +logger = logging.getLogger(__name__) + class ConnectedServicesRepository: """Repository for connected services.""" @@ -63,7 +65,7 @@ async def get_oauth2_client(self, provider_id: str, user: base_models.APIUser) - client = result.one_or_none() if client is None: raise errors.MissingResourceError( - message=f"OAuth2 Client with id '{provider_id}' does not exist or you do not have access to it." # noqa: E501 + message=f"OAuth2 Client with id '{provider_id}' does not exist or you do not have access to it." ) return client.dump(user_is_admin=user.is_admin) @@ -302,7 +304,7 @@ async def get_oauth2_connection(self, connection_id: ULID, user: base_models.API """Get one OAuth2 connection from the database.""" if not user.is_authenticated or user.id is None: raise errors.MissingResourceError( - message=f"OAuth2 connection with id '{connection_id}' does not exist or you do not have access to it." # noqa: E501 + message=f"OAuth2 connection with id '{connection_id}' does not exist or you do not have access to it." 
) async with self.session_maker() as session: @@ -343,7 +345,16 @@ async def get_oauth2_connection_token( ) -> models.OAuth2TokenSet: """Get the OAuth2 access token from one connection from the database.""" async with self.get_async_oauth2_client(connection_id=connection_id, user=user) as (oauth2_client, _, _): - await oauth2_client.ensure_active_token(oauth2_client.token) + try: + await oauth2_client.ensure_active_token(oauth2_client.token) + except OAuthError as err: + if err.error == "bad_refresh_token": + raise errors.InvalidTokenError( + message="The refresh token for the connected service has expired or is invalid.", + detail=f"Please reconnect your integration for the service with ID {str(connection_id)} " + "and try again.", + ) from err + raise token_model = models.OAuth2TokenSet.from_dict(oauth2_client.token) return token_model @@ -360,7 +371,16 @@ async def get_oauth2_app_installations( if connection.client.kind == ProviderKind.github and isinstance(adapter, GitHubAdapter): request_url = urljoin(adapter.api_url, "user/installations") params = dict(page=pagination.page, per_page=pagination.per_page) - response = await oauth2_client.get(request_url, params=params, headers=adapter.api_common_headers) + try: + response = await oauth2_client.get(request_url, params=params, headers=adapter.api_common_headers) + except OAuthError as err: + if err.error == "bad_refresh_token": + raise errors.InvalidTokenError( + message="The refresh token for the connected service has expired or is invalid.", + detail=f"Please reconnect your integration for the service with ID {str(connection_id)} " + "and try again.", + ) from err + raise if response.status_code > 200: raise errors.UnauthorizedError(message="Could not get installation information.") @@ -376,7 +396,7 @@ async def get_async_oauth2_client( """Get the AsyncOAuth2Client for the given connection_id and user.""" if not user.is_authenticated or user.id is None: raise errors.MissingResourceError( - message=f"OAuth2 connection with id '{connection_id}' does not exist or you do not have access to it." # noqa: E501 + message=f"OAuth2 connection with id '{connection_id}' does not exist or you do not have access to it." 
) async with self.session_maker() as session: diff --git a/components/renku_data_services/crc/api.spec.yaml b/components/renku_data_services/crc/api.spec.yaml index 047fb3248..d0cc060f2 100644 --- a/components/renku_data_services/crc/api.spec.yaml +++ b/components/renku_data_services/crc/api.spec.yaml @@ -9,219 +9,249 @@ info: servers: - url: /api/data paths: - /resource_pools: + /classes/{class_id}: get: - summary: Get all resource pool definitions + summary: Get a specific resource class parameters: - - in: query - description: query parameters - name: resource_pools_params - style: form - explode: true + - in: path + name: class_id + required: true schema: - type: object - additionalProperties: false - properties: - cpu: - $ref: "#/components/schemas/CpuFilter" - default: 0.0 - gpu: - $ref: "#/components/schemas/Gpu" - default: 0 - memory: - $ref: "#/components/schemas/MemoryFilter" - default: 0 - max_storage: - $ref: "#/components/schemas/StorageFilter" - default: 0 + type: string responses: "200": - description: The resource pool definitions + description: The resource class that was requested content: "application/json": schema: - $ref: "#/components/schemas/ResourcePoolsWithIdFiltered" + $ref: "#/components/schemas/ResourceClassWithId" + "404": + description: The resource class does not exist + content: + "application/json": + schema: + $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - - resource_pools + - classes + /clusters: + get: + summary: Get all the cluster configurations + responses: + "200": + description: The cluster configurations + content: + "application/json": + schema: + $ref: "#/components/schemas/ClustersWithId" + default: + $ref: "#/components/responses/Error" + tags: + - clusters post: - summary: Create a new resource pool + summary: Create a new cluster configuration requestBody: required: true content: - application/json: + "application/json": schema: - $ref: "#/components/schemas/ResourcePool" + $ref: "#/components/schemas/Cluster" responses: "201": - description: The resource pool was created + description: The cluster configuration was created content: "application/json": schema: - $ref: "#/components/schemas/ResourcePoolWithId" + $ref: "#/components/schemas/ClusterWithId" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - - resource_pools - /resource_pools/{resource_pool_id}: + - clusters + /clusters/{cluster_id}: get: - summary: Get a resource pool definition + summary: Get a cluster configuration parameters: - in: path - name: resource_pool_id + name: cluster_id required: true schema: - type: integer + $ref: "#/components/schemas/Ulid" responses: "200": - description: The resource pool definition + description: The cluster configuration content: "application/json": schema: - $ref: "#/components/schemas/ResourcePoolWithId" + $ref: "#/components/schemas/ClusterWithId" "404": - description: The resource pool does not exist + description: The cluster configuration does not exist content: "application/json": schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - - resource_pools + - clusters put: - summary: Update an existing resource pool + summary: Update an existing cluster configuration parameters: - in: path - name: resource_pool_id + name: cluster_id required: true schema: - type: integer + $ref: "#/components/schemas/Ulid" requestBody: 
required: true content: - application/json: + "application/json": schema: - $ref: "#/components/schemas/ResourcePoolPut" + $ref: "#/components/schemas/Cluster" responses: "200": - description: The resource pool definition + description: The cluster configuration content: "application/json": schema: - $ref: "#/components/schemas/ResourcePoolWithId" + $ref: "#/components/schemas/ClusterWithId" "404": - description: The resource pool does not exist + description: The cluster configuration does not exist content: "application/json": schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - - resource_pools + - clusters patch: - summary: Update specific fields of an existing resource pool + summary: Update specific fields of an existing cluster configuration parameters: - in: path - name: resource_pool_id + name: cluster_id required: true schema: - type: integer + $ref: "#/components/schemas/Ulid" requestBody: required: true content: - application/json: + "application/json": schema: - $ref: "#/components/schemas/ResourcePoolPatch" + $ref: "#/components/schemas/ClusterPatch" responses: "200": - description: The resource pool definition + description: The cluster configuration content: "application/json": schema: - $ref: "#/components/schemas/ResourcePoolWithId" + $ref: "#/components/schemas/ClusterWithId" "404": - description: The resource pool does not exist + description: The cluster configuration does not exist content: "application/json": schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - - resource_pools + - clusters delete: - summary: Remove a resource pool + summary: Remove a cluster configuration parameters: - in: path - name: resource_pool_id + name: cluster_id required: true schema: - type: integer + $ref: "#/components/schemas/Ulid" responses: "204": - description: The resource pool was removed or did not exist in the first place + description: The cluster configuration was removed or did not exist in the first place default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - - resource_pools - /resource_pools/{resource_pool_id}/users: + - clusters + /error: get: - summary: Get all users that have access to a resource pool + summary: Get a sample error response with status code 422 + responses: + "422": + description: The error + content: + "application/json": + schema: + $ref: "#/components/schemas/ErrorResponse" + /resource_pools: + get: + summary: Get all resource pool definitions parameters: - - in: path - name: resource_pool_id - required: true + - in: query + description: query parameters + name: resource_pools_params + style: form + explode: true schema: - type: integer + type: object + additionalProperties: false + properties: + cpu: + $ref: "#/components/schemas/CpuFilter" + default: 0.0 + gpu: + $ref: "#/components/schemas/Gpu" + default: 0 + memory: + $ref: "#/components/schemas/MemoryFilter" + default: 0 + max_storage: + $ref: "#/components/schemas/StorageFilter" + default: 0 responses: "200": - description: The list of users + description: The resource pool definitions content: "application/json": schema: - $ref: "#/components/schemas/PoolUsersWithId" - "404": - description: The resource pool does not exist + $ref: "#/components/schemas/ResourcePoolsWithIdFiltered" + default: + $ref: "#/components/responses/Error" + tags: + - resource_pools + post: + 
summary: Create a new resource pool + requestBody: + required: true + content: + "application/json": + schema: + $ref: "#/components/schemas/ResourcePool" + responses: + "201": + description: The resource pool was created content: "application/json": schema: - $ref: "#/components/schemas/ErrorResponse" + $ref: "#/components/schemas/ResourcePoolWithId" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - resource_pools - post: - summary: Add users to the list of users that have access to the resource pool + /resource_pools/{resource_pool_id}: + get: + summary: Get a resource pool definition parameters: - in: path name: resource_pool_id required: true schema: type: integer - requestBody: - description: List of user Ids - required: true - content: - application/json: - schema: - $ref: "#/components/schemas/PoolUsersWithId" - example: - - id: 543-user-id - - id: 123-some-user responses: - "201": - description: The list of users was updated + "200": + description: The resource pool definition content: "application/json": schema: - $ref: "#/components/schemas/PoolUsersWithId" - example: - - id: 123-user-id - - id: user-id - - id: 456-user-id + $ref: "#/components/schemas/ResourcePoolWithId" "404": description: The resource pool does not exist content: @@ -229,11 +259,11 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - resource_pools put: - summary: Set the list of users that has access to the resource pool + summary: Update an existing resource pool parameters: - in: path name: resource_pool_id @@ -241,19 +271,18 @@ paths: schema: type: integer requestBody: - description: List of user Ids required: true content: - application/json: + "application/json": schema: - $ref: "#/components/schemas/PoolUsersWithId" + $ref: "#/components/schemas/ResourcePoolPut" responses: "200": - description: The list of users was updated + description: The resource pool definition content: "application/json": schema: - $ref: "#/components/schemas/PoolUsersWithId" + $ref: "#/components/schemas/ResourcePoolWithId" "404": description: The resource pool does not exist content: @@ -261,58 +290,53 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - resource_pools - /resource_pools/{resource_pool_id}/users/{user_id}: - get: - summary: Check if a specific user belongs to a specific resource pool + patch: + summary: Update specific fields of an existing resource pool parameters: - in: path name: resource_pool_id required: true schema: type: integer - - in: path - name: user_id - required: true - schema: - type: string + requestBody: + required: true + content: + "application/json": + schema: + $ref: "#/components/schemas/ResourcePoolPatch" responses: "200": - description: The user belongs to the resource pool + description: The resource pool definition content: "application/json": schema: - $ref: "#/components/schemas/PoolUserWithId" + $ref: "#/components/schemas/ResourcePoolWithId" "404": - description: The user does not belong to the resource pool, or the resource pool or user do not exist + description: The resource pool does not exist content: "application/json": schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - resource_pools delete: - summary: Remove a specific user from a 
specific resource pool + summary: Remove a resource pool parameters: - in: path name: resource_pool_id required: true schema: type: integer - - in: path - name: user_id - required: true - schema: - type: string responses: "204": - description: The user was removed or it was not part of the pool + description: The resource pool was removed or did not exist in the first place default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - resource_pools /resource_pools/{resource_pool_id}/classes: @@ -349,7 +373,7 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - classes post: @@ -363,7 +387,7 @@ paths: requestBody: required: true content: - application/json: + "application/json": schema: $ref: "#/components/schemas/ResourceClass" responses: @@ -380,7 +404,7 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - classes /resource_pools/{resource_pool_id}/classes/{class_id}: @@ -411,7 +435,7 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - classes put: @@ -430,7 +454,7 @@ paths: requestBody: required: true content: - application/json: + "application/json": schema: $ref: "#/components/schemas/ResourceClass" responses: @@ -447,7 +471,7 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - classes patch: @@ -466,7 +490,7 @@ paths: requestBody: required: true content: - application/json: + "application/json": schema: $ref: "#/components/schemas/ResourceClassPatch" responses: @@ -483,7 +507,7 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - classes delete: @@ -503,7 +527,7 @@ paths: "204": description: The resource class was removed or did not exist in the first place default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - classes /resource_pools/{resource_pool_id}/classes/{class_id}/tolerations: @@ -528,7 +552,7 @@ paths: schema: $ref: "#/components/schemas/K8sLabelList" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - classes delete: @@ -548,7 +572,7 @@ paths: "204": description: The tolerations have been removed default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - classes /resource_pools/{resource_pool_id}/classes/{class_id}/node_affinities: @@ -573,7 +597,7 @@ paths: schema: $ref: "#/components/schemas/NodeAffinityListResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - classes delete: @@ -593,35 +617,157 @@ paths: "204": description: The node affinities have been removed default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - classes - /classes/{class_id}: + /resource_pools/{resource_pool_id}/users: get: - summary: Get a specific resource class + summary: Get all users that have access to a resource pool parameters: - in: path - name: class_id + name: resource_pool_id + required: true + schema: + type: integer + responses: + "200": + description: The list of users + content: + "application/json": + schema: + $ref: 
"#/components/schemas/PoolUsersWithId" + "404": + description: The resource pool does not exist + content: + "application/json": + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - resource_pools + post: + summary: Add users to the list of users that have access to the resource pool + parameters: + - in: path + name: resource_pool_id + required: true + schema: + type: integer + requestBody: + description: List of user Ids + required: true + content: + "application/json": + schema: + $ref: "#/components/schemas/PoolUsersWithId" + example: + - id: 543-user-id + - id: 123-some-user + responses: + "201": + description: The list of users was updated + content: + "application/json": + schema: + $ref: "#/components/schemas/PoolUsersWithId" + example: + - id: 123-user-id + - id: user-id + - id: 456-user-id + "404": + description: The resource pool does not exist + content: + "application/json": + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - resource_pools + put: + summary: Set the list of users that has access to the resource pool + parameters: + - in: path + name: resource_pool_id + required: true + schema: + type: integer + requestBody: + description: List of user Ids + required: true + content: + "application/json": + schema: + $ref: "#/components/schemas/PoolUsersWithId" + responses: + "200": + description: The list of users was updated + content: + "application/json": + schema: + $ref: "#/components/schemas/PoolUsersWithId" + "404": + description: The resource pool does not exist + content: + "application/json": + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - resource_pools + /resource_pools/{resource_pool_id}/users/{user_id}: + get: + summary: Check if a specific user belongs to a specific resource pool + parameters: + - in: path + name: resource_pool_id + required: true + schema: + type: integer + - in: path + name: user_id required: true schema: type: string responses: "200": - description: The resource class that was requested + description: The user belongs to the resource pool content: "application/json": schema: - $ref: "#/components/schemas/ResourceClassWithId" + $ref: "#/components/schemas/PoolUserWithId" "404": - description: The resource class does not exist + description: The user does not belong to the resource pool, or the resource pool or user do not exist content: "application/json": schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - - classes + - resource_pools + delete: + summary: Remove a specific user from a specific resource pool + parameters: + - in: path + name: resource_pool_id + required: true + schema: + type: integer + - in: path + name: user_id + required: true + schema: + type: string + responses: + "204": + description: The user was removed or it was not part of the pool + default: + $ref: "#/components/responses/Error" + tags: + - resource_pools /resource_pools/{resource_pool_id}/quota: get: summary: Get the quota associated with the resource pool @@ -645,7 +791,7 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - quota put: @@ -659,7 +805,7 @@ paths: requestBody: required: true content: - application/json: + "application/json": schema: $ref: 
"#/components/schemas/QuotaWithId" responses: @@ -676,7 +822,7 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - quota patch: @@ -690,7 +836,7 @@ paths: requestBody: required: true content: - application/json: + "application/json": schema: $ref: "#/components/schemas/QuotaPatch" responses: @@ -707,7 +853,7 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - quota /users/{user_id}/resource_pools: @@ -744,7 +890,7 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - users post: @@ -759,7 +905,7 @@ paths: description: List of resource pool IDs required: true content: - application/json: + "application/json": schema: $ref: "#/components/schemas/IntegerIds" responses: @@ -776,7 +922,7 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - users put: @@ -791,7 +937,7 @@ paths: description: List of resource pool IDs required: true content: - application/json: + "application/json": schema: $ref: "#/components/schemas/IntegerIds" responses: @@ -808,19 +954,9 @@ paths: schema: $ref: "#/components/schemas/ErrorResponse" default: - $ref: '#/components/responses/Error' + $ref: "#/components/responses/Error" tags: - users - /error: - get: - summary: Get a sample error response with status code 422 - responses: - "422": - description: The error - content: - "application/json": - schema: - $ref: "#/components/schemas/ErrorResponse" /version: get: summary: Get the version of the service @@ -834,6 +970,86 @@ paths: components: schemas: + Cluster: + type: object + additionalProperties: false + properties: + name: + $ref: "#/components/schemas/Name" + config_name: + $ref: "#/components/schemas/ConfigName" + session_protocol: + $ref: "#/components/schemas/Protocol" + session_host: + $ref: "#/components/schemas/Host" + session_port: + $ref: "#/components/schemas/Port" + session_path: + type: string + session_ingress_annotations: + $ref: "#/components/schemas/IngressAnnotations" + session_tls_secret_name: + $ref: "#/components/schemas/TlsSecretName" + session_storage_class: + $ref: "#/components/schemas/StorageClassName" + service_account_name: + $ref: "#/components/schemas/K8sResourceName" + required: [ "name", "config_name", "session_protocol", "session_host", "session_port", "session_path", "session_ingress_annotations", "session_tls_secret_name" ] + ClusterPatch: + type: object + additionalProperties: false + properties: + name: + $ref: "#/components/schemas/Name" + config_name: + $ref: "#/components/schemas/ConfigName" + session_protocol: + $ref: "#/components/schemas/Protocol" + session_host: + $ref: "#/components/schemas/Host" + session_port: + $ref: "#/components/schemas/Port" + session_path: + type: string + session_ingress_annotations: + $ref: "#/components/schemas/IngressAnnotations" + session_tls_secret_name: + $ref: "#/components/schemas/TlsSecretName" + session_storage_class: + $ref: "#/components/schemas/StorageClassName" + service_account_name: + $ref: "#/components/schemas/K8sResourceNamePatch" + ClusterWithId: + type: object + additionalProperties: false + properties: + name: + $ref: "#/components/schemas/Name" + config_name: + $ref: "#/components/schemas/ConfigName" + id: + $ref: 
"#/components/schemas/Ulid" + session_protocol: + $ref: "#/components/schemas/Protocol" + session_host: + $ref: "#/components/schemas/Host" + session_port: + $ref: "#/components/schemas/Port" + session_path: + type: string + session_ingress_annotations: + $ref: "#/components/schemas/IngressAnnotations" + session_tls_secret_name: + $ref: "#/components/schemas/TlsSecretName" + session_storage_class: + $ref: "#/components/schemas/StorageClassName" + service_account_name: + type: string + required: [ "name", "config_name", "session_protocol", "session_host", "session_port", "session_path", "session_ingress_annotations", "session_tls_secret_name", "id" ] + ClustersWithId: + type: array + items: + $ref: "#/components/schemas/ClusterWithId" ResourceClass: type: object additionalProperties: false @@ -1055,7 +1271,7 @@ components: additionalProperties: false properties: quota: - $ref: "#/components/schemas/Quota" + $ref: "#/components/schemas/QuotaWithOptionalId" classes: $ref: "#/components/schemas/ResourceClasses" name: @@ -1068,6 +1284,8 @@ components: $ref: "#/components/schemas/IdleThreshold" hibernation_threshold: $ref: "#/components/schemas/HibernationThreshold" + cluster_id: + $ref: "#/components/schemas/Ulid" required: ["classes", "name", "public", "default"] example: quota: @@ -1092,6 +1310,7 @@ components: default_storage: 2 default: false name: "resource pool name" + cluster_id: "change-me" ResourcePoolPatch: type: object additionalProperties: false @@ -1110,6 +1329,8 @@ components: $ref: "#/components/schemas/IdleThreshold" hibernation_threshold: $ref: "#/components/schemas/HibernationThreshold" + cluster_id: + $ref: "#/components/schemas/Ulid" example: quota: cpu: 50 @@ -1119,6 +1340,7 @@ components: cpu: 4.5 max_storage: 10000 name: "resource pool name" + cluster_id: "4QZ886777NTN8GHQ551GSVAXSA" ResourcePoolPut: type: object additionalProperties: false @@ -1137,6 +1359,8 @@ components: $ref: "#/components/schemas/IdleThreshold" hibernation_threshold: $ref: "#/components/schemas/HibernationThreshold" + cluster_id: + $ref: "#/components/schemas/Ulid" required: ["classes", "name", "public", "default"] example: quota: @@ -1160,6 +1384,7 @@ components: max_storage: 10000 id: 2 name: "resource pool name" + cluster_id: "4QZ886777NTN8GHQ551GSVAXSA" ResourcePoolWithId: type: object additionalProperties: false @@ -1182,6 +1407,12 @@ components: $ref: "#/components/schemas/IdleThreshold" hibernation_threshold: $ref: "#/components/schemas/HibernationThreshold" + cluster: + type: object + properties: + id: + $ref: "#/components/schemas/Ulid" + required: ["id"] required: ["classes", "name", "id", "public", "default"] example: quota: @@ -1206,6 +1437,7 @@ components: id: 2 name: "resource pool name" id: 1 + cluster_id: "4QZ886777NTN8GHQ551GSVAXSA" ResourcePoolWithIdFiltered: type: object additionalProperties: false @@ -1228,6 +1460,8 @@ components: $ref: "#/components/schemas/IdleThreshold" hibernation_threshold: $ref: "#/components/schemas/HibernationThreshold" + cluster_id: + $ref: "#/components/schemas/Ulid" required: ["classes", "name", "id", "public", "default"] example: quota: @@ -1255,7 +1489,7 @@ components: id: 2 matching: true name: "resource pool name" - id: 1 + cluster_id: "4QZ886777NTN8GHQ551GSVAXSA" ResourcePoolsWithId: type: array items: @@ -1302,7 +1536,7 @@ components: items: $ref: "#/components/schemas/PoolUserWithId" uniqueItems: true - Quota: + QuotaPatch: type: object additionalProperties: false properties: @@ -1312,12 +1546,10 @@ components: $ref: "#/components/schemas/Memory" 
gpu: $ref: "#/components/schemas/Gpu" - required: ["cpu", "memory", "gpu"] example: - cpu: 1.0 - memory: 4 gpu: 0 - QuotaPatch: + cpu: 100 + QuotaWithId: type: object additionalProperties: false properties: @@ -1327,10 +1559,15 @@ components: $ref: "#/components/schemas/Memory" gpu: $ref: "#/components/schemas/Gpu" + id: + $ref: "#/components/schemas/Name" example: gpu: 0 cpu: 100 - QuotaWithId: + memory: 1000 + id: 518c7d27-b5db-4aee-855f-f4638aded2d4 + required: ["cpu", "memory", "gpu", "id"] + QuotaWithOptionalId: type: object additionalProperties: false properties: @@ -1347,7 +1584,7 @@ components: cpu: 100 memory: 1000 id: 518c7d27-b5db-4aee-855f-f4638aded2d4 - required: ["cpu", "memory", "gpu", "id"] + required: ["cpu", "memory", "gpu"] Version: type: object properties: @@ -1426,6 +1663,14 @@ components: description: A name for a specific resource minLength: 5 example: "the name of a resource" + ConfigName: + type: string + description: | + The name of the Kubernetes configuration to use to connect to the remote cluster. This is currently used to find a file named `/`. + + This configuration is expected to have a default namespace defined. It will be used for all remote operations requiring a namespace, as well for namespaced objects. + pattern: "^[a-zA-Z0-9._-]+[.]yaml$" + example: "a-remote-cluster.yaml" DefaultFlag: type: boolean description: A default selection for resource classes or resource pools @@ -1459,9 +1704,32 @@ components: type: string description: A valid K8s label example: some-label-1 - pattern: '^[a-z0-9A-Z][a-z0-9A-Z-_./]*[a-z0-9A-Z]$' + pattern: "^[a-z0-9A-Z][a-z0-9A-Z-_./]*[a-z0-9A-Z]$" minLength: 3 maxLength: 63 + K8sResourceName: + type: string + description: | + A name of any K8s resource (i.e. Pod, Service, Secret, etc.). + This is pattern imposes the stricter rules applied to some resource type names + that need to follow DNS label standard and therefore can be used for all types. + Looser rules can be applied to a smaller subset of resource types. + example: some-k8s-resource + pattern: "^[a-z0-9][a-z0-9-]*[a-z0-9]$" + minLength: 1 + maxLength: 63 + K8sResourceNamePatch: + type: string + description: | + A name of any K8s resource (i.e. Pod, Service, Secret, etc.). + This is pattern imposes the stricter rules applied to some resource type names + that need to follow DNS label standard and therefore can be used for all types. + Looser rules can be applied to a smaller subset of resource types. An empty + string indicates that the value should be removed if present in the DB. + example: some-k8s-resource + pattern: "^[a-z0-9][a-z0-9-]*[a-z0-9]$" + minLength: 0 + maxLength: 63 NodeAffinity: type: object additionalProperties: false @@ -1513,13 +1781,41 @@ components: example: "Something went wrong - please try again later" required: ["code", "message"] required: ["error"] + Ulid: + description: ULID identifier + type: string + minLength: 26 + maxLength: 26 + pattern: "^[0-7][0-9A-HJKMNP-TV-Z]{25}$" # This is case-insensitive + Protocol: + description: Allowed Protocol strings + type: string + enum: [ "http", "https" ] + Port: + type: integer + minimum: 0 + maximum: 65536 + Host: + type: string + maxLength: 256 + # The following pattern accept things it should not, but this is better than no validation at all. 
+ pattern: "^([0-9a-zA-Z:_-]+[.])*[0-9a-zA-Z:_-]+$" + TlsSecretName: + type: string + maxLength: 256 + StorageClassName: + type: string + maxLength: 256 + IngressAnnotations: + type: object + additionalProperties: true responses: Error: description: The schema for all 4xx and 5xx responses content: "application/json": schema: - $ref: '#/components/schemas/ErrorResponse' + $ref: "#/components/schemas/ErrorResponse" securitySchemes: bearer: scheme: bearer @@ -1528,6 +1824,6 @@ components: type: openIdConnect openIdConnectUrl: /auth/realms/Renku/.well-known/openid-configuration security: + - bearer: [] - oidc: - openid - - bearer: [] diff --git a/components/renku_data_services/crc/apispec.py b/components/renku_data_services/crc/apispec.py index 7a92d911b..b6b73859a 100644 --- a/components/renku_data_services/crc/apispec.py +++ b/components/renku_data_services/crc/apispec.py @@ -1,9 +1,10 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2024-11-26T09:51:56+00:00 +# timestamp: 2025-07-23T11:50:42+00:00 from __future__ import annotations +from enum import Enum from typing import List, Optional from pydantic import ConfigDict, Field, RootModel @@ -34,15 +35,24 @@ class Version(BaseAPISpec): version: str -class IntegerIds(RootModel[List[int]]): - root: List[int] = Field(..., example=[1, 3, 5], min_length=1) +class IntegerId(RootModel[int]): + root: int = Field( + ..., + description="An integer ID used to identify different resources", + examples=[1], + ge=0, + ) + + +class IntegerIds(RootModel[List[IntegerId]]): + root: List[IntegerId] = Field(..., examples=[[1, 3, 5]], min_length=1) class K8sLabel(RootModel[str]): root: str = Field( ..., description="A valid K8s label", - example="some-label-1", + examples=["some-label-1"], max_length=63, min_length=3, pattern="^[a-z0-9A-Z][a-z0-9A-Z-_./]*[a-z0-9A-Z]$", @@ -56,7 +66,7 @@ class NodeAffinity(BaseAPISpec): key: str = Field( ..., description="A valid K8s label", - example="some-label-1", + examples=["some-label-1"], max_length=63, min_length=3, pattern="^[a-z0-9A-Z][a-z0-9A-Z-_./]*[a-z0-9A-Z]$", @@ -68,42 +78,56 @@ class NodeAffinityListResponse(RootModel[List[NodeAffinity]]): root: List[NodeAffinity] = Field( [], description="A list of k8s labels used for tolerations and/or node affinity", - example=[{"key": "test-label-1", "required_during_scheduling": False}], + examples=[[{"key": "test-label-1", "required_during_scheduling": False}]], min_length=0, ) class Error(BaseAPISpec): - code: int = Field(..., example=1404, gt=0) + code: int = Field(..., examples=[1404], gt=0) detail: Optional[str] = Field( - None, example="A more detailed optional message showing what the problem was" + None, examples=["A more detailed optional message showing what the problem was"] + ) + message: str = Field( + ..., examples=["Something went wrong - please try again later"] ) - message: str = Field(..., example="Something went wrong - please try again later") class ErrorResponse(BaseAPISpec): error: Error +class Protocol(Enum): + http = "http" + https = "https" + + +class IngressAnnotations(BaseAPISpec): + pass + model_config = ConfigDict( + extra="allow", + ) + + class ResourcePoolsParams(BaseAPISpec): model_config = ConfigDict( extra="forbid", ) - cpu: float = Field(0.0, description="Number of cpu cores", example=10, ge=0.0) + cpu: float = Field(0.0, description="Number of cpu cores", examples=[10], ge=0.0) gpu: int = Field( - 0, description="Number of GPUs", example=8, ge=0, le=9223372036854775807 + 0, description="Number of GPUs", 
examples=[8], ge=0, le=9223372036854775807 ) memory: int = Field( 0, description="Number of gigabytes of memory", - example=4, + examples=[4], ge=0, le=9223372036854775807, ) max_storage: int = Field( 0, description="Number of gigabytes of storage", - example=100, + examples=[100], ge=0, le=9223372036854775807, ) @@ -120,7 +144,7 @@ class ResourceClassParams(BaseAPISpec): name: Optional[str] = Field( None, description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) @@ -136,7 +160,7 @@ class UserResourceParams(BaseAPISpec): name: Optional[str] = Field( None, description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) @@ -145,6 +169,125 @@ class UsersUserIdResourcePoolsGetParametersQuery(BaseAPISpec): user_resource_params: Optional[UserResourceParams] = None +class Cluster(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + name: str = Field( + ..., + description="A name for a specific resource", + examples=["the name of a resource"], + min_length=5, + ) + config_name: str = Field( + ..., + description="The name of the Kubernetes configuration to use to connect to the remote cluster. This is currently used to find a file named `/`.\n\nThis configuration is expected to have a default namespace defined. It will be used for all remote operations requiring a namespace, as well for namespaced objects.\n", + examples=["a-remote-cluster.yaml"], + pattern="^[a-zA-Z0-9._-]+[.]yaml$", + ) + session_protocol: Protocol + session_host: str = Field( + ..., max_length=256, pattern="^([0-9a-zA-Z:_-]+[.])*[0-9a-zA-Z:_-]+$" + ) + session_port: int = Field(..., ge=0, le=65536) + session_path: str + session_ingress_annotations: IngressAnnotations + session_tls_secret_name: str = Field(..., max_length=256) + session_storage_class: Optional[str] = Field(None, max_length=256) + service_account_name: Optional[str] = Field( + None, + description="A name of any K8s resource (i.e. Pod, Service, Secret, etc.).\nThis is pattern imposes the stricter rules applied to some resource type names\nthat need to follow DNS label standard and therefore can be used for all types.\nLooser rules can be applied to a smaller subset of resource types.\n", + examples=["some-k8s-resource"], + max_length=63, + min_length=1, + pattern="^[a-z0-9][a-z0-9-]*[a-z0-9]$", + ) + + +class ClusterPatch(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + name: Optional[str] = Field( + None, + description="A name for a specific resource", + examples=["the name of a resource"], + min_length=5, + ) + config_name: Optional[str] = Field( + None, + description="The name of the Kubernetes configuration to use to connect to the remote cluster. This is currently used to find a file named `/`.\n\nThis configuration is expected to have a default namespace defined. 
It will be used for all remote operations requiring a namespace, as well for namespaced objects.\n", + examples=["a-remote-cluster.yaml"], + pattern="^[a-zA-Z0-9._-]+[.]yaml$", + ) + session_protocol: Optional[Protocol] = None + session_host: Optional[str] = Field( + None, max_length=256, pattern="^([0-9a-zA-Z:_-]+[.])*[0-9a-zA-Z:_-]+$" + ) + session_port: Optional[int] = Field(None, ge=0, le=65536) + session_path: Optional[str] = None + session_ingress_annotations: Optional[IngressAnnotations] = None + session_tls_secret_name: Optional[str] = Field(None, max_length=256) + session_storage_class: Optional[str] = Field(None, max_length=256) + service_account_name: Optional[str] = Field( + None, + description="A name of any K8s resource (i.e. Pod, Service, Secret, etc.).\nThis is pattern imposes the stricter rules applied to some resource type names\nthat need to follow DNS label standard and therefore can be used for all types.\nLooser rules can be applied to a smaller subset of resource types. An empty\nstring indicates that the value should be removed if present in the DB.\n", + examples=["some-k8s-resource"], + max_length=63, + min_length=0, + pattern="^[a-z0-9][a-z0-9-]*[a-z0-9]$", + ) + + +class ClusterWithId(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + name: str = Field( + ..., + description="A name for a specific resource", + examples=["the name of a resource"], + min_length=5, + ) + config_name: str = Field( + ..., + description="The name of the Kubernetes configuration to use to connect to the remote cluster. This is currently used to find a file named `/`.\n\nThis configuration is expected to have a default namespace defined. It will be used for all remote operations requiring a namespace, as well for namespaced objects.\n", + examples=["a-remote-cluster.yaml"], + pattern="^[a-zA-Z0-9._-]+[.]yaml$", + ) + id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + session_protocol: Protocol + session_host: str = Field( + ..., max_length=256, pattern="^([0-9a-zA-Z:_-]+[.])*[0-9a-zA-Z:_-]+$" + ) + session_port: int = Field(..., ge=0, le=65536) + session_path: str + session_ingress_annotations: IngressAnnotations + session_tls_secret_name: str = Field(..., max_length=256) + session_storage_class: Optional[str] = Field(None, max_length=256) + service_account_name: Optional[str] = None + + +class ClustersWithId(RootModel[List[ClusterWithId]]): + root: List[ClusterWithId] + + +class Cluster1(BaseAPISpec): + id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + + class PoolUserWithId(BaseAPISpec): model_config = ConfigDict( extra="forbid", @@ -152,7 +295,7 @@ class PoolUserWithId(BaseAPISpec): id: str = Field( ..., description="Keycloak user ID", - example="f74a228b-1790-4276-af5f-25c2424e9b0c", + examples=["f74a228b-1790-4276-af5f-25c2424e9b0c"], pattern="^[A-Za-z0-9]{1}[A-Za-z0-9-]+$", ) no_default_access: bool = Field( @@ -165,61 +308,67 @@ class PoolUsersWithId(RootModel[List[PoolUserWithId]]): root: List[PoolUserWithId] -class Quota(BaseAPISpec): +class QuotaPatch(BaseAPISpec): model_config = ConfigDict( extra="forbid", ) - cpu: float = Field(..., description="Number of cpu cores", example=10, gt=0.0) - memory: int = Field( - ..., + cpu: Optional[float] = Field( + None, description="Number of cpu cores", examples=[10], gt=0.0 + ) + memory: Optional[int] = Field( + None, description="Number of 
gigabytes of memory", - example=4, + examples=[4], gt=0, le=9223372036854775807, ) - gpu: int = Field( - ..., description="Number of GPUs", example=8, ge=0, le=9223372036854775807 + gpu: Optional[int] = Field( + None, description="Number of GPUs", examples=[8], ge=0, le=9223372036854775807 ) -class QuotaPatch(BaseAPISpec): +class QuotaWithId(BaseAPISpec): model_config = ConfigDict( extra="forbid", ) - cpu: Optional[float] = Field( - None, description="Number of cpu cores", example=10, gt=0.0 - ) - memory: Optional[int] = Field( - None, + cpu: float = Field(..., description="Number of cpu cores", examples=[10], gt=0.0) + memory: int = Field( + ..., description="Number of gigabytes of memory", - example=4, + examples=[4], gt=0, le=9223372036854775807, ) - gpu: Optional[int] = Field( - None, description="Number of GPUs", example=8, ge=0, le=9223372036854775807 + gpu: int = Field( + ..., description="Number of GPUs", examples=[8], ge=0, le=9223372036854775807 + ) + id: str = Field( + ..., + description="A name for a specific resource", + examples=["the name of a resource"], + min_length=5, ) -class QuotaWithId(BaseAPISpec): +class QuotaWithOptionalId(BaseAPISpec): model_config = ConfigDict( extra="forbid", ) - cpu: float = Field(..., description="Number of cpu cores", example=10, gt=0.0) + cpu: float = Field(..., description="Number of cpu cores", examples=[10], gt=0.0) memory: int = Field( ..., description="Number of gigabytes of memory", - example=4, + examples=[4], gt=0, le=9223372036854775807, ) gpu: int = Field( - ..., description="Number of GPUs", example=8, ge=0, le=9223372036854775807 + ..., description="Number of GPUs", examples=[8], ge=0, le=9223372036854775807 ) - id: str = Field( - ..., + id: Optional[str] = Field( + None, description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) @@ -231,49 +380,49 @@ class ResourceClass(BaseAPISpec): name: str = Field( ..., description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) - cpu: float = Field(..., description="Number of cpu cores", example=10, gt=0.0) + cpu: float = Field(..., description="Number of cpu cores", examples=[10], gt=0.0) memory: int = Field( ..., description="Number of gigabytes of memory", - example=4, + examples=[4], gt=0, le=9223372036854775807, ) gpu: int = Field( - ..., description="Number of GPUs", example=8, ge=0, le=9223372036854775807 + ..., description="Number of GPUs", examples=[8], ge=0, le=9223372036854775807 ) max_storage: int = Field( ..., description="Number of gigabytes of storage", - example=100, + examples=[100], gt=0, le=9223372036854775807, ) default_storage: int = Field( ..., description="Number of gigabytes of storage", - example=100, + examples=[100], gt=0, le=9223372036854775807, ) default: bool = Field( ..., description="A default selection for resource classes or resource pools", - example=False, + examples=[False], ) tolerations: Optional[List[K8sLabel]] = Field( None, description="A list of k8s labels used for tolerations", - example=["test-label-1"], + examples=[["test-label-1"]], min_length=0, ) node_affinities: Optional[List[NodeAffinity]] = Field( None, description="A list of k8s labels used for tolerations and/or node affinity", - example=[{"key": "test-label-1", "required_during_scheduling": False}], + examples=[[{"key": "test-label-1", "required_during_scheduling": False}]], min_length=0, ) @@ -285,51 +434,51 @@ class 
ResourceClassPatch(BaseAPISpec): name: Optional[str] = Field( None, description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) cpu: Optional[float] = Field( - None, description="Number of cpu cores", example=10, gt=0.0 + None, description="Number of cpu cores", examples=[10], gt=0.0 ) memory: Optional[int] = Field( None, description="Number of gigabytes of memory", - example=4, + examples=[4], gt=0, le=9223372036854775807, ) gpu: Optional[int] = Field( - None, description="Number of GPUs", example=8, ge=0, le=9223372036854775807 + None, description="Number of GPUs", examples=[8], ge=0, le=9223372036854775807 ) max_storage: Optional[int] = Field( None, description="Number of gigabytes of storage", - example=100, + examples=[100], gt=0, le=9223372036854775807, ) default_storage: Optional[int] = Field( None, description="Number of gigabytes of storage", - example=100, + examples=[100], gt=0, le=9223372036854775807, ) default: Optional[bool] = Field( - None, + False, description="A default selection for resource classes or resource pools", - example=False, + examples=[False], ) tolerations: Optional[List[K8sLabel]] = Field( None, description="A list of k8s labels used for tolerations", - example=["test-label-1"], + examples=[["test-label-1"]], min_length=0, ) node_affinities: Optional[List[NodeAffinity]] = Field( None, description="A list of k8s labels used for tolerations and/or node affinity", - example=[{"key": "test-label-1", "required_during_scheduling": False}], + examples=[[{"key": "test-label-1", "required_during_scheduling": False}]], min_length=0, ) @@ -341,57 +490,57 @@ class ResourceClassPatchWithId(BaseAPISpec): name: Optional[str] = Field( None, description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) cpu: Optional[float] = Field( - None, description="Number of cpu cores", example=10, gt=0.0 + None, description="Number of cpu cores", examples=[10], gt=0.0 ) memory: Optional[int] = Field( None, description="Number of gigabytes of memory", - example=4, + examples=[4], gt=0, le=9223372036854775807, ) gpu: Optional[int] = Field( - None, description="Number of GPUs", example=8, ge=0, le=9223372036854775807 + None, description="Number of GPUs", examples=[8], ge=0, le=9223372036854775807 ) max_storage: Optional[int] = Field( None, description="Number of gigabytes of storage", - example=100, + examples=[100], gt=0, le=9223372036854775807, ) default_storage: Optional[int] = Field( None, description="Number of gigabytes of storage", - example=100, + examples=[100], gt=0, le=9223372036854775807, ) id: int = Field( ..., description="An integer ID used to identify different resources", - example=1, + examples=[1], ge=0, ) default: Optional[bool] = Field( - None, + False, description="A default selection for resource classes or resource pools", - example=False, + examples=[False], ) tolerations: Optional[List[K8sLabel]] = Field( None, description="A list of k8s labels used for tolerations", - example=["test-label-1"], + examples=[["test-label-1"]], min_length=0, ) node_affinities: Optional[List[NodeAffinity]] = Field( None, description="A list of k8s labels used for tolerations and/or node affinity", - example=[{"key": "test-label-1", "required_during_scheduling": False}], + examples=[[{"key": "test-label-1", "required_during_scheduling": False}]], min_length=0, ) @@ -403,55 +552,55 @@ class ResourceClassWithId(BaseAPISpec): name: str = 
Field( ..., description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) - cpu: float = Field(..., description="Number of cpu cores", example=10, gt=0.0) + cpu: float = Field(..., description="Number of cpu cores", examples=[10], gt=0.0) memory: int = Field( ..., description="Number of gigabytes of memory", - example=4, + examples=[4], gt=0, le=9223372036854775807, ) gpu: int = Field( - ..., description="Number of GPUs", example=8, ge=0, le=9223372036854775807 + ..., description="Number of GPUs", examples=[8], ge=0, le=9223372036854775807 ) max_storage: int = Field( ..., description="Number of gigabytes of storage", - example=100, + examples=[100], gt=0, le=9223372036854775807, ) default_storage: int = Field( ..., description="Number of gigabytes of storage", - example=100, + examples=[100], gt=0, le=9223372036854775807, ) id: int = Field( ..., description="An integer ID used to identify different resources", - example=1, + examples=[1], ge=0, ) default: bool = Field( ..., description="A default selection for resource classes or resource pools", - example=False, + examples=[False], ) tolerations: Optional[List[K8sLabel]] = Field( None, description="A list of k8s labels used for tolerations", - example=["test-label-1"], + examples=[["test-label-1"]], min_length=0, ) node_affinities: Optional[List[NodeAffinity]] = Field( None, description="A list of k8s labels used for tolerations and/or node affinity", - example=[{"key": "test-label-1", "required_during_scheduling": False}], + examples=[[{"key": "test-label-1", "required_during_scheduling": False}]], min_length=0, ) @@ -463,56 +612,56 @@ class ResourceClassWithIdFiltered(BaseAPISpec): name: str = Field( ..., description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) - cpu: float = Field(..., description="Number of cpu cores", example=10, gt=0.0) + cpu: float = Field(..., description="Number of cpu cores", examples=[10], gt=0.0) memory: int = Field( ..., description="Number of gigabytes of memory", - example=4, + examples=[4], gt=0, le=9223372036854775807, ) gpu: int = Field( - ..., description="Number of GPUs", example=8, ge=0, le=9223372036854775807 + ..., description="Number of GPUs", examples=[8], ge=0, le=9223372036854775807 ) max_storage: int = Field( ..., description="Number of gigabytes of storage", - example=100, + examples=[100], gt=0, le=9223372036854775807, ) default_storage: int = Field( ..., description="Number of gigabytes of storage", - example=100, + examples=[100], gt=0, le=9223372036854775807, ) id: int = Field( ..., description="An integer ID used to identify different resources", - example=1, + examples=[1], ge=0, ) default: bool = Field( ..., description="A default selection for resource classes or resource pools", - example=False, + examples=[False], ) matching: Optional[bool] = None tolerations: Optional[List[K8sLabel]] = Field( None, description="A list of k8s labels used for tolerations", - example=["test-label-1"], + examples=[["test-label-1"]], min_length=0, ) node_affinities: Optional[List[NodeAffinity]] = Field( None, description="A list of k8s labels used for tolerations and/or node affinity", - example=[{"key": "test-label-1", "required_during_scheduling": False}], + examples=[[{"key": "test-label-1", "required_during_scheduling": False}]], min_length=0, ) @@ -520,27 +669,29 @@ class ResourceClassWithIdFiltered(BaseAPISpec): class 
ResourceClassesWithIdResponse(RootModel[List[ResourceClassWithId]]): root: List[ResourceClassWithId] = Field( ..., - example=[ - { - "name": "resource class 1", - "cpu": 1.5, - "memory": 2, - "gpu": 0, - "max_storage": 100, - "id": 1, - "default": True, - "default_storage": 10, - }, - { - "name": "resource class 2", - "cpu": 4.5, - "memory": 10, - "gpu": 2, - "default_storage": 10, - "max_storage": 10000, - "id": 2, - "default": False, - }, + examples=[ + [ + { + "name": "resource class 1", + "cpu": 1.5, + "memory": 2, + "gpu": 0, + "max_storage": 100, + "id": 1, + "default": True, + "default_storage": 10, + }, + { + "name": "resource class 2", + "cpu": 4.5, + "memory": 10, + "gpu": 2, + "default_storage": 10, + "max_storage": 10000, + "id": 2, + "default": False, + }, + ] ], ) @@ -549,23 +700,23 @@ class ResourcePool(BaseAPISpec): model_config = ConfigDict( extra="forbid", ) - quota: Optional[Quota] = None + quota: Optional[QuotaWithOptionalId] = None classes: List[ResourceClass] name: str = Field( ..., description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) public: bool = Field( ..., description="A resource pool whose classes can be accessed by anyone", - example=False, + examples=[False], ) default: bool = Field( ..., description="A default selection for resource classes or resource pools", - example=False, + examples=[False], ) idle_threshold: Optional[int] = Field( None, @@ -579,6 +730,13 @@ class ResourcePool(BaseAPISpec): ge=0, le=2147483647, ) + cluster_id: Optional[str] = Field( + None, + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) class ResourcePoolPatch(BaseAPISpec): @@ -588,27 +746,29 @@ class ResourcePoolPatch(BaseAPISpec): quota: Optional[QuotaPatch] = None classes: Optional[List[ResourceClassPatchWithId]] = Field( None, - example=[ - {"name": "resource class 1", "id": 1}, - {"cpu": 4.5, "max_storage": 10000, "id": 2}, + examples=[ + [ + {"name": "resource class 1", "id": 1}, + {"cpu": 4.5, "max_storage": 10000, "id": 2}, + ] ], min_length=1, ) name: Optional[str] = Field( None, description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) public: Optional[bool] = Field( - None, + False, description="A resource pool whose classes can be accessed by anyone", - example=False, + examples=[False], ) default: Optional[bool] = Field( - None, + False, description="A default selection for resource classes or resource pools", - example=False, + examples=[False], ) idle_threshold: Optional[int] = Field( None, @@ -622,6 +782,13 @@ class ResourcePoolPatch(BaseAPISpec): ge=0, le=2147483647, ) + cluster_id: Optional[str] = Field( + None, + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) class ResourcePoolPut(BaseAPISpec): @@ -631,44 +798,46 @@ class ResourcePoolPut(BaseAPISpec): quota: Optional[QuotaWithId] = None classes: List[ResourceClassWithId] = Field( ..., - example=[ - { - "name": "resource class 1", - "cpu": 1.5, - "memory": 2, - "gpu": 0, - "max_storage": 100, - "id": 1, - "default": True, - "default_storage": 10, - }, - { - "name": "resource class 2", - "cpu": 4.5, - "memory": 10, - "gpu": 2, - "default_storage": 10, - "max_storage": 10000, - "id": 2, - "default": False, - }, + examples=[ + [ + { + "name": "resource class 1", + "cpu": 1.5, + "memory": 2, + "gpu": 0, + "max_storage": 100, + "id": 
1, + "default": True, + "default_storage": 10, + }, + { + "name": "resource class 2", + "cpu": 4.5, + "memory": 10, + "gpu": 2, + "default_storage": 10, + "max_storage": 10000, + "id": 2, + "default": False, + }, + ] ], ) name: str = Field( ..., description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) public: bool = Field( ..., description="A resource pool whose classes can be accessed by anyone", - example=False, + examples=[False], ) default: bool = Field( ..., description="A default selection for resource classes or resource pools", - example=False, + examples=[False], ) idle_threshold: Optional[int] = Field( None, @@ -682,6 +851,13 @@ class ResourcePoolPut(BaseAPISpec): ge=0, le=2147483647, ) + cluster_id: Optional[str] = Field( + None, + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) class ResourcePoolWithId(BaseAPISpec): @@ -693,24 +869,24 @@ class ResourcePoolWithId(BaseAPISpec): name: str = Field( ..., description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) id: int = Field( ..., description="An integer ID used to identify different resources", - example=1, + examples=[1], ge=0, ) public: bool = Field( ..., description="A resource pool whose classes can be accessed by anyone", - example=False, + examples=[False], ) default: bool = Field( ..., description="A default selection for resource classes or resource pools", - example=False, + examples=[False], ) idle_threshold: Optional[int] = Field( None, @@ -724,6 +900,7 @@ class ResourcePoolWithId(BaseAPISpec): ge=0, le=2147483647, ) + cluster: Optional[Cluster1] = None class ResourcePoolWithIdFiltered(BaseAPISpec): @@ -735,24 +912,24 @@ class ResourcePoolWithIdFiltered(BaseAPISpec): name: str = Field( ..., description="A name for a specific resource", - example="the name of a resource", + examples=["the name of a resource"], min_length=5, ) id: int = Field( ..., description="An integer ID used to identify different resources", - example=1, + examples=[1], ge=0, ) public: bool = Field( ..., description="A resource pool whose classes can be accessed by anyone", - example=False, + examples=[False], ) default: bool = Field( ..., description="A default selection for resource classes or resource pools", - example=False, + examples=[False], ) idle_threshold: Optional[int] = Field( None, @@ -766,6 +943,13 @@ class ResourcePoolWithIdFiltered(BaseAPISpec): ge=0, le=2147483647, ) + cluster_id: Optional[str] = Field( + None, + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) class ResourcePoolsWithId(RootModel[List[ResourcePoolWithId]]): diff --git a/components/renku_data_services/crc/apispec_base.py b/components/renku_data_services/crc/apispec_base.py index ee344ebf7..9aa628b53 100644 --- a/components/renku_data_services/crc/apispec_base.py +++ b/components/renku_data_services/crc/apispec_base.py @@ -1,6 +1,12 @@ """Base models for API specifications.""" -from pydantic import BaseModel +from pathlib import PurePosixPath +from typing import Any + +from pydantic import BaseModel, field_validator +from ulid import ULID + +from renku_data_services.session import models class BaseAPISpec(BaseModel): @@ -10,3 +16,41 @@ class Config: """Enables orm mode for pydantic.""" from_attributes = True + + @field_validator("*", mode="before", check_fields=False) + @classmethod + 
def serialize_ulid(cls, value: Any) -> Any:
+        """Handle ULIDs."""
+        if isinstance(value, ULID):
+            return str(value)
+        return value
+
+    @field_validator("project_id", mode="before", check_fields=False)
+    @classmethod
+    def serialize_project_id(cls, project_id: str | ULID) -> str:
+        """Custom serializer that can handle ULIDs."""
+        return str(project_id)
+
+    @field_validator("environment_id", mode="before", check_fields=False)
+    @classmethod
+    def serialize_environment_id(cls, environment_id: str | ULID | None) -> str | None:
+        """Custom serializer that can handle ULIDs."""
+        if environment_id is None:
+            return None
+        return str(environment_id)
+
+    @field_validator("environment_kind", mode="before", check_fields=False)
+    @classmethod
+    def serialize_environment_kind(cls, environment_kind: models.EnvironmentKind | str) -> str:
+        """Custom serializer that can handle the environment kind."""
+        if isinstance(environment_kind, models.EnvironmentKind):
+            return environment_kind.value
+        return environment_kind
+
+    @field_validator("working_directory", "mount_directory", check_fields=False, mode="before")
+    @classmethod
+    def convert_path_to_string(cls, val: str | PurePosixPath) -> str:
+        """Converts the python path to a regular string when pydantic deserializes."""
+        if isinstance(val, PurePosixPath):
+            return val.as_posix()
+        return val
diff --git a/components/renku_data_services/crc/blueprints.py b/components/renku_data_services/crc/blueprints.py
index ba0e4a31c..96ad1f7d5 100644
--- a/components/renku_data_services/crc/blueprints.py
+++ b/components/renku_data_services/crc/blueprints.py
@@ -5,6 +5,7 @@
 from sanic import HTTPResponse, Request, empty, json
 from sanic_ext import validate
+from ulid import ULID
 
 import renku_data_services.base_models as base_models
 from renku_data_services import errors
@@ -13,7 +14,8 @@
 from renku_data_services.base_api.misc import validate_body_root_model, validate_db_ids, validate_query
 from renku_data_services.base_models.validation import validated_json
 from renku_data_services.crc import apispec, models
-from renku_data_services.crc.db import ResourcePoolRepository, UserRepository
+from renku_data_services.crc.core import validate_cluster, validate_cluster_patch
+from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository, UserRepository
 from renku_data_services.k8s.quota import QuotaRepository
 from renku_data_services.users.db import UserRepo as KcUserRepo
 from renku_data_services.users.models import UserInfo
@@ -25,6 +27,7 @@ class ResourcePoolsBP(CustomBlueprint):
 
     rp_repo: ResourcePoolRepository
     user_repo: UserRepository
+    cluster_repo: ClusterRepository
     authenticator: base_models.Authenticator
 
     def get_all(self) -> BlueprintFactoryResponse:
@@ -47,7 +50,11 @@ def post(self) -> BlueprintFactoryResponse:
         @only_admins
         @validate(json=apispec.ResourcePool)
         async def _post(_: Request, user: base_models.APIUser, body: apispec.ResourcePool) -> HTTPResponse:
-            rp = models.ResourcePool.from_dict(body.model_dump(exclude_none=True))
+            cluster = None
+            if body.cluster_id is not None:
+                cluster = await self.cluster_repo.select(api_user=user, cluster_id=ULID.from_str(body.cluster_id))
+            rp = models.ResourcePool.from_dict({**body.model_dump(exclude_none=True), "cluster": cluster})
+
             res = await self.rp_repo.insert_resource_pool(api_user=user, resource_pool=rp)
             return validated_json(apispec.ResourcePoolWithId, res, status=201)
 
@@ -556,10 +563,103 @@ async def _post_put(
             if not user_check:
                 raise errors.MissingResourceError(message=f"User with user ID {user_id} cannot be found")
             rps = await self.repo.update_user_resource_pools(
-                keycloak_id=user_id, resource_pool_ids=resource_pool_ids.root, append=post, api_user=api_user
+                keycloak_id=user_id,
+                resource_pool_ids=[i.root for i in resource_pool_ids.root],
+                append=post,
+                api_user=api_user,
             )
             return validated_json(
                 apispec.ResourcePoolsWithId,
                 rps,
                 status=201 if post else 200,
             )
+
+
+@dataclass(kw_only=True)
+class ClustersBP(CustomBlueprint):
+    """Handlers for dealing with the cluster definitions."""
+
+    repo: ClusterRepository
+    authenticator: base_models.Authenticator
+
+    def get_all(self) -> BlueprintFactoryResponse:
+        """Get the cluster descriptions."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        async def _handler(_request: Request, user: base_models.APIUser) -> HTTPResponse:
+            clusters = [c async for c in self.repo.select_all()]
+
+            return validated_json(apispec.ClustersWithId, clusters)
+
+        return "/clusters", ["GET"], _handler
+
+    def post(self) -> BlueprintFactoryResponse:
+        """Create a cluster description."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        @validate(json=apispec.Cluster)
+        async def _handler(_request: Request, user: base_models.APIUser, body: apispec.Cluster) -> HTTPResponse:
+            cluster = validate_cluster(body)
+            cluster = await self.repo.insert(user, cluster)
+
+            return validated_json(apispec.ClusterWithId, cluster, status=201)
+
+        return "/clusters", ["POST"], _handler
+
+    def get(self) -> BlueprintFactoryResponse:
+        """Get a single cluster description."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        async def _handler(_request: Request, user: base_models.APIUser, cluster_id: ULID) -> HTTPResponse:
+            cluster = await self.repo.select(user, cluster_id)
+
+            return validated_json(apispec.ClusterWithId, cluster, status=200)
+
+        return "/clusters/<cluster_id:ulid>", ["GET"], _handler
+
+    def put(self) -> BlueprintFactoryResponse:
+        """Update a cluster description."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        @validate(json=apispec.Cluster)
+        async def _handler(
+            _request: Request, user: base_models.APIUser, cluster_id: ULID, body: apispec.Cluster
+        ) -> HTTPResponse:
+            cluster = validate_cluster(body)
+            cluster = await self.repo.update(user, cluster.to_cluster_patch(), cluster_id)
+
+            return validated_json(apispec.ClusterWithId, cluster, status=201)
+
+        return "/clusters/<cluster_id:ulid>", ["PUT"], _handler
+
+    def patch(self) -> BlueprintFactoryResponse:
+        """Patch a cluster description."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        @validate(json=apispec.ClusterPatch)
+        async def _handler(
+            _request: Request, user: base_models.APIUser, cluster_id: ULID, body: apispec.ClusterPatch
+        ) -> HTTPResponse:
+            patch = validate_cluster_patch(body)
+            cluster = await self.repo.update(user, patch, cluster_id)
+
+            return validated_json(apispec.ClusterWithId, cluster, status=201)
+
+        return "/clusters/<cluster_id:ulid>", ["PATCH"], _handler
+
+    def delete(self) -> BlueprintFactoryResponse:
+        """Remove the cluster description."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        async def _handler(_request: Request, user: base_models.APIUser, cluster_id: ULID) -> HTTPResponse:
+            await self.repo.delete(user, cluster_id)
+
+            return HTTPResponse(status=204)
+
+        return "/clusters/<cluster_id:ulid>", ["DELETE"], _handler
diff --git a/components/renku_data_services/crc/core.py b/components/renku_data_services/crc/core.py
new file mode 100644
index 000000000..6b40f79aa
--- /dev/null
+++ b/components/renku_data_services/crc/core.py
@@ -0,0 +1,38 @@
+"""crc module converters and 
validators.""" + +from renku_data_services.crc import apispec, models + + +def validate_cluster(body: apispec.Cluster) -> models.Cluster: + """Convert a REST API Cluster object to a model Cluster object.""" + return models.Cluster( + name=body.name, + config_name=body.config_name, + session_protocol=body.session_protocol, + session_host=body.session_host, + session_port=body.session_port, + session_path=body.session_path, + session_ingress_annotations=body.session_ingress_annotations.model_dump(), + session_tls_secret_name=body.session_tls_secret_name, + session_storage_class=body.session_storage_class, + service_account_name=body.service_account_name, + ) + + +def validate_cluster_patch(patch: apispec.ClusterPatch) -> models.ClusterPatch: + """Convert a REST API Cluster object patch to a model Cluster object.""" + + return models.ClusterPatch( + name=patch.name, + config_name=patch.config_name, + session_protocol=patch.session_protocol, + session_host=patch.session_host, + session_port=patch.session_port, + session_path=patch.session_path, + session_ingress_annotations=patch.session_ingress_annotations.model_dump() + if patch.session_ingress_annotations is not None + else None, + session_tls_secret_name=patch.session_tls_secret_name, + session_storage_class=patch.session_storage_class, + service_account_name=patch.service_account_name, + ) diff --git a/components/renku_data_services/crc/db.py b/components/renku_data_services/crc/db.py index 8b5536b1e..a390fb696 100644 --- a/components/renku_data_services/crc/db.py +++ b/components/renku_data_services/crc/db.py @@ -7,8 +7,8 @@ """ from asyncio import gather -from collections.abc import Callable, Collection, Coroutine, Sequence -from dataclasses import dataclass, field +from collections.abc import AsyncGenerator, Callable, Collection, Coroutine, Sequence +from dataclasses import asdict, dataclass, field from functools import wraps from typing import Any, Concatenate, Optional, ParamSpec, TypeVar, cast @@ -16,11 +16,15 @@ from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine from sqlalchemy.orm import selectinload from sqlalchemy.sql import Select, and_, not_, or_ +from ulid import ULID import renku_data_services.base_models as base_models from renku_data_services import errors from renku_data_services.crc import models from renku_data_services.crc import orm as schemas +from renku_data_services.crc.apispec import Protocol as CrcProtocol +from renku_data_services.crc.models import Cluster, ClusterPatch, SavedCluster +from renku_data_services.crc.orm import ClusterORM from renku_data_services.k8s.quota import QuotaRepository from renku_data_services.users.db import UserRepo @@ -148,6 +152,10 @@ async def decorated_function(self: Any, *args: _P.args, **kwargs: _P.kwargs) -> class ResourcePoolRepository(_Base): """The adapter used for accessing resource pools with SQLAlchemy.""" + def __init__(self, session_maker: Callable[..., AsyncSession], quotas_repo: QuotaRepository): + super().__init__(session_maker, quotas_repo) + self.__cluster_repo = ClusterRepository(session_maker=self.session_maker) + async def initialize(self, async_connection_url: str, rp: models.ResourcePool) -> None: """Add the default resource pool if it does not already exist.""" engine = create_async_engine(async_connection_url, poolclass=NullPool) @@ -168,7 +176,11 @@ async def get_resource_pools( ) -> list[models.ResourcePool]: """Get resource pools from database.""" async with self.session_maker() as session: - stmt = 
select(schemas.ResourcePoolORM).options(selectinload(schemas.ResourcePoolORM.classes)) + stmt = ( + select(schemas.ResourcePoolORM) + .options(selectinload(schemas.ResourcePoolORM.classes)) + .options(selectinload(schemas.ResourcePoolORM.cluster)) + ) if name is not None: stmt = stmt.where(schemas.ResourcePoolORM.name == name) if id is not None: @@ -183,6 +195,44 @@ async def get_resource_pools( output.append(rp.dump(quota)) return output + async def get_resource_pool_from_class( + self, api_user: base_models.APIUser, resource_class_id: int + ) -> models.ResourcePool: + """Get the resource pool the class belongs to.""" + async with self.session_maker() as session: + stmt = ( + select(schemas.ResourcePoolORM) + .where(schemas.ResourcePoolORM.classes.any(schemas.ResourceClassORM.id == resource_class_id)) + .options(selectinload(schemas.ResourcePoolORM.classes)) + .options(selectinload(schemas.ResourcePoolORM.cluster)) + ) + # NOTE: The line below ensures that the right users can access the right resources, do not remove. + stmt = _resource_pool_access_control(api_user, stmt) + res = await session.execute(stmt) + orm = res.scalar() + if orm is None: + raise errors.MissingResourceError( + message=f"Could not find the resource pool where a class with ID {resource_class_id} exists." + ) + quota = self.quotas_repo.get_quota(orm.quota) if orm.quota else None + return orm.dump(quota) + + async def get_default_resource_pool(self) -> models.ResourcePool: + """Get the default resource pool.""" + async with self.session_maker() as session: + stmt = ( + select(schemas.ResourcePoolORM) + .where(schemas.ResourcePoolORM.default == true()) + .options(selectinload(schemas.ResourcePoolORM.classes)) + ) + res = await session.scalar(stmt) + if res is None: + raise errors.ProgrammingError( + message="Could not find the default resource pool, but this has to exist." + ) + quota = self.quotas_repo.get_quota(res.quota) if res.quota else None + return res.dump(quota) + async def get_default_resource_class(self) -> models.ResourceClass: """Get the default resource class in the default resource pool.""" async with self.session_maker() as session: @@ -238,14 +288,15 @@ async def insert_resource_pool( ) -> models.ResourcePool: """Insert resource pool into database.""" quota = None - if resource_pool.quota: + if resource_pool.quota is not None: for rc in resource_pool.classes: if not resource_pool.quota.is_resource_class_compatible(rc): raise errors.ValidationError( message=f"The quota {quota} is not compatible with resource class {rc}" ) - quota = self.quotas_repo.create_quota(resource_pool.quota) + quota = self.quotas_repo.create_quota(models.Quota.from_dict(asdict(resource_pool.quota))) resource_pool = resource_pool.set_quota(quota) + orm = schemas.ResourcePoolORM.load(resource_pool) async with self.session_maker() as session, session.begin(): if orm.idle_threshold == 0: @@ -261,7 +312,11 @@ async def insert_resource_pool( message="There can only be one default resource pool and one already exists." 
) session.add(orm) - return orm.dump(quota) + + await session.flush() + await session.refresh(orm) + + return orm.dump(quota) async def get_classes( self, @@ -295,7 +350,7 @@ async def get_resource_class(self, api_user: base_models.APIUser, id: int) -> mo """Get a specific resource class by its ID.""" classes = await self.get_classes(api_user, id) if len(classes) == 0: - raise errors.MissingResourceError(message=f"The resource class with ID {id} cannot be found", quiet=True) + raise errors.MissingResourceError(message=f"The resource class with ID {id} cannot be found") return classes[0] @_only_admins @@ -335,7 +390,6 @@ async def insert_resource_class( @_only_admins async def update_resource_pool(self, api_user: base_models.APIUser, id: int, **kwargs: Any) -> models.ResourcePool: """Update an existing resource pool in the database.""" - rp: Optional[schemas.ResourcePoolORM] = None async with self.session_maker() as session, session.begin(): stmt = ( select(schemas.ResourcePoolORM) @@ -354,15 +408,27 @@ async def update_resource_pool(self, api_user: base_models.APIUser, id: int, **k kwargs["idle_threshold"] = None if kwargs.get("hibernation_threshold") == 0: kwargs["hibernation_threshold"] = None + if kwargs.get("cluster_id") == "": + kwargs["cluster_id"] = None # NOTE: The .update method on the model validates the update to the resource pool old_rp_model = rp.dump(quota) new_rp_model = old_rp_model.update(**kwargs) - new_classes = None new_classes_coroutines = [] for key, val in kwargs.items(): match key: case "name" | "public" | "default" | "idle_threshold" | "hibernation_threshold": setattr(rp, key, val) + case "cluster_id": + cluster_id = val + cluster = None + + if cluster_id is not None: + cluster = await self.__cluster_repo.select( + api_user=api_user, cluster_id=ULID.from_str(cluster_id) + ) + + rp.cluster_id = cluster_id + new_rp_model = new_rp_model.update(cluster=cluster) case "quota": if val is None: continue @@ -372,34 +438,37 @@ async def update_resource_pool(self, api_user: base_models.APIUser, id: int, **k # 2. a quota exists and can only be updated, not replaced (the ids, if provided, must match) new_id = val.get("id") - - if quota and quota.id is not None and new_id is not None and quota.id != new_id: - raise errors.ValidationError( - message="The ID of an existing quota cannot be updated, " - f"please remove the ID field from the request or use ID {quota.id}." - ) + quota_id = quota.id if quota is not None else None # the id must match for update - if quota: - val["id"] = quota.id or new_id + match (quota_id, new_id): + case (None, _): + pass + case (quota_id, None): + val["id"] = quota_id + case (quota_id, new_id): + if quota_id != new_id: + raise errors.ValidationError( + message="The ID of an existing quota cannot be updated, " + f"please remove the ID field from the request or use ID {quota_id}." 
+                            )
 
                     new_quota = models.Quota.from_dict(val)
-                    if new_id or quota:
-                        new_quota = self.quotas_repo.update_quota(new_quota)
-                    else:
+                    if new_id is None and quota is None:
                         new_quota = self.quotas_repo.create_quota(new_quota)
+                    else:
+                        new_quota = self.quotas_repo.update_quota(new_quota)
+
                     rp.quota = new_quota.id
                     new_rp_model = new_rp_model.update(quota=new_quota)
                 case "classes":
-                    new_classes = []
                     for cls in val:
                         class_id = cls.pop("id")
                         cls.pop("matching", None)
                         if len(cls) == 0:
                             raise errors.ValidationError(
-                                message="More fields than the id of the class "
-                                "should be provided when updating it"
+                                message="More fields than the id of the class should be provided when updating it"
                             )
                         new_classes_coroutines.append(
                             self.update_resource_class(
@@ -818,8 +887,86 @@ async def update_user(self, api_user: base_models.APIUser, keycloak_id: str, **k
         allowed_updates = set(["no_default_access"])
         if not set(kwargs.keys()).issubset(allowed_updates):
             raise errors.ValidationError(
-                message=f"Only the following fields {allowed_updates} " "can be updated for a resource pool user.."
+                message=f"Only the following fields {allowed_updates} can be updated for a resource pool user."
             )
         if (no_default_access := kwargs.get("no_default_access")) is not None:
             user.no_default_access = no_default_access
         return user.dump()
+
+
+@dataclass
+class ClusterRepository:
+    """Repository for cluster configurations."""
+
+    session_maker: Callable[..., AsyncSession]
+
+    async def select_all(self) -> AsyncGenerator[SavedCluster, Any]:
+        """Get cluster configurations from the database."""
+        async with self.session_maker() as session:
+            clusters = await session.stream_scalars(select(ClusterORM))
+            async for cluster in clusters:
+                yield cluster.dump()
+
+    async def select(self, api_user: base_models.APIUser, cluster_id: ULID) -> SavedCluster:
+        """Get a single cluster configuration from the database."""
+
+        async with self.session_maker() as session:
+            r = await session.scalars(select(ClusterORM).where(ClusterORM.id == cluster_id))
+            cluster = r.one_or_none()
+            if cluster is None:
+                raise errors.MissingResourceError(message=f"Cluster definition id='{cluster_id}' does not exist.")
+
+            return cluster.dump()
+
+    @_only_admins
+    async def insert(self, api_user: base_models.APIUser, cluster: Cluster) -> Cluster:
+        """Creates a new cluster configuration."""
+
+        cluster_orm = ClusterORM.load(cluster)
+        async with self.session_maker() as session, session.begin():
+            session.add(cluster_orm)
+            await session.flush()
+            await session.refresh(cluster_orm)
+
+        return cluster_orm.dump()
+
+    @_only_admins
+    async def update(self, api_user: base_models.APIUser, cluster: ClusterPatch, cluster_id: ULID) -> Cluster:
+        """Updates a cluster configuration."""
+
+        async with self.session_maker() as session, session.begin():
+            saved_cluster = (await session.scalars(select(ClusterORM).where(ClusterORM.id == cluster_id))).one_or_none()
+            if saved_cluster is None:
+                raise errors.MissingResourceError(message=f"Cluster definition id='{cluster_id}' does not exist.")
+
+            for key, value in asdict(cluster).items():
+                match key, value:
+                    case "session_protocol", CrcProtocol():
+                        setattr(saved_cluster, key, value.value)
+                    case "session_storage_class", "":
+                        # If we received an empty string in the storage class, reset it to the default storage class by
+                        # setting it to None.
+                        setattr(saved_cluster, key, None)
+                    case "service_account_name", "":
+                        # If we received an empty string in the service account name, set it back to None.
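+                        # (An empty string is the documented "remove this value" marker for
+                        # K8sResourceNamePatch fields in the API spec, so clearing the field
+                        # here matches the declared contract.)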
+                        setattr(saved_cluster, key, None)
+                    case _, None:
+                        # Do not modify a value which has not been set in the patch
+                        pass
+                    case _, _:
+                        setattr(saved_cluster, key, value)
+
+            await session.flush()
+            await session.refresh(saved_cluster)
+
+            return saved_cluster.dump()
+
+    @_only_admins
+    async def delete(self, api_user: base_models.APIUser, cluster_id: ULID) -> None:
+        """Delete a cluster configuration from the database."""
+
+        async with self.session_maker() as session, session.begin():
+            r = await session.scalars(select(ClusterORM).where(ClusterORM.id == cluster_id))
+            cluster = r.one_or_none()
+            if cluster is not None:
+                await session.delete(cluster)
diff --git a/components/renku_data_services/crc/models.py b/components/renku_data_services/crc/models.py
index c5eea3417..efb91e21d 100644
--- a/components/renku_data_services/crc/models.py
+++ b/components/renku_data_services/crc/models.py
@@ -1,13 +1,22 @@
 """Domain models for the application."""
 
+from __future__ import annotations
+
 from collections.abc import Callable
+from copy import deepcopy
 from dataclasses import asdict, dataclass, field
 from enum import StrEnum
-from typing import Any, Optional, Protocol
+from typing import TYPE_CHECKING, Any, Optional, Protocol
 from uuid import uuid4
 
+from ulid import ULID
+
+from renku_data_services import errors
 from renku_data_services.errors import ValidationError
 
+if TYPE_CHECKING:
+    from renku_data_services.crc.apispec import Protocol as CrcApiProtocol
+
 
 class ResourcesProtocol(Protocol):
     """Used to represent resource values present in a resource class or quota."""
@@ -29,7 +38,7 @@ def memory(self) -> int:
 
     @property
     def max_storage(self) -> Optional[int]:
-        """Maximum allowable storeage in gigabytes."""
+        """Maximum allowable storage in gigabytes."""
         ...
@@ -72,7 +81,7 @@ class NodeAffinity: required_during_scheduling: bool = False @classmethod - def from_dict(cls, data: dict) -> "NodeAffinity": + def from_dict(cls, data: dict) -> NodeAffinity: """Create a node affinity from a dictionary.""" return cls(**data) @@ -106,7 +115,7 @@ def __post_init__(self) -> None: object.__setattr__(self, "tolerations", sorted(self.tolerations)) @classmethod - def from_dict(cls, data: dict) -> "ResourceClass": + def from_dict(cls, data: dict) -> ResourceClass: """Create the model from a plain dictionary.""" node_affinities: list[NodeAffinity] = [] tolerations: list[str] = [] @@ -125,11 +134,11 @@ def from_dict(cls, data: dict) -> "ResourceClass": quota = data_quota.id return cls(**{**data, "tolerations": tolerations, "node_affinities": node_affinities, "quota": quota}) - def is_quota_valid(self, quota: "Quota") -> bool: + def is_quota_valid(self, quota: Quota) -> bool: """Determine if a quota is compatible with the resource class.""" return quota >= self - def update(self, **kwargs: dict) -> "ResourceClass": + def update(self, **kwargs: dict) -> ResourceClass: """Update a field of the resource class and return a new copy.""" if not kwargs: return self @@ -151,25 +160,87 @@ class Quota(ResourcesCompareMixin): memory: int gpu: int gpu_kind: GpuKind = GpuKind.NVIDIA - id: Optional[str] = None + id: str @classmethod - def from_dict(cls, data: dict) -> "Quota": + def from_dict(cls, data: dict) -> Quota: """Create the model from a plain dictionary.""" - gpu_kind = GpuKind.NVIDIA - if "gpu_kind" in data: - gpu_kind = data["gpu_kind"] if isinstance(data["gpu_kind"], GpuKind) else GpuKind[data["gpu_kind"]] - return cls(**{**data, "gpu_kind": gpu_kind}) + instance = deepcopy(data) + + match instance.get("gpu_kind"): + case None: + instance["gpu_kind"] = GpuKind.NVIDIA + case GpuKind(): + pass + case x: + instance["gpu_kind"] = GpuKind[x] - def is_resource_class_compatible(self, rc: "ResourceClass") -> bool: + match instance.get("id"): + case None: + instance["id"] = str(uuid4()) + case "": + instance["id"] = str(uuid4()) + + return cls(**instance) + + def is_resource_class_compatible(self, rc: ResourceClass) -> bool: """Determine if a resource class is compatible with the quota.""" return rc <= self - def generate_id(self) -> "Quota": - """Create a new quota with its ID set to a uuid.""" - if self.id is not None: - return self - return self.from_dict({**asdict(self), "id": str(uuid4())}) + +@dataclass(frozen=True, eq=True, kw_only=True) +class ClusterPatch: + """K8s Cluster settings patch.""" + + name: str | None + config_name: str | None + session_protocol: CrcApiProtocol | None + session_host: str | None + session_port: int | None + session_path: str | None + session_ingress_annotations: dict[str, Any] | None + session_tls_secret_name: str | None + session_storage_class: str | None + service_account_name: str | None + + +@dataclass(frozen=True, eq=True, kw_only=True) +class Cluster: + """K8s Cluster settings.""" + + name: str + config_name: str + session_protocol: CrcApiProtocol + session_host: str + session_port: int + session_path: str + session_ingress_annotations: dict[str, Any] + session_tls_secret_name: str + session_storage_class: str | None + service_account_name: str | None = None + + def to_cluster_patch(self) -> ClusterPatch: + """Convert to ClusterPatch.""" + + return ClusterPatch( + name=self.name, + config_name=self.config_name, + session_protocol=self.session_protocol, + session_host=self.session_host, + session_port=self.session_port, + 
session_path=self.session_path, + session_ingress_annotations=self.session_ingress_annotations, + session_tls_secret_name=self.session_tls_secret_name, + session_storage_class=self.session_storage_class, + service_account_name=self.service_account_name, + ) + + +@dataclass(frozen=True, eq=True, kw_only=True) +class SavedCluster(Cluster): + """K8s Cluster settings from the DB.""" + + id: ULID @dataclass(frozen=True, eq=True, kw_only=True) @@ -177,13 +248,14 @@ class ResourcePool: """Resource pool model.""" name: str - classes: list["ResourceClass"] - quota: Optional[Quota] = None - id: Optional[int] = None - idle_threshold: Optional[int] = None - hibernation_threshold: Optional[int] = None + classes: list[ResourceClass] + quota: Quota | None = None + id: int | None = None + idle_threshold: int | None = None + hibernation_threshold: int | None = None default: bool = False public: bool = False + cluster: SavedCluster | None = None def __post_init__(self) -> None: """Validate the resource pool after initialization.""" @@ -205,7 +277,7 @@ def __post_init__(self) -> None: default_classes = [] for cls in list(self.classes): - if self.quota and not self.quota.is_resource_class_compatible(cls): + if self.quota is not None and not self.quota.is_resource_class_compatible(cls): raise ValidationError( message=f"The resource class with name {cls.name} is not compatible with the quota." ) @@ -214,33 +286,61 @@ def __post_init__(self) -> None: if len(default_classes) != 1: raise ValidationError(message="One default class is required in each resource pool.") - def set_quota(self, val: Quota) -> "ResourcePool": + def set_quota(self, val: Quota) -> ResourcePool: """Set the quota for a resource pool.""" for cls in list(self.classes): if not val.is_resource_class_compatible(cls): raise ValidationError( - message=f"The resource class with name {cls.name} is not compatiable with the quota." + message=f"The resource class with name {cls.name} is not compatible with the quota." ) return self.from_dict({**asdict(self), "quota": val}) - def update(self, **kwargs: Any) -> "ResourcePool": + def update(self, **kwargs: Any) -> ResourcePool: """Determine if an update to a resource pool is valid and if valid create new updated resource pool.""" if self.default and "default" in kwargs and not kwargs["default"]: raise ValidationError(message="A default resource pool cannot be made non-default.") return ResourcePool.from_dict({**asdict(self), **kwargs}) @classmethod - def from_dict(cls, data: dict) -> "ResourcePool": + def from_dict(cls, data: dict) -> ResourcePool: """Create the model from a plain dictionary.""" - quota: Optional[Quota] = None + cluster: SavedCluster | None = None + quota: Quota | None = None + classes: list[ResourceClass] = [] + if "quota" in data and isinstance(data["quota"], dict): quota = Quota.from_dict(data["quota"]) elif "quota" in data and isinstance(data["quota"], Quota): quota = data["quota"] + if "classes" in data and isinstance(data["classes"], set): classes = [ResourceClass.from_dict(c) if isinstance(c, dict) else c for c in list(data["classes"])] elif "classes" in data and isinstance(data["classes"], list): classes = [ResourceClass.from_dict(c) if isinstance(c, dict) else c for c in data["classes"]] + + match tmp := data.get("cluster"): + case SavedCluster(): + # This has to be before the dict() case, as this is also an instance of dict. 
+ cluster = tmp + case dict(): + cluster = SavedCluster( + name=tmp["name"], + config_name=tmp["config_name"], + session_protocol=tmp["session_protocol"], + session_host=tmp["session_host"], + session_port=tmp["session_port"], + session_path=tmp["session_path"], + session_ingress_annotations=tmp["session_ingress_annotations"], + session_tls_secret_name=tmp["session_tls_secret_name"], + session_storage_class=tmp["session_storage_class"], + id=tmp["id"], + service_account_name=tmp.get("service_account_name"), + ) + case None: + cluster = None + case unknown: + raise errors.ValidationError(message=f"Got unexpected cluster data {unknown} when creating model") + return cls( name=data["name"], id=data.get("id"), @@ -250,4 +350,19 @@ def from_dict(cls, data: dict) -> "ResourcePool": public=data.get("public", False), idle_threshold=data.get("idle_threshold"), hibernation_threshold=data.get("hibernation_threshold"), + cluster=cluster, ) + + def get_resource_class(self, resource_class_id: int) -> ResourceClass | None: + """Find a specific resource class in the resource pool by the resource class id.""" + for rc in self.classes: + if rc.id == resource_class_id: + return rc + return None + + def get_default_resource_class(self) -> ResourceClass | None: + """Find the default resource class in the pool.""" + for rc in self.classes: + if rc.default: + return rc + return None diff --git a/components/renku_data_services/crc/orm.py b/components/renku_data_services/crc/orm.py index 3ec19ea35..3467666af 100644 --- a/components/renku_data_services/crc/orm.py +++ b/components/renku_data_services/crc/orm.py @@ -1,16 +1,23 @@ """SQLAlchemy schemas for the CRC database.""" -import logging from typing import Optional -from sqlalchemy import BigInteger, Column, Identity, Integer, MetaData, String, Table +from sqlalchemy import JSON, BigInteger, Column, Identity, Integer, MetaData, String, Table +from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column, relationship from sqlalchemy.schema import ForeignKey +from ulid import ULID import renku_data_services.base_models as base_models +from renku_data_services.app_config import logging from renku_data_services.crc import models +from renku_data_services.crc.apispec import Protocol as CrcApiProtocol from renku_data_services.errors import errors +from renku_data_services.utils.sqlalchemy import ULIDType +logger = logging.getLogger(__name__) + +JSONVariant = JSON().with_variant(JSONB(), "postgresql") metadata_obj = MetaData(schema="resource_pools") # Has to match alembic ini section name @@ -133,6 +140,57 @@ def dump(self, matching_criteria: models.ResourceClass | None = None) -> models. 
) +class ClusterORM(BaseORM): + """Cluster definition.""" + + __tablename__ = "clusters" + id: Mapped[ULID] = mapped_column("id", ULIDType, primary_key=True, default_factory=lambda: str(ULID()), init=False) + name: Mapped[str] = mapped_column(String(40), unique=True, index=True) + config_name: Mapped[str] = mapped_column(String(40), unique=True, index=True) + session_protocol: Mapped[str] = mapped_column(String(10)) + session_host: Mapped[str] = mapped_column(String(256)) + session_port: Mapped[int] = mapped_column(Integer) + session_path: Mapped[str] = mapped_column(String()) + session_ingress_annotations: Mapped[dict[str, str]] = mapped_column(JSONVariant) + session_tls_secret_name: Mapped[str] = mapped_column(String(256)) + session_storage_class: Mapped[str | None] = mapped_column(String(256)) + # NOTE: The service account name is expected to point to a service account that already exists + # in the cluster in the namespace where the sessions will be launched. + service_account_name: Mapped[str | None] = mapped_column(String(256), default=None, nullable=True) + + def dump(self) -> models.SavedCluster: + """Create a cluster model from the ORM object.""" + return models.SavedCluster( + id=self.id, + name=self.name, + config_name=self.config_name, + session_protocol=CrcApiProtocol[self.session_protocol], + session_host=self.session_host, + session_port=self.session_port, + session_path=self.session_path, + session_ingress_annotations=self.session_ingress_annotations, + session_tls_secret_name=self.session_tls_secret_name, + session_storage_class=self.session_storage_class, + service_account_name=self.service_account_name, + ) + + @classmethod + def load(cls, cluster: models.Cluster) -> "ClusterORM": + """Create an ORM object from the cluster model.""" + return ClusterORM( + name=cluster.name, + config_name=cluster.config_name, + service_account_name=cluster.service_account_name, + session_protocol=cluster.session_protocol.value, + session_host=cluster.session_host, + session_port=cluster.session_port, + session_path=cluster.session_path, + session_ingress_annotations=cluster.session_ingress_annotations, + session_tls_secret_name=cluster.session_tls_secret_name, + session_storage_class=cluster.session_storage_class, + ) + + class ResourcePoolORM(BaseORM): """Resource pool specifies a set of resource classes, users that can access them and a quota.""" @@ -140,7 +198,10 @@ class ResourcePoolORM(BaseORM): name: Mapped[str] = mapped_column(String(40), index=True) quota: Mapped[Optional[str]] = mapped_column(String(63), index=True, default=None) users: Mapped[list["RPUserORM"]] = relationship( - secondary=resource_pools_users, back_populates="resource_pools", default_factory=list + secondary=resource_pools_users, + back_populates="resource_pools", + default_factory=list, + repr=False, ) classes: Mapped[list["ResourceClassORM"]] = relationship( back_populates="resource_pool", @@ -157,13 +218,22 @@ class ResourcePoolORM(BaseORM): default: Mapped[bool] = mapped_column(default=False, index=True) public: Mapped[bool] = mapped_column(default=False, index=True) id: Mapped[int] = mapped_column("id", Integer, Identity(always=True), primary_key=True, default=None, init=False) + cluster_id: Mapped[Optional[ULID]] = mapped_column( + ForeignKey(ClusterORM.id, ondelete="SET NULL"), default=None, index=True + ) + cluster: Mapped[Optional[ClusterORM]] = relationship(viewonly=True, default=None, lazy="selectin", init=False) @classmethod def load(cls, resource_pool: models.ResourcePool) -> "ResourcePoolORM": 
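+        # NOTE: only references are stored on the row: the quota by its id (the quota
+        # object itself lives in Kubernetes, see QuotaRepository) and the cluster by
+        # its foreign key; ResourcePoolORM.dump performs the reverse mapping.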
"""Create an ORM object from the resource pool model.""" quota = None - if isinstance(resource_pool.quota, models.Quota): + if resource_pool.quota is not None: quota = resource_pool.quota.id + + cluster_id = None + if resource_pool.cluster is not None: + cluster_id = resource_pool.cluster.id + return cls( name=resource_pool.name, quota=quota, @@ -172,6 +242,7 @@ def load(cls, resource_pool: models.ResourcePool) -> "ResourcePoolORM": hibernation_threshold=resource_pool.hibernation_threshold, public=resource_pool.public, default=resource_pool.default, + cluster_id=cluster_id, ) def dump( @@ -179,17 +250,18 @@ def dump( ) -> models.ResourcePool: """Create a resource pool model from the ORM object and a quota.""" classes: list[ResourceClassORM] = self.classes - if quota and quota.id != self.quota: + if quota is not None and quota.id != self.quota: raise errors.BaseError( message="Unexpected error when dumping a resource pool ORM.", detail=f"The quota name in the database {self.quota} and Kubernetes {quota.id} do not match.", ) if (quota is None and self.quota is not None) or (quota is not None and self.quota is None): - logging.error( + logger.error( f"Unexpected error when dumping resource pool ORM with ID {self.id}. " f"The quota in the database {self.quota} and Kubernetes {quota} do not match. " f"Using the quota {quota} in the response." ) + cluster = None if self.cluster is None else self.cluster.dump() return models.ResourcePool( id=self.id, name=self.name, @@ -199,6 +271,7 @@ def dump( hibernation_threshold=self.hibernation_threshold, public=self.public, default=self.default, + cluster=cluster, ) diff --git a/bases/renku_data_services/data_api/server_options.py b/components/renku_data_services/crc/server_options.py similarity index 96% rename from bases/renku_data_services/data_api/server_options.py rename to components/renku_data_services/crc/server_options.py index 3e52f889d..ee757626f 100644 --- a/bases/renku_data_services/data_api/server_options.py +++ b/components/renku_data_services/crc/server_options.py @@ -117,7 +117,7 @@ def generate_default_resource_pool( "gpu_request": "gpu", } class_names = _get_classname() - largest_attribute_options = getattr(getattr(server_options, largest_attribute), "options") + largest_attribute_options = getattr(server_options, largest_attribute).options max_storage = round(max(server_options.disk_request.options) / 1_000_000_000) for ival, val in enumerate(sorted(largest_attribute_options)): cls = {} @@ -125,7 +125,7 @@ def generate_default_resource_pool( if largest_attribute == old_name: cls[new_name] = val else: - options = getattr(getattr(server_options, old_name), "options") + options = getattr(server_options, old_name).options try: cls[new_name] = options[ival] except IndexError: diff --git a/components/renku_data_services/data_connectors/api.spec.yaml b/components/renku_data_services/data_connectors/api.spec.yaml index 90b68377e..f5bc0094c 100644 --- a/components/renku_data_services/data_connectors/api.spec.yaml +++ b/components/renku_data_services/data_connectors/api.spec.yaml @@ -70,6 +70,32 @@ paths: $ref: "#/components/responses/Error" tags: - data_connectors + /data_connectors/global: + post: + summary: Create a new data connector + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/GlobalDataConnectorPost" + responses: + "200": + description: The data connector already exists + content: + application/json: + schema: + $ref: "#/components/schemas/DataConnector" + "201": + description: The 
data connector was created + content: + application/json: + schema: + $ref: "#/components/schemas/DataConnector" + default: + $ref: "#/components/responses/Error" + tags: + - data_connectors /data_connectors/{data_connector_id}: parameters: - in: path @@ -133,6 +159,32 @@ paths: $ref: "#/components/responses/Error" tags: - data_connectors + /data_connectors/global/{slug}: + parameters: + - in: path + name: slug + required: true + schema: + type: string + get: + summary: Get a global data connector by slug + responses: + "200": + description: The data connector + content: + application/json: + schema: + $ref: "#/components/schemas/DataConnector" + "404": + description: The data connector does not exist + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - data_connectors /namespaces/{namespace}/data_connectors/{slug}: parameters: - in: path @@ -164,6 +216,42 @@ paths: $ref: "#/components/responses/Error" tags: - data_connectors + /namespaces/{namespace}/projects/{project}/data_connectors/{slug}: + parameters: + - in: path + name: namespace + required: true + schema: + type: string + - in: path + name: project + required: true + schema: + type: string + - in: path + name: slug + required: true + schema: + type: string + get: + summary: Get a data connector that is owned by a namespace by its slug + responses: + "200": + description: The data connector + content: + application/json: + schema: + $ref: "#/components/schemas/DataConnector" + "404": + description: The data connector does not exist + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - data_connectors /data_connectors/{data_connector_id}/permissions: parameters: - in: path @@ -310,6 +398,48 @@ paths: $ref: "#/components/responses/Error" tags: - data_connectors + /projects/{project_id}/data_connector_links: + parameters: + - in: path + name: project_id + required: true + schema: + $ref: "#/components/schemas/Ulid" + description: the ID of the project + get: + summary: Get all links from data connectors to a given project + responses: + "200": + description: List of data connector to project links + content: + application/json: + schema: + $ref: "#/components/schemas/DataConnectorToProjectLinksList" + default: + $ref: "#/components/responses/Error" + tags: + - projects + /projects/{project_id}/inaccessible_data_connector_links: + parameters: + - in: path + name: project_id + required: true + schema: + $ref: "#/components/schemas/Ulid" + description: the ID of the project + get: + summary: Get the number of links that the user does not have access to in a project + responses: + "200": + description: List of data connector to project links + content: + application/json: + schema: + $ref: "#/components/schemas/InaccessibleDataConnectorLinks" + default: + $ref: "#/components/responses/Error" + tags: + - projects components: schemas: DataConnectorsList: @@ -328,9 +458,9 @@ components: name: $ref: "#/components/schemas/DataConnectorName" namespace: - $ref: "#/components/schemas/Slug" + $ref: "#/components/schemas/SlugResponse" slug: - $ref: "#/components/schemas/Slug" + $ref: "#/components/schemas/SlugResponse" storage: $ref: "#/components/schemas/CloudStorageCore" creation_date: @@ -348,7 +478,6 @@ components: required: - id - name - - namespace - slug - storage - creation_date @@ -364,7 +493,7 @@ components: name: $ref: 
"#/components/schemas/DataConnectorName" namespace: - $ref: "#/components/schemas/Slug" + $ref: "#/components/schemas/OneOrTwoSlugs" slug: $ref: "#/components/schemas/Slug" storage: @@ -380,7 +509,18 @@ components: $ref: "#/components/schemas/KeywordsList" required: - name - - namespace + - storage + GlobalDataConnectorPost: + description: | + A global data connector to be created in Renku 2.0 + type: object + additionalProperties: false + properties: + storage: + oneOf: + - $ref: "#/components/schemas/CloudStorageCorePost" + - $ref: "#/components/schemas/CloudStorageUrlV2" + required: - storage DataConnectorPatch: description: | @@ -391,7 +531,7 @@ components: name: $ref: "#/components/schemas/DataConnectorName" namespace: - $ref: "#/components/schemas/Slug" + $ref: "#/components/schemas/OneOrTwoSlugs" slug: $ref: "#/components/schemas/Slug" storage: @@ -564,6 +704,7 @@ components: minLength: 1 maxLength: 5000 nullable: true + example: My secret value RCloneOption: type: object description: Single field on an RClone storage, like "remote" or "access_key_id" @@ -604,6 +745,9 @@ components: provider: type: string description: The provider this value is applicable for. Empty if valid for all providers. + required: + - value + - help required: type: boolean description: whether the option is required or not @@ -622,7 +766,18 @@ components: type: type: string description: data type of option value. RClone has more options but they map to the ones listed here. - enum: ["int", "bool", "string", "Time", "Duration", "MultiEncoder", "SizeSuffix", "SpaceSepList", "CommaSepList", "Tristate"] + enum: ["int", "bool", "string", "stringArray", "Time", "Duration", "MultiEncoder", "SizeSuffix", "SpaceSepList", "CommaSepList", "Tristate", "Encoding", "Bits"] + required: + - name + - help + - default + - default_str + - required + - ispassword + - sensitive + - advanced + - exclusive + - type Ulid: description: ULID identifier type: string @@ -643,6 +798,18 @@ components: # - cannot contain uppercase characters pattern: '^(?!.*\.git$|.*\.atom$|.*[\-._][\-._].*)[a-z0-9][a-z0-9\-_.]*$' example: "a-slug-example" + SlugResponse: + description: A command-line/url friendly name for a namespace + type: string + minLength: 1 + example: "a-slug-example" + OneOrTwoSlugs: + description: A command-line/url friendly name for a single slug or two slugs separated by / + type: string + minLength: 1 + maxLength: 200 + pattern: '^(?!.*\.git$|.*\.atom$|.*[\-._][\-._].*)[a-z0-9][a-z0-9\-_.]*(? 
BlueprintFactoryResponse: """List data connectors.""" @@ -57,10 +66,21 @@ async def _get_all( query: apispec.DataConnectorsGetQuery, validator: RCloneValidator, ) -> tuple[list[dict[str, Any]], int]: + ns_segments = query.namespace.split("/") + ns: None | NamespacePath | ProjectPath + if len(ns_segments) == 0 or (len(ns_segments) == 1 and len(ns_segments[0]) == 0): + ns = None + elif len(ns_segments) == 1 and len(ns_segments[0]) > 0: + ns = NamespacePath.from_strings(*ns_segments) + elif len(ns_segments) == 2: + ns = ProjectPath.from_strings(*ns_segments) + else: + raise errors.ValidationError( + message="Got an unexpected number of path segments for the data connector namespace" + " in the request query parameter, expected 0, 1 or 2" + ) data_connectors, total_num = await self.data_connector_repo.get_data_connectors( - user=user, - pagination=pagination, - namespace=query.namespace, + user=user, pagination=pagination, namespace=ns ) return [ validate_and_dump( @@ -82,7 +102,10 @@ async def _post( _: Request, user: base_models.APIUser, body: apispec.DataConnectorPost, validator: RCloneValidator ) -> JSONResponse: data_connector = validate_unsaved_data_connector(body, validator=validator) - result = await self.data_connector_repo.insert_data_connector(user=user, data_connector=data_connector) + result = await self.data_connector_repo.insert_namespaced_data_connector( + user=user, data_connector=data_connector + ) + await self.metrics.data_connector_created(user) return validated_json( apispec.DataConnector, self._dump_data_connector(result, validator=validator), @@ -91,6 +114,27 @@ async def _post( return "/data_connectors", ["POST"], _post + def post_global(self) -> BlueprintFactoryResponse: + """Create a new global data connector.""" + + @authenticate(self.authenticator) + @only_authenticated + @validate(json=apispec.GlobalDataConnectorPost) + async def _post_global( + _: Request, user: base_models.APIUser, body: apispec.GlobalDataConnectorPost, validator: RCloneValidator + ) -> JSONResponse: + data_connector = await prevalidate_unsaved_global_data_connector(body, validator=validator) + result, inserted = await self.data_connector_repo.insert_global_data_connector( + user=user, data_connector=data_connector, validator=validator + ) + return validated_json( + apispec.DataConnector, + self._dump_data_connector(result, validator=validator), + status=201 if inserted else 200, + ) + + return "/data_connectors/global", ["POST"], _post_global + def get_one(self) -> BlueprintFactoryResponse: """Get a specific data connector.""" @@ -115,6 +159,32 @@ async def _get_one( return "/data_connectors/", ["GET"], _get_one + def get_one_global_by_slug(self) -> BlueprintFactoryResponse: + """Get a specific global data connector by slug.""" + + @authenticate(self.authenticator) + @extract_if_none_match + async def _get_one_global_by_slug( + _: Request, + user: base_models.APIUser, + slug: Slug, + etag: str | None, + validator: RCloneValidator, + ) -> HTTPResponse: + data_connector = await self.data_connector_repo.get_global_data_connector_by_slug(user=user, slug=slug) + + if data_connector.etag == etag: + return HTTPResponse(status=304) + + headers = {"ETag": data_connector.etag} + return validated_json( + apispec.DataConnector, + self._dump_data_connector(data_connector, validator=validator), + headers=headers, + ) + + return "/data_connectors/global/", ["GET"], _get_one_global_by_slug + def get_one_by_slug(self) -> BlueprintFactoryResponse: """Get a specific data connector by namespace/entity slug.""" @@ 
-124,12 +194,13 @@ async def _get_one_by_slug( _: Request, user: base_models.APIUser, namespace: str, - slug: str, + slug: Slug, etag: str | None, validator: RCloneValidator, ) -> HTTPResponse: data_connector = await self.data_connector_repo.get_data_connector_by_slug( - user=user, namespace=namespace, slug=slug + user=user, + path=DataConnectorPath.from_strings(namespace, slug.value), ) if data_connector.etag == etag: @@ -144,6 +215,43 @@ async def _get_one_by_slug( return "/namespaces//data_connectors/", ["GET"], _get_one_by_slug + def get_one_by_slug_from_project_namespace(self) -> BlueprintFactoryResponse: + """Get a specific data connector by namespace/project_slug/dc_slug slug.""" + + @authenticate(self.authenticator) + @extract_if_none_match + async def _get_one_from_project_namespace( + _: Request, + user: base_models.APIUser, + ns_slug: Slug, + project_slug: Slug, + dc_slug: Slug, + etag: str | None, + validator: RCloneValidator, + ) -> HTTPResponse: + dc_path = DataConnectorInProjectPath.from_strings( + ns_slug.value, + project_slug.value, + dc_slug.value, + ) + data_connector = await self.data_connector_repo.get_data_connector_by_slug(user=user, path=dc_path) + + if data_connector.etag == etag: + return HTTPResponse(status=304) + + headers = {"ETag": data_connector.etag} + return validated_json( + apispec.DataConnector, + self._dump_data_connector(data_connector, validator=validator), + headers=headers, + ) + + return ( + "/namespaces//projects//data_connectors/", + ["GET"], + _get_one_from_project_namespace, + ) + def patch(self) -> BlueprintFactoryResponse: """Partially update a data connector.""" @@ -210,9 +318,7 @@ async def _get_all_project_links( user: base_models.APIUser, data_connector_id: ULID, ) -> JSONResponse: - links = await self.data_connector_to_project_link_repo.get_links_from( - user=user, data_connector_id=data_connector_id - ) + links = await self.data_connector_repo.get_links_from(user=user, data_connector_id=data_connector_id) return validated_json( apispec.DataConnectorToProjectLinksList, [self._dump_data_connector_to_project_link(link) for link in links], @@ -236,7 +342,8 @@ async def _post_project_link( data_connector_id=data_connector_id, project_id=ULID.from_str(body.project_id), ) - link = await self.data_connector_to_project_link_repo.insert_link(user=user, link=unsaved_link) + link = await self.data_connector_repo.insert_link(user=user, link=unsaved_link) + await self.metrics.data_connector_linked(user) return validated_json( apispec.DataConnectorToProjectLink, self._dump_data_connector_to_project_link(link), status=201 ) @@ -254,9 +361,7 @@ async def _delete_project_link( data_connector_id: ULID, link_id: ULID, ) -> HTTPResponse: - await self.data_connector_to_project_link_repo.delete_link( - user=user, data_connector_id=data_connector_id, link_id=link_id - ) + await self.data_connector_repo.delete_link(user=user, data_connector_id=data_connector_id, link_id=link_id) return HTTPResponse(status=204) return ( @@ -274,7 +379,7 @@ async def _get_all_data_connectors_links_to_project( user: base_models.APIUser, project_id: ULID, ) -> JSONResponse: - links = await self.data_connector_to_project_link_repo.get_links_to(user=user, project_id=project_id) + links = await self.data_connector_repo.get_links_to(user=user, project_id=project_id) return validated_json( apispec.DataConnectorToProjectLinksList, [self._dump_data_connector_to_project_link(link) for link in links], @@ -282,6 +387,26 @@ async def _get_all_data_connectors_links_to_project( return 
"/projects//data_connector_links", ["GET"], _get_all_data_connectors_links_to_project + def get_inaccessible_data_connectors_links_to_project(self) -> BlueprintFactoryResponse: + """The number of data connector links in a given project the user has no access to.""" + + @authenticate(self.authenticator) + async def _get_inaccessible_data_connectors_links_to_project( + _: Request, + user: base_models.APIUser, + project_id: ULID, + ) -> JSONResponse: + link_ids = await self.data_connector_repo.get_inaccessible_links_to_project( + user=user, project_id=project_id + ) + return validated_json(apispec.InaccessibleDataConnectorLinks, {"count": len(link_ids)}) + + return ( + "/projects//inaccessible_data_connector_links", + ["GET"], + _get_inaccessible_data_connectors_links_to_project, + ) + def get_secrets(self) -> BlueprintFactoryResponse: """List all saved secrets for a data connector.""" @@ -386,13 +511,28 @@ async def _delete_secrets( return "/data_connectors//secrets", ["DELETE"], _delete_secrets @staticmethod - def _dump_data_connector(data_connector: models.DataConnector, validator: RCloneValidator) -> dict[str, Any]: + def _dump_data_connector( + data_connector: models.DataConnector | models.GlobalDataConnector, validator: RCloneValidator + ) -> dict[str, Any]: """Dumps a data connector for API responses.""" storage = dump_storage_with_sensitive_fields(data_connector.storage, validator=validator) + if data_connector.namespace is None: + return dict( + id=str(data_connector.id), + name=data_connector.name, + slug=data_connector.slug, + storage=storage, + creation_date=data_connector.creation_date, + created_by=data_connector.created_by, + visibility=data_connector.visibility.value, + description=data_connector.description, + etag=data_connector.etag, + keywords=data_connector.keywords or [], + ) return dict( id=str(data_connector.id), name=data_connector.name, - namespace=data_connector.namespace.slug, + namespace=data_connector.namespace.path.serialize(), slug=data_connector.slug, storage=storage, # secrets=, diff --git a/components/renku_data_services/data_connectors/core.py b/components/renku_data_services/data_connectors/core.py index 64a65ccdb..193a883af 100644 --- a/components/renku_data_services/data_connectors/core.py +++ b/components/renku_data_services/data_connectors/core.py @@ -1,11 +1,21 @@ """Business logic for data connectors.""" +import contextlib +import re from dataclasses import asdict +from html.parser import HTMLParser from typing import Any +from pydantic import ValidationError as PydanticValidationError + from renku_data_services import base_models, errors from renku_data_services.authz.models import Visibility +from renku_data_services.base_models.core import ( + NamespacePath, + ProjectPath, +) from renku_data_services.data_connectors import apispec, models +from renku_data_services.data_connectors.doi.metadata import get_dataset_metadata from renku_data_services.storage import models as storage_models from renku_data_services.storage.rclone import RCloneValidator @@ -14,9 +24,21 @@ def dump_storage_with_sensitive_fields( storage: models.CloudStorageCore, validator: RCloneValidator ) -> models.CloudStorageCoreWithSensitiveFields: """Add sensitive fields to a storage configuration.""" - return models.CloudStorageCoreWithSensitiveFields( - sensitive_fields=list(validator.get_private_fields(storage.configuration)), **asdict(storage) - ) + try: + body = models.CloudStorageCoreWithSensitiveFields( + sensitive_fields=[ + 
apispec.RCloneOption.model_validate(option.model_dump(exclude_none=True, by_alias=True)) + for option in validator.get_private_fields(storage.configuration) + ], + **asdict(storage), + ) + except PydanticValidationError as err: + parts = [".".join(str(i) for i in field["loc"]) + ": " + field["msg"] for field in err.errors()] + message = ( + f"The server could not construct a valid response. Errors found in the following fields: {', '.join(parts)}" + ) + raise errors.ProgrammingError(message=message) from err + return body def validate_unsaved_storage( @@ -60,9 +82,23 @@ def validate_unsaved_data_connector( keywords = [kw.root for kw in body.keywords] if body.keywords is not None else [] storage = validate_unsaved_storage(body.storage, validator=validator) + if body.namespace is None: + raise NotImplementedError("Missing namespace not supported") + + slugs = body.namespace.split("/") + path: NamespacePath | ProjectPath + if len(slugs) == 1: + path = NamespacePath.from_strings(*slugs) + elif len(slugs) == 2: + path = ProjectPath.from_strings(*slugs) + else: + raise errors.ValidationError( + message=f"Got an unexpected number of slugs in the namespace for a data connector {slugs}" + ) + return models.UnsavedDataConnector( name=body.name, - namespace=body.namespace, + namespace=path, slug=body.slug or base_models.Slug.from_name(body.name).value, visibility=Visibility(body.visibility.value), created_by="", @@ -72,6 +108,106 @@ def validate_unsaved_data_connector( ) +async def prevalidate_unsaved_global_data_connector( + body: apispec.GlobalDataConnectorPost, validator: RCloneValidator +) -> models.UnsavedGlobalDataConnector: + """Pre-validate an unsaved data connector.""" + + storage = validate_unsaved_storage(body.storage, validator=validator) + # TODO: allow admins to create global data connectors, e.g. 
s3://giab + if storage.storage_type != "doi": + raise errors.ValidationError(message="Only doi storage type is allowed for global data connectors") + if not storage.readonly: + raise errors.ValidationError(message="Global data connectors must be read-only") + + rclone_metadata = await validator.get_doi_metadata(configuration=storage.configuration) + + doi_uri = f"doi:{rclone_metadata.doi}" + slug = base_models.Slug.from_name(doi_uri).value + + # Override provider in storage config + storage.configuration["provider"] = rclone_metadata.provider + + return models.UnsavedGlobalDataConnector( + name=doi_uri, + slug=slug, + visibility=Visibility.PUBLIC, + created_by="", + storage=storage, + description=None, + keywords=[], + ) + + +async def validate_unsaved_global_data_connector( + data_connector: models.UnsavedGlobalDataConnector, + validator: RCloneValidator, +) -> models.UnsavedGlobalDataConnector: + """Validate an unsaved data connector.""" + + # Check that we can list the files in the DOI + connection_result = await validator.test_connection( + configuration=data_connector.storage.configuration, source_path="/" + ) + if not connection_result.success: + raise errors.ValidationError( + message="The provided storage configuration is not currently working", detail=connection_result.error + ) + + # Fetch DOI metadata + rclone_metadata = await validator.get_doi_metadata(configuration=data_connector.storage.configuration) + if rclone_metadata is None: + raise errors.ValidationError( + message=f"Could not resolve DOI {data_connector.storage.configuration.get("doi", "")}" + ) + metadata = await get_dataset_metadata(rclone_metadata=rclone_metadata) + + name = data_connector.name + description = "" + keywords: list[str] = [] + if metadata is not None: + name = metadata.name or name + description = _html_to_text(metadata.description) + keywords = metadata.keywords + + # Fix metadata if needed + if len(name) > 99: + name = f"{name[:96]}..." + if len(description) > 500: + description = f"{description[:497]}..." 
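+
+    # Keywords may arrive as comma-separated strings: split them apart and drop,
+    # without failing, any entry that is not a valid apispec.Keyword.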
+    fixed_keywords: list[str] = []
+    for word in keywords:
+        for kw in word.strip().split(","):
+            with contextlib.suppress(PydanticValidationError):
+                fixed_keywords.append(apispec.Keyword.model_validate(kw.strip()).root)
+    keywords = fixed_keywords
+
+    # Assign user-friendly target_path if possible
+    target_path = data_connector.slug
+    with contextlib.suppress(errors.ValidationError):
+        name_slug = base_models.Slug.from_name(name).value
+        target_path = base_models.Slug.from_name(f"{name_slug[:30]}-{target_path}").value
+
+    # Override source_path and target_path
+    storage = models.CloudStorageCore(
+        storage_type=data_connector.storage.storage_type,
+        configuration=data_connector.storage.configuration,
+        source_path="/",
+        target_path=target_path,
+        readonly=data_connector.storage.readonly,
+    )
+
+    return models.UnsavedGlobalDataConnector(
+        name=name,
+        slug=data_connector.slug,
+        visibility=Visibility.PUBLIC,
+        created_by="",
+        storage=storage,
+        description=description or None,
+        keywords=keywords,
+    )
+
+
 def validate_storage_patch(
     storage: models.CloudStorageCore, patch: apispec.CloudStorageCorePatch, validator: RCloneValidator
 ) -> models.CloudStorageCorePatch:
@@ -97,11 +233,32 @@ def validate_storage_patch(
 
 
 def validate_data_connector_patch(
-    data_connector: models.DataConnector,
+    data_connector: models.DataConnector | models.GlobalDataConnector,
     patch: apispec.DataConnectorPatch,
     validator: RCloneValidator,
 ) -> models.DataConnectorPatch:
     """Validate the update to a data connector."""
+    if isinstance(data_connector, models.GlobalDataConnector) and patch.namespace is not None:
+        raise errors.ValidationError(message="Assigning a namespace to a global data connector is not supported")
+    if (
+        isinstance(data_connector, models.GlobalDataConnector)
+        and patch.slug is not None
+        and patch.slug != data_connector.slug
+    ):
+        raise errors.ValidationError(message="Updating the slug of a global data connector is not supported")
+
+    slugs = patch.namespace.split("/") if patch.namespace else []
+    path: NamespacePath | ProjectPath | None
+    if len(slugs) == 0:
+        path = None
+    elif len(slugs) == 1:
+        path = NamespacePath.from_strings(*slugs)
+    elif len(slugs) == 2:
+        path = ProjectPath.from_strings(*slugs)
+    else:
+        raise errors.ValidationError(
+            message="Got an invalid number of slugs in the namespace of a data connector, expected 1 or 2"
+        )
 
     keywords = [kw.root for kw in patch.keywords] if patch.keywords is not None else None
     storage = (
@@ -112,7 +269,7 @@ def validate_data_connector_patch(
 
     return models.DataConnectorPatch(
         name=patch.name,
-        namespace=patch.namespace,
+        namespace=path,
         slug=patch.slug,
         visibility=Visibility(patch.visibility.value) if patch.visibility is not None else None,
         description=patch.description,
@@ -138,3 +295,37 @@ def validate_data_connector_secrets_patch(
         )
         for secret in put.root
     ]
+
+
+def _html_to_text(html: str) -> str:
+    """Returns the text content of an html snippet."""
+    try:
+        f = _HTMLToText()
+        f.feed(html)
+        content = f.text
+
+        # Cleanup whitespace characters
+        content = content.strip()
+        content = content.strip("\n")
+        content = re.sub(" ( )+", " ", content)
+        content = re.sub("\n\n(\n)+", "\n\n", content)
+        content = re.sub("\n( )+", "\n", content)
+
+        return content
+    except Exception:
+        return html
+
+
+class _HTMLToText(HTMLParser):
+    """Parses HTML into text content."""
+
+    def __init__(self, *, convert_charrefs: bool = True) -> None:
+        super().__init__(convert_charrefs=convert_charrefs)
+        self._text = ""
+
+    @property
+    def text(self) -> 
str: + return self._text + + def handle_data(self, data: str) -> None: + self._text += data diff --git a/components/renku_data_services/data_connectors/db.py b/components/renku_data_services/data_connectors/db.py index 4b6995296..96a1a30f0 100644 --- a/components/renku_data_services/data_connectors/db.py +++ b/components/renku_data_services/data_connectors/db.py @@ -1,24 +1,47 @@ """Adapters for data connectors database classes.""" -from collections.abc import AsyncIterator, Callable +import random +import string +from collections.abc import AsyncIterator, Callable, Sequence +from contextlib import suppress from datetime import datetime from typing import TypeVar from cryptography.hazmat.primitives.asymmetric import rsa -from sqlalchemy import Select, delete, func, select +from sqlalchemy import ColumnExpressionArgument, Select, delete, func, select from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import joinedload from ulid import ULID from renku_data_services import base_models, errors from renku_data_services.authz.authz import Authz, AuthzOperation, ResourceType from renku_data_services.authz.models import CheckPermissionItem, Scope from renku_data_services.base_api.pagination import PaginationRequest +from renku_data_services.base_models.core import ( + DataConnectorInProjectPath, + DataConnectorPath, + DataConnectorSlug, + InternalServiceAdmin, + NamespacePath, + NamespaceSlug, + ProjectPath, + ProjectSlug, +) from renku_data_services.data_connectors import apispec, models from renku_data_services.data_connectors import orm as schemas +from renku_data_services.data_connectors.core import validate_unsaved_global_data_connector from renku_data_services.namespace import orm as ns_schemas +from renku_data_services.namespace.db import GroupRepository +from renku_data_services.namespace.models import ProjectNamespace +from renku_data_services.project.db import ProjectRepository +from renku_data_services.project.models import Project +from renku_data_services.project.orm import ProjectORM +from renku_data_services.search.db import SearchUpdatesRepo +from renku_data_services.search.decorators import update_search_document from renku_data_services.secrets import orm as secrets_schemas from renku_data_services.secrets.core import encrypt_user_secret from renku_data_services.secrets.models import SecretKind +from renku_data_services.storage.rclone import RCloneValidator from renku_data_services.users.db import UserRepo from renku_data_services.utils.core import with_db_transaction @@ -30,29 +53,44 @@ def __init__( self, session_maker: Callable[..., AsyncSession], authz: Authz, + project_repo: ProjectRepository, + group_repo: GroupRepository, + search_updates_repo: SearchUpdatesRepo, ) -> None: self.session_maker = session_maker self.authz = authz + self.project_repo = project_repo + self.group_repo = group_repo + self.search_updates_repo = search_updates_repo async def get_data_connectors( - self, user: base_models.APIUser, pagination: PaginationRequest, namespace: str | None = None - ) -> tuple[list[models.DataConnector], int]: + self, + user: base_models.APIUser, + pagination: PaginationRequest, + namespace: ProjectPath | NamespacePath | None = None, + ) -> tuple[list[models.DataConnector | models.GlobalDataConnector], int]: """Get multiple data connectors from the database.""" - data_connector_ids = await self.authz.resources_with_permission( - user, user.id, ResourceType.data_connector, Scope.READ - ) + + async def restrict_by_read(stmt: Select) -> Select: + if 
isinstance(user, InternalServiceAdmin): + return stmt + else: + dc_ids = await self.authz.resources_with_permission( + user, user.id, ResourceType.data_connector, Scope.READ + ) + return stmt.where(schemas.DataConnectorORM.id.in_(dc_ids)) async with self.session_maker() as session: - stmt = select(schemas.DataConnectorORM).where(schemas.DataConnectorORM.id.in_(data_connector_ids)) + stmt = (await restrict_by_read(select(schemas.DataConnectorORM))).options( + joinedload(schemas.DataConnectorORM.slug) + .joinedload(ns_schemas.EntitySlugORM.project) + .joinedload(ProjectORM.slug) + ) if namespace: stmt = _filter_by_namespace_slug(stmt, namespace) stmt = stmt.limit(pagination.per_page).offset(pagination.offset) stmt = stmt.order_by(schemas.DataConnectorORM.id.desc()) - stmt_count = ( - select(func.count()) - .select_from(schemas.DataConnectorORM) - .where(schemas.DataConnectorORM.id.in_(data_connector_ids)) - ) + stmt_count = await restrict_by_read(select(func.count()).select_from(schemas.DataConnectorORM)) if namespace: stmt_count = _filter_by_namespace_slug(stmt_count, namespace) results = await session.scalars(stmt), await session.scalar(stmt_count) @@ -64,7 +102,7 @@ async def get_data_connector( self, user: base_models.APIUser, data_connector_id: ULID, - ) -> models.DataConnector: + ) -> models.DataConnector | models.GlobalDataConnector: """Get one data connector from the database.""" not_found_msg = f"Data connector with id '{data_connector_id}' does not exist or you do not have access to it." @@ -74,27 +112,103 @@ async def get_data_connector( async with self.session_maker() as session: result = await session.scalars( - select(schemas.DataConnectorORM).where(schemas.DataConnectorORM.id == data_connector_id) + select(schemas.DataConnectorORM) + .where(schemas.DataConnectorORM.id == data_connector_id) + .options( + joinedload(schemas.DataConnectorORM.slug) + .joinedload(ns_schemas.EntitySlugORM.project) + .joinedload(ProjectORM.slug) + ) ) data_connector = result.one_or_none() if data_connector is None: raise errors.MissingResourceError(message=not_found_msg) return data_connector.dump() + async def get_data_connectors_names_and_ids( + self, + user: base_models.APIUser, + data_connector_ids: list[ULID], + ) -> list[tuple[str, str]]: + """Get names of the data connectors. + + Don't check permissions for logged-in users since this is used to get names when users cannot copy data + connectors due to lack of permission. + """ + if user.id is None: + raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + + async with self.session_maker() as session: + stmt = select(schemas.DataConnectorORM).where(schemas.DataConnectorORM.id.in_(data_connector_ids)) + result = await session.scalars(stmt) + data_connectors_orms = result.all() + + return [ + (dc.name, f"{dc.slug.namespace.slug}/{dc.slug.slug}" if dc.slug is not None else f"{dc.global_slug}") + for dc in data_connectors_orms + ] + async def get_data_connector_by_slug( - self, user: base_models.APIUser, namespace: str, slug: str - ) -> models.DataConnector: - """Get one data connector from the database by slug.""" + self, + user: base_models.APIUser, + path: DataConnectorInProjectPath | DataConnectorPath, + ) -> models.DataConnector | models.GlobalDataConnector: + """Get one data connector from the database by slug. + + This will not return or find data connectors owned by projects. 
+ """ not_found_msg = ( - f"Data connector with identifier '{namespace}/{slug}' does not exist or you do not have access to it." + f"Data connector with identifier '{path.serialize()}' does not exist or you do not have access to it." ) async with self.session_maker() as session: stmt = select(schemas.DataConnectorORM) - stmt = _filter_by_namespace_slug(stmt, namespace) - stmt = stmt.where(ns_schemas.EntitySlugORM.slug == slug.lower()) + stmt = _filter_by_namespace_slug(stmt, path.parent()) + stmt = stmt.where( + schemas.DataConnectorORM.slug.has( + ns_schemas.EntitySlugORM.slug == path.last().value.lower(), + ) + ) result = await session.scalars(stmt) data_connector = result.one_or_none() + + if data_connector is None: + # Try to find the slugs passed in the tables of old slugs + queries = _old_data_connector_slug_queries(path) + for query in queries: + result_old = await session.scalar(query) + if result_old is not None: + data_connector = result_old + break + + if data_connector is None: + raise errors.MissingResourceError(message=not_found_msg) + + authorized = await self.authz.has_permission( + user=user, + resource_type=ResourceType.data_connector, + resource_id=data_connector.id, + scope=Scope.READ, + ) + if not authorized: + raise errors.MissingResourceError(message=not_found_msg) + + return data_connector.dump() + + async def get_global_data_connector_by_slug( + self, + user: base_models.APIUser, + slug: base_models.Slug, + ) -> models.DataConnector | models.GlobalDataConnector: + """Get one global data connector from the database by slug.""" + not_found_msg = f"Data connector with identifier '{slug}' does not exist or you do not have access to it." + + async with self.session_maker() as session: + stmt = select(schemas.DataConnectorORM).where(schemas.DataConnectorORM.global_slug == slug.value.lower()) + result = await session.scalars(stmt) + data_connector = result.one_or_none() + + # TODO: find from old slug for global data connectors if data_connector is None: raise errors.MissingResourceError(message=not_found_msg) @@ -111,47 +225,95 @@ async def get_data_connector_by_slug( @with_db_transaction @Authz.authz_change(AuthzOperation.create, ResourceType.data_connector) - async def insert_data_connector( + @update_search_document + async def _insert_data_connector( self, user: base_models.APIUser, - data_connector: models.UnsavedDataConnector, + data_connector: models.UnsavedDataConnector | models.UnsavedGlobalDataConnector, *, session: AsyncSession | None = None, - ) -> models.DataConnector: + ) -> models.DataConnector | models.GlobalDataConnector: """Insert a new data connector entry.""" if not session: raise errors.ProgrammingError(message="A database session is required.") - ns = await session.scalar( - select(ns_schemas.NamespaceORM).where(ns_schemas.NamespaceORM.slug == data_connector.namespace.lower()) - ) - if not ns: - raise errors.MissingResourceError( - message=f"The data connector cannot be created because the namespace {data_connector.namespace} does not exist." 
# noqa E501 + ns: ns_schemas.NamespaceORM | None = None + if isinstance(data_connector, models.UnsavedDataConnector): + ns = await session.scalar( + select(ns_schemas.NamespaceORM).where( + ns_schemas.NamespaceORM.slug == data_connector.namespace.first.value.lower() + ) ) - if not ns.group_id and not ns.user_id: - raise errors.ProgrammingError(message="Found a namespace that has no group or user associated with it.") + if not ns: + raise errors.MissingResourceError( + message=f"The data connector cannot be created because the namespace {data_connector.namespace} does not exist." # noqa E501 + ) + if not ns.group_id and not ns.user_id: + raise errors.ProgrammingError(message="Found a namespace that has no group or user associated with it.") if user.id is None: raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") - resource_type, resource_id = ( - (ResourceType.group, ns.group_id) if ns.group and ns.group_id else (ResourceType.user_namespace, ns.id) - ) - has_permission = await self.authz.has_permission(user, resource_type, resource_id, Scope.WRITE) - if not has_permission: - raise errors.ForbiddenError( - message=f"The data connector cannot be created because you do not have sufficient permissions with the namespace {data_connector.namespace}" # noqa: E501 + project: Project | None = None + if ns is None: + pass + elif isinstance(data_connector.namespace, ProjectPath): + error_msg = ( + f"The project with slug {data_connector.namespace} does not exist or you do not have access to it." + ) + project_slug = await session.scalar( + select(ns_schemas.EntitySlugORM) + .where(ns_schemas.EntitySlugORM.namespace_id == ns.id) + .where(ns_schemas.EntitySlugORM.slug == data_connector.namespace.second.value.lower()) + .where(ns_schemas.EntitySlugORM.project_id.is_not(None)) + .where(ns_schemas.EntitySlugORM.data_connector_id.is_(None)) ) + if not project_slug or not project_slug.project_id: + raise errors.MissingResourceError(message=error_msg) + if project_slug.dump_project_namespace().path != data_connector.namespace: + raise errors.ProgrammingError( + message=f"Mismatched project slug '{project_slug.dump_project_namespace().path}' and data connector namespace '{data_connector.namespace}'" # noqa E501 + ) + project = await self.project_repo.get_project(user, project_slug.project_id) + if not project: + raise errors.MissingResourceError(message=error_msg) + resource_type = ResourceType.project + resource_id = project.id + elif ns.group and ns.group_id: + resource_type, resource_id = (ResourceType.group, ns.group_id) + else: + resource_type, resource_id = (ResourceType.user_namespace, ns.id) + + if ns is not None: + has_permission = await self.authz.has_permission(user, resource_type, resource_id, Scope.WRITE) + if not has_permission: + error_msg = f"The data connector cannot be created because you do not have sufficient permissions with the namespace {data_connector.namespace}" # noqa: E501 + if isinstance(data_connector.namespace, ProjectPath): + error_msg = f"The data connector cannot be created because you do not have sufficient permissions with the project {data_connector.namespace}" # noqa: E501 + raise errors.ForbiddenError(message=error_msg) slug = data_connector.slug or base_models.Slug.from_name(data_connector.name).value - existing_slug = await session.scalar( - select(ns_schemas.EntitySlugORM) - .where(ns_schemas.EntitySlugORM.namespace_id == ns.id) - .where(ns_schemas.EntitySlugORM.slug == slug) - ) - if existing_slug is not None: - raise 
errors.ConflictError(message=f"An entity with the slug '{ns.slug}/{slug}' already exists.") + if ns is not None and isinstance(data_connector, models.UnsavedDataConnector): + existing_slug_stmt = ( + select(ns_schemas.EntitySlugORM) + .where(ns_schemas.EntitySlugORM.namespace_id == ns.id) + .where(ns_schemas.EntitySlugORM.slug == slug) + .where(ns_schemas.EntitySlugORM.data_connector_id.is_not(None)) + ) + if project: + existing_slug_stmt = existing_slug_stmt.where(ns_schemas.EntitySlugORM.project_id == project.id) + else: + existing_slug_stmt = existing_slug_stmt.where(ns_schemas.EntitySlugORM.project_id.is_(None)) + existing_slug = await session.scalar(existing_slug_stmt) + if existing_slug is not None: + raise errors.ConflictError(message=f"An entity with the slug '{data_connector.path}' already exists.") + elif isinstance(data_connector, models.UnsavedGlobalDataConnector): + existing_global_dc_stmt = select(schemas.DataConnectorORM).where( + schemas.DataConnectorORM.global_slug == slug + ) + existing_global_dc = await session.scalar(existing_global_dc_stmt) + if existing_global_dc is not None: + raise errors.ConflictError(message=f"An entity with the slug '{data_connector.slug}' already exists.") visibility_orm = ( apispec.Visibility(data_connector.visibility) @@ -169,20 +331,88 @@ async def insert_data_connector( created_by_id=user.id, description=data_connector.description, keywords=data_connector.keywords, + global_slug=slug if isinstance(data_connector, models.UnsavedGlobalDataConnector) else None, ) - data_connector_slug = ns_schemas.EntitySlugORM.create_data_connector_slug( - slug, data_connector_id=data_connector_orm.id, namespace_id=ns.id - ) + if ns is not None: + data_connector_slug = ns_schemas.EntitySlugORM.create_data_connector_slug( + slug, + data_connector_id=data_connector_orm.id, + namespace_id=ns.id, + project_id=project.id if project else None, + ) session.add(data_connector_orm) - session.add(data_connector_slug) + if ns is not None: + session.add(data_connector_slug) await session.flush() await session.refresh(data_connector_orm) + if ns is not None: + await session.refresh(data_connector_slug) + if project: + await session.refresh(data_connector_slug.project) return data_connector_orm.dump() + @with_db_transaction + async def insert_namespaced_data_connector( + self, + user: base_models.APIUser, + data_connector: models.UnsavedDataConnector, + *, + session: AsyncSession | None = None, + ) -> models.DataConnector: + """Insert a new namespaced data connector entry.""" + dc = await self._insert_data_connector(user=user, data_connector=data_connector, session=session) + if not isinstance(dc, models.DataConnector): + raise errors.ProgrammingError(message=f"Expected to get a namespaced data connector ('{dc.id}')") + return dc + + @with_db_transaction + async def insert_global_data_connector( + self, + user: base_models.APIUser, + data_connector: models.UnsavedGlobalDataConnector, + validator: RCloneValidator | None, + *, + session: AsyncSession | None = None, + ) -> tuple[models.GlobalDataConnector, bool]: + """Insert a new global data connector entry.""" + if not session: + raise errors.ProgrammingError(message="A database session is required.") + if user.id is None: + raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + + slug = data_connector.slug or base_models.Slug.from_name(data_connector.name).value + + existing_global_dc_stmt = select(schemas.DataConnectorORM).where(schemas.DataConnectorORM.global_slug == slug) 
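+        # Creating a global data connector is idempotent: when the slug is already
+        # registered, the existing row is returned and the API responds 200, not 201.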
+ existing_global_dc = await session.scalar(existing_global_dc_stmt) + if existing_global_dc is not None: + dc = existing_global_dc.dump() + if not isinstance(dc, models.GlobalDataConnector): + raise errors.ProgrammingError(message=f"Expected to get a global data connector ('{dc.id}')") + authorized = await self.authz.has_permission(user, ResourceType.data_connector, dc.id, Scope.READ) + if not authorized: + raise errors.MissingResourceError( + message=f"Data connector with id '{dc.id}' does not exist or you do not have access to it." + ) + return dc, False + + # Fully validate a global data connector before inserting + if isinstance(data_connector, models.UnsavedGlobalDataConnector): + if validator is None: + raise RuntimeError("Could not validate global data connector") + data_connector = await validate_unsaved_global_data_connector( + data_connector=data_connector, validator=validator + ) + + dc = await self._insert_data_connector(user=user, data_connector=data_connector, session=session) + if not isinstance(dc, models.GlobalDataConnector): + raise errors.ProgrammingError(message=f"Expected to get a global data connector ('{dc.id}')") + return dc, True + @with_db_transaction @Authz.authz_change(AuthzOperation.update, ResourceType.data_connector) + @update_search_document async def update_data_connector( self, user: base_models.APIUser, @@ -198,31 +428,47 @@ async def update_data_connector( if not session: raise errors.ProgrammingError(message="A database session is required.") result = await session.scalars( - select(schemas.DataConnectorORM).where(schemas.DataConnectorORM.id == data_connector_id) + select(schemas.DataConnectorORM) + .where(schemas.DataConnectorORM.id == data_connector_id) + .options( + joinedload(schemas.DataConnectorORM.slug) + .joinedload(ns_schemas.EntitySlugORM.project) + .joinedload(ProjectORM.slug) + ) ) data_connector = result.one_or_none() if data_connector is None: raise errors.MissingResourceError(message=not_found_msg) old_data_connector = data_connector.dump() + # if old_data_connector.namespace is None: + # raise NotImplementedError("Update not supported yet.") + old_data_connector_parent = ( + old_data_connector.path.parent() if isinstance(old_data_connector, models.DataConnector) else None + ) + + if isinstance(old_data_connector, models.GlobalDataConnector) and patch.namespace: + raise errors.ValidationError(message="Moving a global data connector into a namespace is not supported.") required_scope = Scope.WRITE if patch.visibility is not None and patch.visibility != old_data_connector.visibility: # NOTE: changing the visibility requires the user to be owner which means they should have DELETE permission required_scope = Scope.DELETE - if patch.namespace is not None and patch.namespace != old_data_connector.namespace.slug: + if patch.namespace is not None and patch.namespace != old_data_connector_parent: # NOTE: changing the namespace requires the user to be owner which means they should have DELETE permission # noqa E501 required_scope = Scope.DELETE + if patch.slug is not None and patch.slug != old_data_connector.slug: + # NOTE: changing the slug requires the user to be owner which means they should have DELETE permission + required_scope = Scope.DELETE authorized = await self.authz.has_permission( user, ResourceType.data_connector, data_connector_id, required_scope ) if not authorized: raise errors.MissingResourceError(message=not_found_msg) - current_etag = data_connector.dump().etag + current_etag = old_data_connector.etag if current_etag != etag: 
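            # Optimistic concurrency: reject the update when the client supplies a stale ETag.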
raise errors.ConflictError(message=f"Current ETag is {current_etag}, not {etag}.") - # TODO: handle slug update if patch.name is not None: data_connector.name = patch.name if patch.visibility is not None: @@ -232,23 +478,51 @@ async def update_data_connector( else apispec.Visibility(patch.visibility.value) ) data_connector.visibility = visibility_orm - if patch.namespace is not None: - ns = await session.scalar( - select(ns_schemas.NamespaceORM).where(ns_schemas.NamespaceORM.slug == patch.namespace.lower()) - ) - if not ns: - raise errors.MissingResourceError(message=f"The namespace with slug {patch.namespace} does not exist.") - if not ns.group_id and not ns.user_id: - raise errors.ProgrammingError(message="Found a namespace that has no group or user associated with it.") - resource_type, resource_id = ( - (ResourceType.group, ns.group_id) if ns.group and ns.group_id else (ResourceType.user_namespace, ns.id) + if isinstance(old_data_connector, models.DataConnector) and ( + (patch.namespace is not None and patch.namespace != old_data_connector.namespace.path) + or (patch.slug is not None and patch.slug != old_data_connector.slug) + ): + match patch.namespace, patch.slug: + case (None, new_slug) if new_slug is not None: + new_path = old_data_connector.path.parent() / DataConnectorSlug(new_slug) + case (new_ns, None) if new_ns is not None: + new_path = new_ns / DataConnectorSlug(old_data_connector.slug) + case (new_ns, new_slug) if new_ns is not None and new_slug is not None: + new_path = new_ns / DataConnectorSlug(new_slug) + case _: + raise errors.ProgrammingError( + message=f"Moving a data connector from {old_data_connector.path.serialize()} " + f"to namespace: {patch.namespace} and slug: {patch.slug} is not supported." + ) + await self.group_repo.move_data_connector( + user, + old_data_connector, + new_path, + session, ) - has_permission = await self.authz.has_permission(user, resource_type, resource_id, Scope.WRITE) - if not has_permission: - raise errors.ForbiddenError( - message=f"The data connector cannot be moved because you do not have sufficient permissions with the namespace {patch.namespace}." 
# noqa: E501 + if isinstance(new_path, DataConnectorInProjectPath) and old_data_connector.path != new_path: + # Moving the data connector into a new project means that we should link it too + project = await self.project_repo.get_project_by_namespace_slug( + user, + namespace=new_path.first.value, + slug=new_path.second, + ) + link = models.UnsavedDataConnectorToProjectLink( + data_connector_id=data_connector_id, project_id=project.id ) - data_connector.slug.namespace_id = ns.id + with suppress(errors.ConflictError): + # If there is a conflict error it means the link already exists so we ignore it + await self.insert_link( + user, + link, + session=session, + ) + if ( + isinstance(old_data_connector, models.GlobalDataConnector) + and patch.slug is not None + and patch.slug != old_data_connector.slug + ): + data_connector.global_slug = patch.slug if patch.description is not None: data_connector.description = patch.description if patch.description else None if patch.keywords is not None: @@ -274,6 +548,7 @@ async def update_data_connector( @with_db_transaction @Authz.authz_change(AuthzOperation.delete, ResourceType.data_connector) + @update_search_document async def delete_data_connector( self, user: base_models.APIUser, @@ -326,18 +601,6 @@ async def get_data_connector_permissions( permissions.change_membership = True return permissions - -class DataConnectorProjectLinkRepository: - """Repository for links from data connectors to projects.""" - - def __init__( - self, - session_maker: Callable[..., AsyncSession], - authz: Authz, - ) -> None: - self.session_maker = session_maker - self.authz = authz - async def get_links_from( self, user: base_models.APIUser, data_connector_id: ULID ) -> list[models.DataConnectorToProjectLink]: @@ -370,22 +633,39 @@ async def get_links_to( message=f"Project with id '{project_id}' does not exist or you do not have access to it." ) - data_connector_ids = await self.authz.resources_with_permission( - user, user.id, ResourceType.data_connector, Scope.READ - ) + allowed_dcs = await self.authz.resources_with_permission(user, user.id, ResourceType.data_connector, Scope.READ) async with self.session_maker() as session: stmt = ( select(schemas.DataConnectorToProjectLinkORM) .where(schemas.DataConnectorToProjectLinkORM.project_id == project_id) - .where(schemas.DataConnectorToProjectLinkORM.data_connector_id.in_(data_connector_ids)) + .where(schemas.DataConnectorToProjectLinkORM.data_connector_id.in_(allowed_dcs)) ) result = await session.scalars(stmt) links_orm = result.all() return [link.dump() for link in links_orm] + async def get_inaccessible_links_to_project(self, user: base_models.APIUser, project_id: ULID) -> Sequence[ULID]: + """Get data connector link IDs in a project that the user has no access to.""" + authorized = await self.authz.has_permission(user, ResourceType.project, project_id, Scope.READ) + if not authorized: + raise errors.MissingResourceError( + message=f"Project with id '{project_id}' does not exist or you do not have access to it." 
+ ) + + allowed_dcs = await self.authz.resources_with_permission(user, user.id, ResourceType.data_connector, Scope.READ) + + async with self.session_maker() as session: + stmt = ( + select(schemas.DataConnectorToProjectLinkORM.id) + .where(schemas.DataConnectorToProjectLinkORM.project_id == project_id) + .where(schemas.DataConnectorToProjectLinkORM.data_connector_id.not_in(allowed_dcs)) + ) + result = await session.scalars(stmt) + ulids = result.all() + return ulids + @with_db_transaction - @Authz.authz_change(AuthzOperation.create_link, ResourceType.data_connector) async def insert_link( self, user: base_models.APIUser, @@ -400,15 +680,29 @@ async def insert_link( if user.id is None: raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + dc_error_msg = ( + f"Data connector with id '{link.data_connector_id}' does not exist or you do not have access to it." + ) + allowed_from = await self.authz.has_permission( + user, ResourceType.data_connector, link.data_connector_id, Scope.READ + ) + if not allowed_from: + raise errors.MissingResourceError(message=dc_error_msg) + allowed_to = await self.authz.has_permission(user, ResourceType.project, link.project_id, Scope.WRITE) + if not allowed_to: + raise errors.MissingResourceError( + message=f"The user with ID {user.id} cannot perform operation {Scope.WRITE} " + f"on {ResourceType.project.value} " + f"with ID {link.project_id} or the resource does not exist." + ) + data_connector = ( await session.scalars( select(schemas.DataConnectorORM).where(schemas.DataConnectorORM.id == link.data_connector_id) ) ).one_or_none() if data_connector is None: - raise errors.MissingResourceError( - message=f"Data connector with id '{link.data_connector_id}' does not exist or you do not have access to it." 
# noqa E501 - ) + raise errors.MissingResourceError(message=dc_error_msg) project = ( await session.scalars(select(schemas.ProjectORM).where(schemas.ProjectORM.id == link.project_id)) @@ -444,14 +738,29 @@ async def insert_link( async def copy_link( self, user: base_models.APIUser, - project_id: ULID, + target_project_id: ULID, link: models.DataConnectorToProjectLink, *, session: AsyncSession | None = None, ) -> models.DataConnectorToProjectLink: """Create a new link from a given data connector link to a project.""" + allowed_to_read_dc = await self.authz.has_permission( + user, ResourceType.data_connector, link.data_connector_id, Scope.READ + ) + allowed_to_write_project = await self.authz.has_permission( + user, ResourceType.project, target_project_id, Scope.WRITE + ) + if not allowed_to_read_dc: + raise errors.MissingResourceError( + message=f"The data connector with ID {link.data_connector_id} does not exist " + "or you do not have access to it" + ) + if not allowed_to_write_project: + raise errors.MissingResourceError( + message=f"The project with ID {link.project_id} does not exist or you do not have access to it" + ) unsaved_link = models.UnsavedDataConnectorToProjectLink( - data_connector_id=link.data_connector_id, project_id=project_id + data_connector_id=link.data_connector_id, project_id=target_project_id ) return await self.insert_link(user=user, link=unsaved_link, session=session) @@ -479,6 +788,21 @@ async def delete_link( if link_orm is None: return None + allowed_to_write_project = await self.authz.has_permission( + user, ResourceType.project, link_orm.project_id, Scope.WRITE + ) + if not allowed_to_write_project: + raise errors.MissingResourceError( + message=f"The project with ID {link_orm.project_id} does not exist or you do not have access to it" + ) + + dc = await self.get_data_connector(user, data_connector_id) + if isinstance(dc.namespace, ProjectNamespace) and dc.namespace.underlying_resource_id == link_orm.project_id: + raise errors.ValidationError( + message="The data connector link to the owner project cannot be removed," + " you have to first move the data connector elsewhere and then unlink it." 
+ ) + link = link_orm.dump() await session.delete(link_orm) return link @@ -521,12 +845,21 @@ async def get_data_connectors_with_secrets( ) async with self.session_maker() as session: - stmt = select(schemas.DataConnectorORM).where( - schemas.DataConnectorORM.project_links.any( - schemas.DataConnectorToProjectLinkORM.project_id == project_id - ), - schemas.DataConnectorORM.id.in_(data_connector_ids), + stmt = ( + select(schemas.DataConnectorORM) + .where( + schemas.DataConnectorORM.project_links.any( + schemas.DataConnectorToProjectLinkORM.project_id == project_id + ), + schemas.DataConnectorORM.id.in_(data_connector_ids), + ) + .options( + joinedload(schemas.DataConnectorORM.slug) + .joinedload(ns_schemas.EntitySlugORM.project) + .joinedload(ProjectORM.slug) + ) ) + results = await session.stream_scalars(stmt) async for dc in results: secrets = await self.get_data_connector_secrets(user, dc.id) @@ -566,7 +899,9 @@ async def patch_data_connector_secrets( raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") # NOTE: check that the user can access the data connector - await self.data_connector_repo.get_data_connector(user=user, data_connector_id=data_connector_id) + data_connector = await self.data_connector_repo.get_data_connector( + user=user, data_connector_id=data_connector_id + ) secrets_as_dict = {s.name: s.value for s in secrets} @@ -590,7 +925,11 @@ async def patch_data_connector_secrets( data_connector_secret_orm = existing_secrets_as_dict.get(name) if data_connector_secret_orm is None: continue - await session.delete(data_connector_secret_orm.secret) + await session.execute( + delete(secrets_schemas.SecretORM).where( + secrets_schemas.SecretORM.id == data_connector_secret_orm.secret_id + ) + ) del existing_secrets_as_dict[name] continue @@ -608,8 +947,13 @@ async def patch_data_connector_secrets( expiration_timestamp=expiration_timestamp, ) else: + secret_name = f"{data_connector.name[:45]} - {name[:45]}" + suffix = "".join([random.choice(string.ascii_lowercase + string.digits) for _ in range(8)]) # nosec B311 + secret_name_slug = base_models.Slug.from_name(name).value + default_filename = f"{secret_name_slug[:200]}-{suffix}" secret_orm = secrets_schemas.SecretORM( - name=f"{data_connector_id}-{name}", + name=secret_name, + default_filename=default_filename, user_id=user.id, encrypted_value=encrypted_value, encrypted_key=encrypted_key, @@ -624,6 +968,8 @@ async def patch_data_connector_secrets( ) session.add(secret_orm) session.add(data_connector_secret_orm) + await session.flush() + await session.refresh(data_connector_secret_orm) all_secrets.append(data_connector_secret_orm.dump()) @@ -644,13 +990,127 @@ async def delete_data_connector_secrets(self, user: base_models.APIUser, data_co await session.execute(stmt) -_T = TypeVar("_T") +_T = TypeVar("_T", int, schemas.DataConnectorORM) -def _filter_by_namespace_slug(statement: Select[tuple[_T]], namespace: str) -> Select[tuple[_T]]: +def _filter_by_namespace_slug(stmt: Select[tuple[_T]], namespace: ProjectPath | NamespacePath) -> Select[tuple[_T]]: """Filters a select query on data connectors to a given namespace.""" - return ( - statement.where(ns_schemas.NamespaceORM.slug == namespace.lower()) - .where(ns_schemas.EntitySlugORM.namespace_id == ns_schemas.NamespaceORM.id) - .where(schemas.DataConnectorORM.id == ns_schemas.EntitySlugORM.data_connector_id) + stmt = stmt.where( + schemas.DataConnectorORM.slug.has( + ns_schemas.EntitySlugORM.namespace.has( + ns_schemas.NamespaceORM.slug == 
namespace.first.value.lower(), + ) + ) ) + if isinstance(namespace, ProjectPath): + stmt = stmt.where( + schemas.DataConnectorORM.slug.has( + ns_schemas.EntitySlugORM.project.has( + schemas.ProjectORM.slug.has( + ns_schemas.EntitySlugORM.slug == namespace.second.value.lower(), + ) + ) + ) + ) + return stmt + + +def _old_data_connector_slug_queries( + path: DataConnectorInProjectPath | DataConnectorPath, +) -> list[Select[tuple[schemas.DataConnectorORM]]]: + """Prepare queries that return data connector IDs based on a full data connector path.""" + + def _dc_old_ns_is(slug: NamespaceSlug) -> ColumnExpressionArgument[bool]: + return schemas.DataConnectorORM.slug.has( + ns_schemas.EntitySlugORM.namespace.has( + ns_schemas.NamespaceORM.old_namespaces.any( + ns_schemas.NamespaceOldORM.slug == slug.value.lower(), + ) + ) + ) + + def _dc_new_ns_is(slug: NamespaceSlug) -> ColumnExpressionArgument[bool]: + return schemas.DataConnectorORM.slug.has( + ns_schemas.EntitySlugORM.namespace.has( + ns_schemas.NamespaceORM.slug == slug.value.lower(), + ) + ) + + def _dc_old_prj_is(slug: ProjectSlug) -> ColumnExpressionArgument[bool]: + return schemas.DataConnectorORM.slug.has( + ns_schemas.EntitySlugORM.project.has( + ProjectORM.old_slugs.any( + ns_schemas.EntitySlugOldORM.slug == slug.value.lower(), + ) + ) + ) + + def _dc_new_prj_is(slug: ProjectSlug) -> ColumnExpressionArgument[bool]: + return schemas.DataConnectorORM.slug.has( + ns_schemas.EntitySlugORM.project.has( + ProjectORM.slug.has( + ns_schemas.EntitySlugORM.slug == slug.value.lower(), + ) + ) + ) + + def _dc_old_slug_is(slug: DataConnectorSlug) -> ColumnExpressionArgument[bool]: + return schemas.DataConnectorORM.old_slugs.any( + ns_schemas.EntitySlugOldORM.slug == slug.value.lower(), + ) + + def _dc_new_slug_is(slug: DataConnectorSlug) -> ColumnExpressionArgument[bool]: + return schemas.DataConnectorORM.slug.has( + ns_schemas.EntitySlugORM.slug == slug.value.lower(), + ) + + def _dc_in_project(path: DataConnectorInProjectPath) -> list[Select[tuple[schemas.DataConnectorORM]]]: + """This finds all combinations of old/new slugs for user/group, project and data connector. + + It excludes the combination of new/new/new. + """ + result = [] + for i in range(2**3 - 1): + # NOTE: Changing the order of the statements here can lead to unexpected + # consequences or cause combinations that we want to exclude to be added (i.e. new/new/new). 
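+            # For example, i == 0b011 keeps the new namespace and project slugs but
+            # matches the old data connector slug; i == 0b111 (all new) never occurs
+            # because range(2**3 - 1) stops at 0b110.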
+ stmt = select(schemas.DataConnectorORM) + if i & 0b001 == 0b001: # noqa: SIM108 + stmt = stmt.where(_dc_new_ns_is(path.first)) + else: + stmt = stmt.where(_dc_old_ns_is(path.first)) + + if i & 0b010 == 0b010: # noqa: SIM108 + stmt = stmt.where(_dc_new_prj_is(path.second)) + else: + stmt = stmt.where(_dc_old_prj_is(path.second)) + + if i & 0b100 == 0b100: # noqa: SIM108 + stmt = stmt.where(_dc_new_slug_is(path.third)) + else: + stmt = stmt.where(_dc_old_slug_is(path.third)) + result.append(stmt) + return result + + def _dc_in_user_or_group(path: DataConnectorPath) -> list[Select[tuple[schemas.DataConnectorORM]]]: + old_ns_old_slug = ( + select(schemas.DataConnectorORM).where(_dc_old_ns_is(path.first)).where(_dc_old_slug_is(path.second)) + ) + new_ns_old_slug = ( + select(schemas.DataConnectorORM).where(_dc_new_ns_is(path.first)).where(_dc_old_slug_is(path.second)) + ) + old_ns_new_slug = ( + select(schemas.DataConnectorORM).where(_dc_old_ns_is(path.first)).where(_dc_new_slug_is(path.second)) + ) + return [ + old_ns_old_slug, + new_ns_old_slug, + old_ns_new_slug, + ] + + match path: + case DataConnectorPath(): + return _dc_in_user_or_group(path) + case DataConnectorInProjectPath(): + return _dc_in_project(path) + case _: + raise errors.ProgrammingError(message="Got unknown data connector path type when resolving slugs.") diff --git a/components/renku_data_services/data_connectors/doi/__init__.py b/components/renku_data_services/data_connectors/doi/__init__.py new file mode 100644 index 000000000..232c55009 --- /dev/null +++ b/components/renku_data_services/data_connectors/doi/__init__.py @@ -0,0 +1 @@ +"""DOI module.""" diff --git a/components/renku_data_services/data_connectors/doi/metadata.py b/components/renku_data_services/data_connectors/doi/metadata.py new file mode 100644 index 000000000..02ad9c857 --- /dev/null +++ b/components/renku_data_services/data_connectors/doi/metadata.py @@ -0,0 +1,120 @@ +"""Metadata handling for DOIs.""" + +import httpx +from pydantic import ValidationError as PydanticValidationError + +from renku_data_services.data_connectors.doi import models +from renku_data_services.storage.rclone import RCloneDOIMetadata + + +async def get_dataset_metadata(rclone_metadata: RCloneDOIMetadata) -> models.DOIMetadata | None: + """Retrieve DOI metadata.""" + if rclone_metadata.provider == "invenio" or rclone_metadata.provider == "zenodo": + return await _get_dataset_metadata_invenio(rclone_metadata=rclone_metadata) + if rclone_metadata.provider == "dataverse": + return await _get_dataset_metadata_dataverse(rclone_metadata=rclone_metadata) + return None + + +async def _get_dataset_metadata_invenio(rclone_metadata: RCloneDOIMetadata) -> models.DOIMetadata | None: + """Retrieve DOI metadata from the InvenioRDM API.""" + metadata_url = rclone_metadata.metadata_url + if not metadata_url: + return None + + async with httpx.AsyncClient(timeout=5) as client: + try: + res = await client.get(url=metadata_url, follow_redirects=True, headers=[("accept", "application/json")]) + if res.status_code >= 400: + return None + record = models.InvenioRecord.model_validate_json(res.content) + except httpx.HTTPError: + return None + except PydanticValidationError: + return None + + name = "" + description = "" + keywords = [] + if record.metadata is not None: + name = record.metadata.title or "" + description = record.metadata.description or "" + keywords = record.metadata.keywords or [] + return models.DOIMetadata(name=name, description=description, keywords=keywords) + + +async def 
_get_dataset_metadata_dataverse(rclone_metadata: RCloneDOIMetadata) -> models.DOIMetadata | None: + """Retrieve DOI metadata from the Dataverse API.""" + metadata_url = rclone_metadata.metadata_url + if not metadata_url: + return None + + async with httpx.AsyncClient(timeout=5) as client: + try: + res = await client.get(url=metadata_url, follow_redirects=True, headers=[("accept", "application/json")]) + if res.status_code >= 400: + return None + response = models.DataverseDatasetResponse.model_validate_json(res.content) + except httpx.HTTPError: + return None + except PydanticValidationError: + return None + + if response.status != "OK": + return None + + name = "" + description = "" + keywords: list[str] = [] + if ( + response.data is not None + and response.data.latest_version is not None + and response.data.latest_version.metadata_blocks is not None + and response.data.latest_version.metadata_blocks.citation is not None + ): + for field in response.data.latest_version.metadata_blocks.citation.fields: + if field.type_name == "title" and field.type_class == "primitive" and not field.multiple: + name = str(field.value) + if ( + field.type_name == "dsDescription" + and field.type_class == "compound" + and field.multiple + and isinstance(field.value, list) + and field.value + ): + try: + description_field = models.DataverseMetadataBlockCitationField.model_validate( + field.value[0].get("dsDescriptionValue", dict()) + ) + if ( + description_field.type_name == "dsDescriptionValue" + and description_field.type_class == "primitive" + and not description_field.multiple + ): + description = str(description_field.value) + except AttributeError: + pass + except PydanticValidationError: + pass + if ( + field.type_name == "keyword" + and field.type_class == "compound" + and field.multiple + and isinstance(field.value, list) + ): + for value in field.value: + try: + kw_field = models.DataverseMetadataBlockCitationField.model_validate( + value.get("keywordValue", dict()) + ) + if ( + kw_field.type_name == "keywordValue" + and kw_field.type_class == "primitive" + and not kw_field.multiple + ): + keywords.append(str(kw_field.value)) + except AttributeError: + pass + except PydanticValidationError: + pass + return models.DOIMetadata(name=name, description=description, keywords=keywords) diff --git a/components/renku_data_services/data_connectors/doi/models.py b/components/renku_data_services/data_connectors/doi/models.py new file mode 100644 index 000000000..48da936aa --- /dev/null +++ b/components/renku_data_services/data_connectors/doi/models.py @@ -0,0 +1,69 @@ +"""Models for DOIs.""" + +from dataclasses import dataclass +from typing import Any + +from pydantic import BaseModel, Field + + +@dataclass(frozen=True, eq=True, kw_only=True) +class DOIMetadata: + """Model for DOI metadata.""" + + name: str + description: str + keywords: list[str] + + +class InvenioRecordMetadata(BaseModel): + """Representation of a record's metadata.""" + + title: str | None = Field(default=None) + description: str | None = Field(default=None) + keywords: list[str] | None = Field(default=None) + + +class InvenioRecord(BaseModel): + """Schema for the representation of a record from the InvenioRDM API.""" + + metadata: InvenioRecordMetadata | None = Field(default=None) + + +class DataverseMetadataBlockCitationField(BaseModel): + """A metadata field of citation metadata.""" + + type_name: str = Field(alias="typeName") + multiple: bool = Field() + type_class: str = Field(alias="typeClass") + value: Any = Field() # TODO: can we 
find better types here? + + +class DataverseMetadataBlockCitation(BaseModel): + """Representation of citation metadata.""" + + fields: list[DataverseMetadataBlockCitationField] = Field(default_factory=list) + + +class DataverseMetadataBlocks(BaseModel): + """Represents metadata of a Dataverse dataset.""" + + citation: DataverseMetadataBlockCitation | None = Field() + + +class DataverseDatasetVersion(BaseModel): + """Representation of a dataset version.""" + + metadata_blocks: DataverseMetadataBlocks | None = Field(alias="metadataBlocks") + + +class DataverseDataset(BaseModel): + """Representation of a dataset in Dataverse.""" + + latest_version: DataverseDatasetVersion | None = Field(alias="latestVersion") + + +class DataverseDatasetResponse(BaseModel): + """DataverseDatasetResponse is returned by the Dataverse dataset API.""" + + status: str = Field() + data: DataverseDataset | None = Field() diff --git a/components/renku_data_services/data_connectors/migration_utils.py b/components/renku_data_services/data_connectors/migration_utils.py index 62b965161..52a89352a 100644 --- a/components/renku_data_services/data_connectors/migration_utils.py +++ b/components/renku_data_services/data_connectors/migration_utils.py @@ -14,7 +14,7 @@ from renku_data_services.authz.models import Scope from renku_data_services.base_models.core import Slug from renku_data_services.data_connectors import models -from renku_data_services.data_connectors.db import DataConnectorProjectLinkRepository, DataConnectorRepository +from renku_data_services.data_connectors.db import DataConnectorRepository from renku_data_services.namespace.models import NamespaceKind from renku_data_services.project import models as projects_models from renku_data_services.project import orm as projects_schemas @@ -30,13 +30,11 @@ def __init__( self, session_maker: Callable[..., AsyncSession], data_connector_repo: DataConnectorRepository, - data_connector_project_link_repo: DataConnectorProjectLinkRepository, project_repo: ProjectRepository, authz: Authz, ) -> None: self.session_maker = session_maker self.data_connector_repo = data_connector_repo - self.data_connector_project_link_repo = data_connector_project_link_repo self.project_repo = project_repo self.authz = authz @@ -78,7 +76,7 @@ async def migrate_storage_v2( data_connector_id=data_connector.id, project_id=project_id, ) - await self.data_connector_project_link_repo.insert_link(user=data_connector_owner, link=unsaved_link) + await self.data_connector_repo.insert_link(user=data_connector_owner, link=unsaved_link) # Remove the storage_v2 from the database await self._delete_storage_v2(requested_by=requested_by, storage_id=storage.storage_id) @@ -101,7 +99,7 @@ async def _find_owner( if not isinstance(project.namespace.underlying_resource_id, ULID): raise errors.ProgrammingError( - message=f"Group namespace {project.namespace.slug} has an invalid underlying resource id {project.namespace.underlying_resource_id}." # noqa E501 + message=f"Group namespace {project.namespace} has an invalid underlying resource id {project.namespace.underlying_resource_id}." 
# noqa E501 ) group_id = project.namespace.underlying_resource_id @@ -154,14 +152,14 @@ async def _insert_data_connector( ) unsaved_data_connector = models.UnsavedDataConnector( name=storage.name, - namespace=project.namespace.slug, + namespace=project.path, slug=data_connector_slug, visibility=project.visibility, created_by="", storage=unsaved_storage, ) - data_connector = await self.data_connector_repo.insert_data_connector( + data_connector = await self.data_connector_repo.insert_namespaced_data_connector( user=user, data_connector=unsaved_data_connector ) return data_connector diff --git a/components/renku_data_services/data_connectors/models.py b/components/renku_data_services/data_connectors/models.py index 498e664b4..df0bcd880 100644 --- a/components/renku_data_services/data_connectors/models.py +++ b/components/renku_data_services/data_connectors/models.py @@ -2,16 +2,23 @@ from dataclasses import dataclass, field from datetime import UTC, datetime -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, Final from ulid import ULID from renku_data_services.authz.models import Visibility -from renku_data_services.namespace.models import Namespace -from renku_data_services.utils.etag import compute_etag_from_timestamp +from renku_data_services.base_models.core import ( + DataConnectorInProjectPath, + DataConnectorPath, + DataConnectorSlug, + NamespacePath, + ProjectPath, +) +from renku_data_services.namespace.models import GroupNamespace, ProjectNamespace, UserNamespace +from renku_data_services.utils.etag import compute_etag_from_fields if TYPE_CHECKING: - from renku_data_services.storage.rclone import RCloneOption + from renku_data_services.data_connectors.apispec import RCloneOption @dataclass(frozen=True, eq=True, kw_only=True) @@ -45,20 +52,51 @@ class DataConnector(BaseDataConnector): """Data connector model.""" id: ULID - namespace: Namespace + namespace: UserNamespace | GroupNamespace | ProjectNamespace updated_at: datetime @property def etag(self) -> str: """Entity tag value for this data connector object.""" - return compute_etag_from_timestamp(self.updated_at, include_quotes=True) + return compute_etag_from_fields(self.updated_at, self.path.serialize()) + + @property + def path(self) -> DataConnectorPath | DataConnectorInProjectPath: + """The full path (i.e. sequence of slugs) for the data connector including group or user and/or project.""" + return self.namespace.path / DataConnectorSlug(self.slug) @dataclass(frozen=True, eq=True, kw_only=True) class UnsavedDataConnector(BaseDataConnector): """A data connector that hasn't been stored in the database.""" - namespace: str + namespace: NamespacePath | ProjectPath + + @property + def path(self) -> DataConnectorPath | DataConnectorInProjectPath: + """The full path (i.e. 
sequence of slugs) for the data connector including group or user and/or project.""" + return self.namespace / DataConnectorSlug(self.slug) + + +@dataclass(frozen=True, eq=True, kw_only=True) +class GlobalDataConnector(BaseDataConnector): + """Global data connector model.""" + + id: ULID + namespace: Final[None] = field(default=None, init=False) + updated_at: datetime + + @property + def etag(self) -> str: + """Entity tag value for this data connector object.""" + return compute_etag_from_fields(self.updated_at) + + +@dataclass(frozen=True, eq=True, kw_only=True) +class UnsavedGlobalDataConnector(BaseDataConnector): + """Global data connector model.""" + + namespace: None = None @dataclass(frozen=True, eq=True, kw_only=True) @@ -84,7 +122,7 @@ class DataConnectorPatch: """Model for changes requested on a data connector.""" name: str | None - namespace: str | None + namespace: NamespacePath | ProjectPath | None slug: str | None visibility: Visibility | None description: str | None @@ -103,8 +141,8 @@ class CloudStorageCoreWithSensitiveFields(CloudStorageCore): class DataConnectorUpdate: """Information about the update of a data connector.""" - old: DataConnector - new: DataConnector + old: DataConnector | GlobalDataConnector + new: DataConnector | GlobalDataConnector @dataclass(frozen=True, eq=True, kw_only=True) @@ -156,5 +194,5 @@ class DataConnectorPermissions: class DataConnectorWithSecrets: """A data connector with its secrets.""" - data_connector: DataConnector + data_connector: DataConnector | GlobalDataConnector secrets: list[DataConnectorSecret] = field(default_factory=list) diff --git a/components/renku_data_services/data_connectors/orm.py b/components/renku_data_services/data_connectors/orm.py index 6340c73be..f51d54d5c 100644 --- a/components/renku_data_services/data_connectors/orm.py +++ b/components/renku_data_services/data_connectors/orm.py @@ -19,7 +19,7 @@ from renku_data_services.utils.sqlalchemy import ULIDType if TYPE_CHECKING: - from renku_data_services.namespace.orm import EntitySlugORM + from renku_data_services.namespace.orm import EntitySlugOldORM, EntitySlugORM JSONVariant = JSON().with_variant(JSONB(), "postgresql") @@ -66,11 +66,14 @@ class DataConnectorORM(BaseORM): keywords: Mapped[list[str] | None] = mapped_column("keywords", ARRAY(String(99)), nullable=True) """Keywords for the data connector.""" - slug: Mapped["EntitySlugORM"] = relationship( + slug: Mapped["EntitySlugORM | None"] = relationship( lazy="joined", init=False, repr=False, viewonly=True, back_populates="data_connector" ) """Slug of the data connector.""" + global_slug: Mapped[str | None] = mapped_column(String(99), index=True, nullable=True, default=None, unique=True) + """Slug for global data connectors.""" + readonly: Mapped[bool] = mapped_column("readonly", Boolean(), default=True) """Whether this storage should be mounted readonly or not """ @@ -87,13 +90,38 @@ class DataConnectorORM(BaseORM): ) project_links: Mapped[list["DataConnectorToProjectLinkORM"]] = relationship(init=False, viewonly=True) - def dump(self) -> models.DataConnector: + old_slugs: Mapped[list["EntitySlugOldORM"]] = relationship( + back_populates="data_connector", + default_factory=list, + repr=False, + init=False, + viewonly=True, + ) + + def dump(self) -> models.DataConnector | models.GlobalDataConnector: """Create a data connector model from the DataConnectorORM.""" + if self.global_slug: + return models.GlobalDataConnector( + id=self.id, + name=self.name, + slug=self.global_slug, + visibility=self._dump_visibility(), + 
created_by=self.created_by_id, # TODO: should we use an admin id? Or drop it? + creation_date=self.creation_date, + updated_at=self.updated_at, + storage=self._dump_storage(), + description=self.description, + keywords=self.keywords, + ) + + elif self.slug is None: + raise ValueError("Either the slug or the global slug must be set.") + return models.DataConnector( id=self.id, name=self.name, slug=self.slug.slug, - namespace=self.slug.namespace.dump(), + namespace=self.slug.dump_namespace(), visibility=self._dump_visibility(), created_by=self.created_by_id, creation_date=self.creation_date, @@ -187,7 +215,9 @@ class DataConnectorSecretORM(BaseORM): name: Mapped[str] = mapped_column("name", String(), primary_key=True) secret_id: Mapped[ULID] = mapped_column("secret_id", ForeignKey(SecretORM.id, ondelete="CASCADE")) - secret: Mapped[SecretORM] = relationship(init=False, repr=False, lazy="selectin") + secret: Mapped[SecretORM] = relationship( + init=False, repr=False, back_populates="data_connector_secrets", lazy="selectin" + ) def dump(self) -> models.DataConnectorSecret: """Create a data connector secret model from the DataConnectorSecretORM.""" diff --git a/components/renku_data_services/db_config/config.py b/components/renku_data_services/db_config/config.py index e0763aea3..49947b8e1 100644 --- a/components/renku_data_services/db_config/config.py +++ b/components/renku_data_services/db_config/config.py @@ -20,22 +20,30 @@ class DBConfig: port: str = "5432" db_name: str = "renku" _async_engine: ClassVar[AsyncEngine | None] = field(default=None, repr=False, init=False) + pool_size: int = 4 @classmethod - def from_env(cls, prefix: str = "") -> "DBConfig": + def from_env(cls) -> "DBConfig": """Create a database configuration from environment variables.""" - pg_host = os.environ.get(f"{prefix}DB_HOST") - pg_user = os.environ.get(f"{prefix}DB_USER") - pg_port = os.environ.get(f"{prefix}DB_PORT") - db_name = os.environ.get(f"{prefix}DB_NAME") - pg_password = os.environ.get(f"{prefix}DB_PASSWORD") + pg_host = os.environ.get("DB_HOST") + pg_user = os.environ.get("DB_USER") + pg_port = os.environ.get("DB_PORT") + db_name = os.environ.get("DB_NAME") + pool_size = int(os.environ.get("DB_POOL_SIZE", "4")) + pg_password = os.environ.get("DB_PASSWORD") if pg_password is None: raise errors.ConfigurationError( - message=f"Please provide a database password in the '{prefix}DB_PASSWORD' environment variable." + message="Please provide a database password in the 'DB_PASSWORD' environment variable." 
) - kwargs = {"host": pg_host, "password": pg_password, "port": pg_port, "db_name": db_name, "user": pg_user} - config = cls(**{k: v for (k, v) in kwargs.items() if v is not None}) + config = cls( + password=pg_password, + host=pg_host or "localhost", + user=pg_user or "renku", + port=pg_port or "5432", + db_name=db_name or "renku", + pool_size=pool_size, + ) return config def conn_url(self, async_client: bool = True) -> str: @@ -50,7 +58,7 @@ def async_session_maker(self) -> Callable[..., AsyncSession]: if not DBConfig._async_engine: DBConfig._async_engine = create_async_engine( self.conn_url(), - pool_size=10, + pool_size=self.pool_size, max_overflow=0, ) return async_sessionmaker(DBConfig._async_engine, expire_on_commit=False) diff --git a/components/renku_data_services/errors/__init__.py b/components/renku_data_services/errors/__init__.py index 817e29d5d..897b9fe84 100644 --- a/components/renku_data_services/errors/__init__.py +++ b/components/renku_data_services/errors/__init__.py @@ -1,3 +1,3 @@ """Service errors component.""" -from renku_data_services.errors.errors import * # noqa: F401, F403 +from renku_data_services.errors.errors import * # noqa: F403 diff --git a/components/renku_data_services/errors/errors.py b/components/renku_data_services/errors/errors.py index 394828b29..5c0ef6647 100644 --- a/components/renku_data_services/errors/errors.py +++ b/components/renku_data_services/errors/errors.py @@ -1,8 +1,11 @@ """Exceptions for the server.""" from dataclasses import dataclass +from enum import StrEnum from typing import Optional +from ulid import ULID + @dataclass class BaseError(Exception): @@ -76,6 +79,7 @@ class MissingResourceError(BaseError): code: int = 1404 status_code: int = 404 message: str = "The requested resource does not exist or cannot be found" + quiet: bool = True @dataclass @@ -161,3 +165,19 @@ class SecretCreationError(BaseError): code: int = 1511 message: str = "An error occurred creating secrets." status_code: int = 500 + + +@dataclass +class CannotStartBuildError(ProgrammingError): + """Raised when an image build couldn't be started.""" + + code: int = 1512 + message: str = "An error occurred creating an image build." + + +def missing_or_unauthorized(resource_type: str | StrEnum, id: str | int | ULID) -> MissingResourceError: + """Generate a missing resource error with an ambiguous message.""" + return MissingResourceError( + message=f"The {resource_type} with ID {id} does not exist or " + "you do not have sufficient permissions to access it", + ) diff --git a/components/renku_data_services/k8s/client_interfaces.py b/components/renku_data_services/k8s/client_interfaces.py index 36d2d782c..43cdc41d0 100644 --- a/components/renku_data_services/k8s/client_interfaces.py +++ b/components/renku_data_services/k8s/client_interfaces.py @@ -60,3 +60,8 @@ def create_priority_class(self, body: Any, **kwargs: Any) -> Any: def delete_priority_class(self, name: Any, **kwargs: Any) -> Any: """Delete a priority class.""" ... + + @abstractmethod + def get_priority_class(self, name: Any, **kwargs: Any) -> Any: + """Retrieve a priority class.""" + ... 
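The new abstract method lets callers probe for an existing priority class before
creating one. A minimal sketch of that check-then-create pattern (the helper name
and wiring are illustrative; the real call site is the `create_quota` change in
`k8s/quota.py` further down):

    from contextlib import suppress

    from kubernetes import client

    def ensure_priority_class(scheduling_client, name: str) -> client.V1PriorityClass:
        """Return the priority class `name`, creating it only if it is missing."""
        with suppress(client.ApiException):
            # The real client raises a 404 ApiException, the dummy client returns None.
            existing = scheduling_client.get_priority_class(name)
            if existing is not None:
                return existing
        return scheduling_client.create_priority_class(
            client.V1PriorityClass(
                global_default=False,
                value=100,
                preemption_policy="Never",
                description="Renku resource quota priority class",
                metadata=client.V1ObjectMeta(name=name),
            )
        )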
diff --git a/components/renku_data_services/k8s/clients.py b/components/renku_data_services/k8s/clients.py index 00783c6d7..7080ab421 100644 --- a/components/renku_data_services/k8s/clients.py +++ b/components/renku_data_services/k8s/clients.py @@ -1,17 +1,35 @@ """Different implementations of k8s clients.""" +from __future__ import annotations + +import asyncio +import contextlib import multiprocessing.synchronize +from collections.abc import AsyncIterable, Coroutine from copy import deepcopy from multiprocessing import Lock from multiprocessing.synchronize import Lock as LockType -from typing import Any +from typing import TYPE_CHECKING, Any from uuid import uuid4 +import kr8s from kubernetes import client, config from kubernetes.config.config_exception import ConfigException from kubernetes.config.incluster_config import SERVICE_CERT_FILENAME, SERVICE_TOKEN_FILENAME, InClusterConfigLoader +from renku_data_services.errors import errors from renku_data_services.k8s.client_interfaces import K8sCoreClientInterface, K8sSchedudlingClientInterface +from renku_data_services.k8s.models import APIObjectInCluster, K8sObjectFilter + +if TYPE_CHECKING: + from renku_data_services.k8s.constants import ClusterId + from renku_data_services.k8s.models import ( + GVK, + Cluster, + K8sObject, + K8sObjectMeta, + ) + from renku_data_services.k8s_watcher import K8sDbCache class K8sCoreClient(K8sCoreClientInterface): # pragma:nocover @@ -81,6 +99,10 @@ def delete_priority_class(self, name: Any, **kwargs: Any) -> Any: """Delete a priority class.""" return self.client.delete_priority_class(name, **kwargs) + def get_priority_class(self, name: Any, **kwargs: Any) -> Any: + """Get a priority class.""" + return self.client.read_priority_class(name, **kwargs) + class DummyCoreClient(K8sCoreClientInterface): """Dummy k8s core API client that does not require a k8s cluster. 
@@ -214,3 +236,236 @@ def delete_priority_class(self, name: Any, **kwargs: Any) -> Any: if removed_pc is None: raise client.ApiException(status=404) return removed_pc + + def get_priority_class(self, name: Any, **kwargs: Any) -> Any: + """Get a priority class.""" + with self._lock: + return self.pcs.get(name, None) + + +class K8sClusterClient: + """A wrapper around a kr8s k8s client, acts on all resources of a cluster.""" + + def __init__(self, cluster: Cluster) -> None: + self.__cluster = cluster + assert self.__cluster.api is not None + + def get_cluster(self) -> Cluster: + """Return a cluster object.""" + return self.__cluster + + async def __list(self, _filter: K8sObjectFilter) -> AsyncIterable[APIObjectInCluster]: + if _filter.cluster is not None and _filter.cluster != self.__cluster.id: + return + + names = [_filter.name] if _filter.name is not None else [] + + try: + res = self.__cluster.api.async_get( + _filter.gvk.kr8s_kind, + *names, + label_selector=_filter.label_selector, + namespace=_filter.namespace, + ) + + async for r in res: + yield APIObjectInCluster(r, self.__cluster.id) + + except (kr8s.ServerError, kr8s.APITimeoutError, ValueError) as _e: + # ValueError is generated when the kind does not exist on the cluster + return + + async def __get_api_object(self, meta: K8sObjectFilter) -> APIObjectInCluster | None: + return await anext(aiter(self.__list(meta)), None) + + async def create(self, obj: K8sObject) -> K8sObject: + """Create the k8s object.""" + + api_obj = obj.to_api_object(self.__cluster.api) + await api_obj.create() + # if refresh isn't called, status and timestamp will be blank + await api_obj.refresh() + return obj.with_manifest(api_obj.to_dict()) + + async def patch(self, meta: K8sObjectMeta, patch: dict[str, Any] | list[dict[str, Any]]) -> K8sObject: + """Patch a k8s object. + + If the patch is a list we assume that we have a rfc6902 json patch like + `[{ "op": "add", "path": "/a/b/c", "value": [ "foo", "bar" ] }]`. + If the patch is a dictionary then it is considered to be a rfc7386 json merge patch. + """ + obj = await self.__get_api_object(meta.to_filter()) + if obj is None: + raise errors.MissingResourceError(message=f"The k8s resource with metadata {meta} cannot be found.") + patch_type = "json" if isinstance(patch, list) else None + await obj.obj.patch(patch, type=patch_type) + await obj.obj.refresh() + + return meta.with_manifest(obj.obj.to_dict()) + + async def delete(self, meta: K8sObjectMeta) -> None: + """Delete a k8s object.""" + obj = await self.__get_api_object(meta.to_filter()) + if obj is None: + return + with contextlib.suppress(kr8s.NotFoundError): + await obj.obj.delete(propagation_policy="Foreground") + + async def get(self, meta: K8sObjectMeta) -> K8sObject | None: + """Get a specific k8s object, None is returned if the object does not exist.""" + obj = await self.__get_api_object(meta.to_filter()) + if obj is None: + return None + return meta.with_manifest(obj.obj.to_dict()) + + async def list(self, _filter: K8sObjectFilter) -> AsyncIterable[K8sObject]: + """List all k8s objects.""" + async for r in self.__list(_filter): + yield r.to_k8s_object() + + +class K8SCachedClusterClient(K8sClusterClient): + """A wrapper around a kr8s k8s client. + + Provides access to a cache for listing and reading resources but fallback to the cluster for other operations. 
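+
+    Concretely: create and patch update both the cluster and the cache; get and
+    list are answered from the cache for the configured kinds (and from the
+    cluster otherwise); delete only talks to the cluster and relies on the
+    watcher's periodic sync to purge the cached entry.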
+ """ + + def __init__(self, cluster: Cluster, cache: K8sDbCache, kinds_to_cache: list[GVK]) -> None: + super().__init__(cluster) + self.__cache = cache + self.__kinds_to_cache = set(kinds_to_cache) + + async def create(self, obj: K8sObject) -> K8sObject: + """Create the k8s object.""" + if obj.gvk in self.__kinds_to_cache: + await self.__cache.upsert(obj) + try: + obj = await super().create(obj) + except: + # if there was an error creating the k8s object, we delete it from the db again to not have ghost entries + if obj.gvk in self.__kinds_to_cache: + await self.__cache.delete(obj) + raise + return obj + + async def patch(self, meta: K8sObjectMeta, patch: dict[str, Any] | list[dict[str, Any]]) -> K8sObject: + """Patch a k8s object.""" + obj = await super().patch(meta, patch) + if meta.gvk in self.__kinds_to_cache: + await self.__cache.upsert(obj) + return obj + + async def delete(self, meta: K8sObjectMeta) -> None: + """Delete a k8s object.""" + await super().delete(meta) + # NOTE: We use foreground deletion in the k8s client. + # This means that the parent resource is usually not deleted immediately and will + # wait for its children to be deleted before it is deleted. + # To avoid premature purging of resources from the cache we do not delete the resource here + # from the cache, rather we expect that the cache will sync itself properly and quickly purge + # stale resources. + + async def get(self, meta: K8sObjectMeta) -> K8sObject | None: + """Get a specific k8s object, None is returned if the object does not exist.""" + if meta.gvk in self.__kinds_to_cache: + res = await self.__cache.get(meta) + else: + res = await super().get(meta) + + return res + + async def list(self, _filter: K8sObjectFilter) -> AsyncIterable[K8sObject]: + """List all k8s objects.""" + + # Don't even go to the DB or Kubernetes if the cluster id is set and does not match our cluster. 
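+        # NOTE: Returning early from this async generator simply ends the iteration,
+        # so callers see an empty listing rather than an error.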
+ if _filter.cluster is not None and _filter.cluster != self.get_cluster().id: + return + + filter2 = deepcopy(_filter) + if filter2.cluster is None: + filter2.cluster = self.get_cluster().id + + results = self.__cache.list(filter2) if _filter.gvk in self.__kinds_to_cache else super().list(filter2) + async for res in results: + yield res + + +class K8sClusterClientsPool: + """A wrapper around a kr8s k8s client, acts on all resources over many clusters.""" + + def __init__( + self, cache: K8sDbCache, kinds_to_cache: list[GVK], get_clusters: Coroutine[Any, Any, list[Cluster]] + ) -> None: + self.__clients: dict[ClusterId, K8sClusterClient] | None = None + self.__cache = cache + self.__kinds_to_cache = kinds_to_cache + self.__get_clusters = get_clusters + self.__lock = asyncio.Lock() + + async def __load(self) -> None: + # Avoid trying to take a lock when we have loaded the dictionary (99% of the time) + if self.__clients is not None: + return + + async with self.__lock: + # We know it was none before getting the lock, but we might have been preempted by another coroutine which + # could have done the job by now, so check again, if still not set, load the value, otherwise we are done + if self.__clients is None: + clusters: list[Cluster] = await self.__get_clusters + self.__clients = { + c.id: K8SCachedClusterClient(c, self.__cache, self.__kinds_to_cache) for c in clusters + } + + async def __get_client_or_die(self, cluster_id: ClusterId) -> K8sClusterClient: + cluster_client = None + if self.__clients is None: + await self.__load() + + if self.__clients is not None: + cluster_client = self.__clients.get(cluster_id) + + if cluster_client is None: + raise errors.MissingResourceError( + message=f"Could not find cluster with id {cluster_id} in the list of clusters." + ) + return cluster_client + + def cluster_by_id(self, cluster_id: ClusterId) -> Cluster: + """Return a cluster by its id.""" + _client = None + if self.__clients is not None: + _client = self.__clients.get(cluster_id) + + if _client is not None: + return _client.get_cluster() + + raise errors.MissingResourceError( + message=f"Could not find cluster with id {cluster_id} in the list of clusters." 
+ ) + + async def create(self, obj: K8sObject) -> K8sObject: + """Create the k8s object.""" + return await (await self.__get_client_or_die(obj.cluster)).create(obj) + + async def patch(self, meta: K8sObjectMeta, patch: dict[str, Any] | list[dict[str, Any]]) -> K8sObject: + """Patch a k8s object.""" + return await (await self.__get_client_or_die(meta.cluster)).patch(meta, patch) + + async def delete(self, meta: K8sObjectMeta) -> None: + """Delete a k8s object.""" + await (await self.__get_client_or_die(meta.cluster)).delete(meta) + + async def get(self, meta: K8sObjectMeta) -> K8sObject | None: + """Get a specific k8s object, None is returned if the object does not exist.""" + return await (await self.__get_client_or_die(meta.cluster)).get(meta) + + async def list(self, _filter: K8sObjectFilter) -> AsyncIterable[K8sObject]: + """List all k8s objects.""" + if self.__clients is None: + await self.__load() + + if self.__clients is not None: + cluster_clients = [v for v in self.__clients.values()] + for c in cluster_clients: + async for r in c.list(_filter): + yield r diff --git a/components/renku_data_services/k8s/config.py b/components/renku_data_services/k8s/config.py new file mode 100644 index 000000000..0e522f073 --- /dev/null +++ b/components/renku_data_services/k8s/config.py @@ -0,0 +1,117 @@ +"""Base config for k8s.""" + +import os + +import kr8s +import yaml + +import renku_data_services.k8s.constants +from renku_data_services.app_config import logging +from renku_data_services.crc.db import ClusterRepository +from renku_data_services.k8s import models as k8s_models +from renku_data_services.k8s.constants import DEFAULT_K8S_CLUSTER + +logger = logging.getLogger(__name__) + + +class KubeConfig: + """Wrapper around kube config to get a kr8s api.""" + + def __init__( + self, + kubeconfig: str | None = None, + current_context_name: str | None = None, + ns: str | None = None, + sa: str | None = None, + url: str | None = None, + ) -> None: + self._kubeconfig = kubeconfig + self._ns = ns + self._current_context_name = current_context_name + self._sa = sa + self._url = url + + def _sync_api(self) -> kr8s.Api | kr8s._AsyncApi: + return kr8s.api( + kubeconfig=self._kubeconfig, + namespace=self._ns, + context=self._current_context_name, + ) + + def _async_api(self) -> kr8s.asyncio.Api: + """Create an async api client from sync code. + + Kr8s cannot return an AsyncAPI instance from sync code, and we can't easily make all our config code async, + so this method is a direct copy of the kr8s sync client code, just that it returns an async client. 
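+
+        Example (sketch; mirrors how K8sClusterClient drives the client):
+
+            api = KubeConfigEnv().api()
+            async for pod in api.async_get("pods", namespace="default"):
+                print(pod.name)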
+ """ + ret = kr8s._async_utils.run_sync(kr8s.asyncio.api)( + url=self._url, + kubeconfig=self._kubeconfig, + serviceaccount=self._sa, + namespace=self._ns, + context=self._current_context_name, + _asyncio=True, # This is the only line that is different from kr8s code + ) + assert isinstance(ret, kr8s.asyncio.Api) + return ret + + def api(self, _async: bool = True) -> kr8s.Api | kr8s._AsyncApi: + """Instantiate the Kr8s Api object based on the configuration.""" + if _async: + return self._async_api() + else: + return self._sync_api() + + +class KubeConfigEnv(KubeConfig): + """Get a kube config from the environment.""" + + def __init__(self) -> None: + super().__init__(ns=os.environ.get("K8S_NAMESPACE", "default")) + + +class KubeConfigYaml(KubeConfig): + """Get a kube config from a yaml file.""" + + def __init__(self, kubeconfig: str) -> None: + super().__init__(kubeconfig=kubeconfig) + + with open(kubeconfig) as stream: + _conf = yaml.safe_load(stream) + + self._current_context_name = _conf.get("current-context", None) + if self._current_context_name is not None: + for context in _conf.get("contexts", []): + name = context.get("name", None) + inner = context.get("context", None) + if inner is not None and name is not None and name == self._current_context_name: + self._ns = inner.get("namespace", None) + break + + +async def get_clusters( + kube_conf_root_dir: str, namespace: str, api: kr8s.asyncio.Api, cluster_rp: ClusterRepository +) -> list[k8s_models.Cluster]: + """Get all clusters accessible to the application.""" + + clusters = [k8s_models.Cluster(id=DEFAULT_K8S_CLUSTER, namespace=namespace, api=api)] + + if not os.path.exists(kube_conf_root_dir): + logger.warning(f"Cannot open directory '{kube_conf_root_dir}', ignoring kube configs...") + return clusters + + async for db_cluster in cluster_rp.select_all(): + filename = db_cluster.config_name + try: + kube_config = KubeConfigYaml(f"{kube_conf_root_dir}/{filename}") + cluster = k8s_models.Cluster( + id=renku_data_services.k8s.constants.ClusterId(str(db_cluster.id)), + namespace=kube_config.api().namespace, + api=kube_config.api(), + ) + clusters.append(cluster) + logger.info(f"Successfully loaded Kubernetes config: '{kube_conf_root_dir}/{filename}'") + except Exception as e: + logger.warning(f"Failed while loading '{kube_conf_root_dir}/{filename}', ignoring kube config. Error: {e}") + + return clusters diff --git a/components/renku_data_services/k8s/constants.py b/components/renku_data_services/k8s/constants.py new file mode 100644 index 000000000..50c06e0e8 --- /dev/null +++ b/components/renku_data_services/k8s/constants.py @@ -0,0 +1,17 @@ +"""Constant values for k8s.""" + +from __future__ import annotations + +from typing import Final, NewType + +# LSA Not enough time: Adapt this to be an alias to ULID +ClusterId = NewType("ClusterId", str) + +DEFAULT_K8S_CLUSTER: Final[ClusterId] = ClusterId("0RENK1RENK2RENK3RENK4RENK5") # This has to be a valid ULID + +DUMMY_TASK_RUN_USER_ID: Final[str] = "DummyTaskRunUser" +"""The user id to use for TaskRuns in the k8s cache. + +Note: we can't curently propagate labels to TaskRuns through shipwright, so we just use a dummy user id for all of them. +This might change if shipwright SHIP-0034 gets implemented. 
+""" diff --git a/components/renku_data_services/k8s/models.py b/components/renku_data_services/k8s/models.py new file mode 100644 index 000000000..07f583e82 --- /dev/null +++ b/components/renku_data_services/k8s/models.py @@ -0,0 +1,275 @@ +"""Models for the k8s watcher.""" + +from __future__ import annotations + +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, Self, cast + +from box import Box +from kr8s._api import Api +from kr8s.asyncio.objects import APIObject +from ulid import ULID + +from renku_data_services.base_models import APIUser +from renku_data_services.errors import MissingResourceError, errors +from renku_data_services.k8s.constants import DUMMY_TASK_RUN_USER_ID, ClusterId +from renku_data_services.notebooks.cr_amalthea_session import TlsSecret + +if TYPE_CHECKING: + from renku_data_services.crc.db import ClusterRepository + from renku_data_services.notebooks.config.dynamic import _SessionIngress + + +class K8sObjectMeta: + """Metadata about a k8s object.""" + + def __init__( + self, + name: str, + namespace: str, + cluster: ClusterId, + gvk: GVK, + user_id: str | None = None, + namespaced: bool = True, + ) -> None: + self.name = name + self.namespace = namespace + self.cluster = cluster + self.gvk = gvk + self.user_id = user_id + + self.namespaced = namespaced + + def with_manifest(self, manifest: dict[str, Any]) -> K8sObject: + """Convert to a full k8s object.""" + return K8sObject( + name=self.name, + namespace=self.namespace, + cluster=self.cluster, + gvk=self.gvk, + manifest=Box(manifest), + user_id=self.user_id, + ) + + def to_filter(self) -> K8sObjectFilter: + """Convert the metadata to a filter used when listing resources.""" + return K8sObjectFilter( + gvk=self.gvk, + namespace=self.namespace, + cluster=self.cluster, + name=self.name, + user_id=self.user_id, + ) + + def __repr__(self) -> str: + return ( + f"K8sObject(name={self.name}, namespace={self.namespace}, cluster={self.cluster}, " + f"gvk={self.gvk}, user_id={self.user_id})" + ) + + +class K8sObject(K8sObjectMeta): + """Represents an object in k8s.""" + + def __init__( + self, + name: str, + namespace: str, + cluster: ClusterId, + gvk: GVK, + manifest: Box, + user_id: str | None = None, + namespaced: bool = True, + ) -> None: + super().__init__(name, namespace, cluster, gvk, user_id, namespaced) + self.manifest = manifest + + def __repr__(self) -> str: + return super().__repr__() + + def to_api_object(self, api: Api) -> APIObject: + """Convert a regular k8s object to an api object for kr8s.""" + + _singular = self.gvk.kind.lower() + _plural = f"{_singular}s" + _endpoint = _plural + + class _APIObj(APIObject): + kind = self.gvk.kind + version = self.gvk.group_version + singular = _singular + plural = _plural + endpoint = _endpoint + namespaced = self.namespaced + + return _APIObj(resource=self.manifest, namespace=self.namespace, api=api) + + +@dataclass +class K8sObjectFilter: + """Parameters used when filtering resources from the cache or k8s.""" + + gvk: GVK + name: str | None = None + namespace: str | None = None + cluster: ClusterId | None = None + label_selector: dict[str, str] | None = None + user_id: str | None = None + + +@dataclass(eq=True, frozen=True) +class Cluster: + """Representation of a k8s cluster.""" + + id: ClusterId + namespace: str + api: Api + + def with_api_object(self, obj: APIObject) -> APIObjectInCluster: + """Create an API object associated with the cluster.""" + return APIObjectInCluster(obj, self.id) + + async def get_storage_class( + self, user: APIUser, 
cluster_repo: ClusterRepository, default_storage_class: str | None + ) -> str | None: + """Get the default storage class for the cluster.""" + try: + cluster = await cluster_repo.select(user, ULID.from_str(self.id)) + storage_class = cluster.session_storage_class + except (MissingResourceError, ValueError) as _e: + storage_class = default_storage_class + + return storage_class + + async def get_ingress_parameters( + self, user: APIUser, cluster_repo: ClusterRepository, main_ingress: _SessionIngress, server_name: str + ) -> tuple[str, str, str, str, TlsSecret | None, dict[str, str]]: + """Returns the ingress parameters of the cluster.""" + tls_name = None + + try: + cluster = await cluster_repo.select(user, ULID.from_str(self.id)) + + host = cluster.session_host + base_server_path = f"{cluster.session_path}/{server_name}" + base_server_url = f"{cluster.session_protocol.value}://{host}:{cluster.session_port}{base_server_path}" + base_server_https_url = base_server_url + tls_name = cluster.session_tls_secret_name + ingress_annotations = cluster.session_ingress_annotations + except (MissingResourceError, ValueError) as _e: + # Fallback to global, main cluster parameters + host = main_ingress.host + base_server_path = main_ingress.base_path(server_name) + base_server_url = main_ingress.base_url(server_name) + base_server_https_url = main_ingress.base_url(server_name, force_https=True) + ingress_annotations = main_ingress.annotations + + if main_ingress.tls_secret is not None: + tls_name = main_ingress.tls_secret + + tls_secret = None if tls_name is None else TlsSecret(adopt=False, name=tls_name) + + return base_server_path, base_server_url, base_server_https_url, host, tls_secret, ingress_annotations + + +@dataclass(kw_only=True, frozen=True) +class GVK: + """The information about the group, version and kind of a K8s object.""" + + kind: str + version: str + group: str | None = None + + @property + def group_version(self) -> str: + """Get the group and version joined by '/'.""" + if self.group == "core" or self.group is None: + return self.version + return f"{self.group}/{self.version}" + + @property + def kr8s_kind(self) -> str: + """Returns the fully qualified kind string for this filter for kr8s. + + Note: This exists because kr8s has some methods where it only allows you to specify 'kind' and then has + weird logic to split that. This method is essentially the reverse of the kr8s logic so we can hand it a + string it will accept. + """ + if self.group is None: + # e.g. pod/v1 + return f"{self.kind.lower()}/{self.version}" + # e.g. 
buildrun.shipwright.io/v1beta1 + return f"{self.kind.lower()}.{self.group_version}" + + @classmethod + def from_kr8s_object(cls, kr8s_obj: type[APIObject] | APIObject) -> Self: + """Extract GVK from a kr8s object.""" + if "/" in kr8s_obj.version: + grp_version_split = kr8s_obj.version.split("/") + grp = grp_version_split[0] + version = grp_version_split[1] + else: + grp = None + version = kr8s_obj.version + return cls( + kind=kr8s_obj.kind, + group=grp, + version=version, + ) + + +@dataclass +class APIObjectInCluster: + """A kr8s k8s object from a specific cluster.""" + + obj: APIObject + cluster: ClusterId + + @property + def user_id(self) -> str | None: + """Extract the user id from annotations.""" + match self.obj.singular: + case "jupyterserver": + return cast(str, self.obj.metadata.labels["renku.io/userId"]) + case "amaltheasession": + return cast(str, self.obj.metadata.labels["renku.io/safe-username"]) + case "buildrun": + return cast(str, self.obj.metadata.labels["renku.io/safe-username"]) + + case "taskrun": + return DUMMY_TASK_RUN_USER_ID + case _: + return None + + @property + def meta(self) -> K8sObjectMeta: + """Extract the metadata from an api object.""" + return K8sObjectMeta( + name=self.obj.name, + namespace=self.obj.namespace or "default", + cluster=self.cluster, + gvk=GVK.from_kr8s_object(self.obj), + user_id=self.user_id, + ) + + def to_k8s_object(self) -> K8sObject: + """Convert the api object to a regular k8s object.""" + if self.obj.name is None or self.obj.namespace is None: + raise errors.ProgrammingError() + return K8sObject( + name=self.obj.name, + namespace=self.obj.namespace, + gvk=GVK.from_kr8s_object(self.obj), + manifest=Box(self.obj.to_dict()), + cluster=self.cluster, + user_id=self.user_id, + ) + + @classmethod + def from_k8s_object(cls, obj: K8sObject, api: Api) -> Self: + """Convert a regular k8s object to an api object.""" + + return cls( + obj=obj.to_api_object(api), + cluster=obj.cluster, + ) diff --git a/components/renku_data_services/k8s/quota.py b/components/renku_data_services/k8s/quota.py index 28bce8f94..e12abda8b 100644 --- a/components/renku_data_services/k8s/quota.py +++ b/components/renku_data_services/k8s/quota.py @@ -1,5 +1,6 @@ """The adapter used to create/delete/update/get resource quotas and priority classes in k8s.""" +from contextlib import suppress from dataclasses import dataclass, field from typing import Optional @@ -13,7 +14,7 @@ @dataclass class QuotaRepository: - """Adapter for CRUD operations on resource quotas and prioirty classes in k8s.""" + """Adapter for CRUD operations on resource quotas and priority classes in k8s.""" core_client: K8sCoreClientInterface scheduling_client: K8sSchedudlingClientInterface @@ -91,20 +92,25 @@ def get_quotas(self, name: Optional[str] = None) -> list[models.Quota]: def create_quota(self, quota: models.Quota) -> models.Quota: """Create a resource quota and priority class.""" - if quota.id: - raise errors.BaseError(message=f"Cannot create a quota with a preset id - {quota.id}.") - quota = quota.generate_id() + metadata = {"labels": {self._label_name: self._label_value}, "name": quota.id} quota_manifest = self._quota_to_manifest(quota) - pc: client.V1PriorityClass = self.scheduling_client.create_priority_class( - client.V1PriorityClass( - global_default=False, - value=100, - preemption_policy="Never", - description="Renku resource quota prioirty class", - metadata=client.V1ObjectMeta(**metadata), - ), - ) + + # LSA Check if we have a priority class with the given name, return it or create one 
otherwise. + pc: client.V1PriorityClass | None = None + with suppress(client.ApiException): + pc = self.scheduling_client.get_priority_class(quota.id) + if pc is None: + pc = self.scheduling_client.create_priority_class( + client.V1PriorityClass( + global_default=False, + value=100, + preemption_policy="Never", + description="Renku resource quota priority class", + metadata=client.V1ObjectMeta(**metadata), + ), + ) + # NOTE: The priority class is cluster-scoped and a namespace-scoped resource cannot be an owner # of a cluster-scoped resource. That is why the priority class is an owner of the quota. quota_manifest.owner_references = [ @@ -130,15 +136,14 @@ def delete_quota(self, name: str) -> None: except client.ApiException as e: if e.status == 404: # NOTE: The priorityclass is an owner of the resource quota so when the priority class is deleted the - # resource quota is also deleted. Also we dont care if the thing we are trying to delete is not there + # resource quota is also deleted. Also, we don't care if the thing we are trying to delete is not there # we have the desired state so we can just go on. return raise def update_quota(self, quota: models.Quota) -> models.Quota: """Update a specific resource quota.""" - if not quota.id: - quota = quota.generate_id() + quota_manifest = self._quota_to_manifest(quota) self.core_client.patch_namespaced_resource_quota(name=quota.id, namespace=self.namespace, body=quota_manifest) return quota diff --git a/components/renku_data_services/k8s_watcher/__init__.py b/components/renku_data_services/k8s_watcher/__init__.py new file mode 100644 index 000000000..1ab1a3d94 --- /dev/null +++ b/components/renku_data_services/k8s_watcher/__init__.py @@ -0,0 +1,7 @@ +"""K8s watcher.""" + +from renku_data_services.k8s_watcher.core import K8sWatcher, k8s_object_handler +from renku_data_services.k8s_watcher.db import K8sDbCache +from renku_data_services.k8s_watcher.orm import BaseORM + +__all__ = ["K8sWatcher", "k8s_object_handler", "K8sDbCache", "BaseORM"] diff --git a/components/renku_data_services/k8s_watcher/core.py b/components/renku_data_services/k8s_watcher/core.py new file mode 100644 index 000000000..c9af1c1d0 --- /dev/null +++ b/components/renku_data_services/k8s_watcher/core.py @@ -0,0 +1,220 @@ +"""K8s watcher main.""" + +from __future__ import annotations + +import asyncio +import contextlib +from asyncio import CancelledError, Task +from collections.abc import Awaitable, Callable +from datetime import datetime, timedelta +from typing import TYPE_CHECKING + +from renku_data_services.app_config import logging +from renku_data_services.base_models.core import APIUser, InternalServiceAdmin, ServiceAdminId +from renku_data_services.base_models.metrics import MetricsService +from renku_data_services.crc.db import ResourcePoolRepository +from renku_data_services.k8s.clients import K8sClusterClient +from renku_data_services.k8s.models import GVK, K8sObject, K8sObjectFilter +from renku_data_services.k8s_watcher.db import K8sDbCache +from renku_data_services.notebooks.crs import State + +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from renku_data_services.k8s.constants import ClusterId + from renku_data_services.k8s.models import APIObjectInCluster, Cluster + +type EventHandler = Callable[[APIObjectInCluster, str], Awaitable[None]] +type SyncFunc = Callable[[], Awaitable[None]] + +k8s_watcher_admin_user = InternalServiceAdmin(id=ServiceAdminId.k8s_watcher) + + +class K8sWatcher: + """Watch k8s events and call the handler with every event.""" + + 
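+    # Design: one watch task per (cluster, kind) pair plus one periodic full-sync
+    # task per cluster, so a missed watch event is eventually reconciled by the sync.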
def __init__( + self, + handler: EventHandler, + clusters: dict[ClusterId, Cluster], + kinds: list[GVK], + db_cache: K8sDbCache, + ) -> None: + self.__handler = handler + self.__watch_tasks: dict[ClusterId, list[Task]] = {} + self.__full_sync_tasks: dict[ClusterId, Task] = {} + self.__full_sync_times: dict[ClusterId, datetime] = {} + self.__full_sync_running: set[ClusterId] = set() + self.__kinds = kinds + self.__clusters = clusters + self.__sync_period_seconds = 600 + self.__cache = db_cache + + async def __sync(self, cluster: Cluster, kind: GVK) -> None: + """Upsert K8s objects in the cache and remove deleted objects from the cache.""" + clnt = K8sClusterClient(cluster) + fltr = K8sObjectFilter(gvk=kind, cluster=cluster.id, namespace=cluster.namespace) + # Upsert new / updated objects + objects_in_k8s: dict[str, K8sObject] = {} + async for obj in clnt.list(fltr): + objects_in_k8s[obj.name] = obj + await self.__cache.upsert(obj) + # Remove objects that have been deleted from k8s but are still in cache + async for cache_obj in self.__cache.list(fltr): + cache_obj_is_in_k8s = objects_in_k8s.get(cache_obj.name) is not None + if cache_obj_is_in_k8s: + continue + await self.__cache.delete(cache_obj) + + async def __full_sync(self, cluster: Cluster) -> None: + """Run the full sync if it has never run or at the required interval.""" + last_sync = self.__full_sync_times.get(cluster.id) + since_last_sync = datetime.now() - last_sync if last_sync is not None else None + if since_last_sync is not None and since_last_sync.total_seconds() < self.__sync_period_seconds: + return + self.__full_sync_running.add(cluster.id) + for kind in self.__kinds: + logger.info(f"Starting full k8s cache sync for cluster {cluster} and kind {kind}") + await self.__sync(cluster, kind) + self.__full_sync_times[cluster.id] = datetime.now() + self.__full_sync_running.remove(cluster.id) + + async def __periodic_full_sync(self, cluster: Cluster) -> None: + """Keeps trying to run the full sync.""" + while True: + await self.__full_sync(cluster) + await asyncio.sleep(self.__sync_period_seconds / 10) + + async def __watch_kind(self, kind: GVK, cluster: Cluster) -> None: + while True: + try: + watch = cluster.api.async_watch(kind=kind.kr8s_kind, namespace=cluster.namespace) + async for event_type, obj in watch: + while cluster.id in self.__full_sync_running: + logger.info( + f"Pausing k8s watch event processing for cluster {cluster} until full sync completes" + ) + await asyncio.sleep(5) + await self.__handler(cluster.with_api_object(obj), event_type) + # in some cases, the kr8s loop above just never yields, especially if there's exceptions which + # can bypass async scheduling. This sleep here is as a last line of defence so this code does not + # execute indefinitely and prevent another resource kind from being watched. 
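+                    # asyncio.sleep(0) yields control back to the event loop without
+                    # introducing any real delay.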
+
+    async def __watch_kind(self, kind: GVK, cluster: Cluster) -> None:
+        while True:
+            try:
+                watch = cluster.api.async_watch(kind=kind.kr8s_kind, namespace=cluster.namespace)
+                async for event_type, obj in watch:
+                    while cluster.id in self.__full_sync_running:
+                        logger.info(
+                            f"Pausing k8s watch event processing for cluster {cluster} until full sync completes"
+                        )
+                        await asyncio.sleep(5)
+                    await self.__handler(cluster.with_api_object(obj), event_type)
+                # In some cases the kr8s loop above never yields, especially when exceptions bypass async
+                # scheduling. This sleep is a last line of defence so that this coroutine cannot run
+                # indefinitely and prevent other resource kinds from being watched.
+                await asyncio.sleep(0)
+            except Exception as e:
+                logger.error(f"watch loop failed for {kind} in cluster {cluster.id}", exc_info=e)
+                # Without sleeping, this can hang the event loop, as exceptions seem to bypass the async scheduler
+                await asyncio.sleep(1)
+
+    def __run_single(self, cluster: Cluster) -> list[Task]:
+        # The loops and error handling here will need some testing and love
+        tasks = []
+        for kind in self.__kinds:
+            logger.info(f"watching {kind} in cluster {cluster.id}")
+            tasks.append(asyncio.create_task(self.__watch_kind(kind, cluster)))
+
+        return tasks
+
+    async def start(self) -> None:
+        """Start the watcher."""
+        for cluster in sorted(self.__clusters.values(), key=lambda x: x.id):
+            await self.__full_sync(cluster)
+            self.__full_sync_tasks[cluster.id] = asyncio.create_task(self.__periodic_full_sync(cluster))
+            self.__watch_tasks[cluster.id] = self.__run_single(cluster)
+
+    async def wait(self) -> None:
+        """Wait for all tasks.
+
+        This is mainly used to block the main function.
+        """
+        all_tasks = list(self.__full_sync_tasks.values())
+        for tasks in self.__watch_tasks.values():
+            all_tasks.extend(tasks)
+        await asyncio.gather(*all_tasks)
+
+    async def stop(self, timeout: timedelta = timedelta(seconds=10)) -> None:
+        """Stop the watcher, cancelling each task or timing out."""
+
+        async def stop_task(task: Task, timeout: timedelta) -> None:
+            if task.done():
+                return
+            task.cancel()
+            try:
+                async with asyncio.timeout(timeout.total_seconds()):
+                    with contextlib.suppress(CancelledError):
+                        await task
+            except TimeoutError:
+                logger.error("timeout trying to cancel k8s watcher task")
+                return
+
+        for task_list in self.__watch_tasks.values():
+            for task in task_list:
+                await stop_task(task, timeout)
+        for task in self.__full_sync_tasks.values():
+            await stop_task(task, timeout)
+
+
+async def collect_metrics(
+    previous_obj: K8sObject | None,
+    new_obj: APIObjectInCluster,
+    event_type: str,
+    user_id: str,
+    metrics: MetricsService,
+    rp_repo: ResourcePoolRepository,
+) -> None:
+    """Track product metrics."""
+    user = APIUser(id=user_id)
+
+    if event_type == "DELETED":
+        # session stopping
+        await metrics.session_stopped(user=user, metadata={"session_id": new_obj.meta.name})
+        return
+    previous_state = previous_obj.manifest.get("status", {}).get("state", None) if previous_obj else None
+    match new_obj.obj.status.state:
+        case State.Running.value if previous_state is None or previous_state == State.NotReady.value:
+            # session starting
+            resource_class_id = int(new_obj.obj.metadata.annotations.get("renku.io/resource_class_id"))
+            resource_pool = await rp_repo.get_resource_pool_from_class(k8s_watcher_admin_user, resource_class_id)
+            resource_class = await rp_repo.get_resource_class(k8s_watcher_admin_user, resource_class_id)
+
+            await metrics.session_started(
+                user=user,
+                metadata={
+                    "cpu": int(resource_class.cpu * 1000),
+                    "memory": resource_class.memory,
+                    "gpu": resource_class.gpu,
+                    "storage": new_obj.obj.spec.session.storage.size,
+                    "resource_class_id": resource_class_id,
+                    "resource_pool_id": resource_pool.id or "",
+                    "resource_class_name": f"{resource_pool.name}.{resource_class.name}",
+                    "session_id": new_obj.meta.name,
+                },
+            )
+        case State.Running.value | State.NotReady.value if previous_state == State.Hibernated.value:
+            # session resumed
+            await metrics.session_resumed(user, metadata={"session_id": new_obj.meta.name})
+        case State.Hibernated.value if previous_state != State.Hibernated.value:
+            # session hibernated
+            await metrics.session_hibernated(user=user, metadata={"session_id": new_obj.meta.name})
+        case _:
+            pass
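
The `match` statement above is in effect a transition table keyed on the previously cached state and the incoming one. A standalone restatement as a pure function, assuming the `State` enum's string values match the literals below (a hypothetical helper, convenient for unit-testing the dispatch):

    def classify_transition(previous: str | None, new: str, event_type: str) -> str | None:
        """Return the metric event implied by a state transition, or None."""
        if event_type == "DELETED":
            return "session_stopped"
        if new == "Running" and previous in (None, "NotReady"):
            return "session_started"
        if new in ("Running", "NotReady") and previous == "Hibernated":
            return "session_resumed"
        if new == "Hibernated" and previous != "Hibernated":
            return "session_hibernated"
        return None
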
+
+
+def k8s_object_handler(cache: K8sDbCache, metrics: MetricsService, rp_repo: ResourcePoolRepository) -> EventHandler:
+    """Listen to k8s events and update the cache."""
+
+    async def handler(obj: APIObjectInCluster, event_type: str) -> None:
+        existing = await cache.get(obj.meta)
+        if obj.user_id is not None:
+            try:
+                await collect_metrics(existing, obj, event_type, obj.user_id, metrics, rp_repo)
+            except Exception as e:
+                logger.error("failed to track product metrics", exc_info=e)
+        if event_type == "DELETED":
+            await cache.delete(obj.meta)
+            return
+        k8s_object = obj.to_k8s_object()
+        k8s_object.user_id = obj.user_id
+        await cache.upsert(k8s_object)
+
+    return handler
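
Taken together, the intended wiring appears to be: construct a `K8sDbCache`, wrap it with `k8s_object_handler`, and hand the resulting handler to `K8sWatcher`. A minimal startup sketch under that assumption; the session maker, cluster map, metrics service and resource-pool repository are placeholders, and the `AmaltheaSession` GVK is only an example kind:

    from renku_data_services.k8s.models import GVK
    from renku_data_services.k8s_watcher import K8sDbCache, K8sWatcher, k8s_object_handler

    async def run_watcher(session_maker, clusters, metrics, rp_repo) -> None:
        cache = K8sDbCache(session_maker)
        watcher = K8sWatcher(
            handler=k8s_object_handler(cache, metrics, rp_repo),
            clusters=clusters,
            kinds=[GVK(group="amalthea.dev", version="v1alpha1", kind="AmaltheaSession")],
            db_cache=cache,
        )
        await watcher.start()  # initial full sync, then background watch tasks
        await watcher.wait()   # block until the tasks finish or are cancelled
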
diff --git a/components/renku_data_services/k8s_watcher/db.py b/components/renku_data_services/k8s_watcher/db.py
new file mode 100644
index 000000000..411ffa87f
--- /dev/null
+++ b/components/renku_data_services/k8s_watcher/db.py
@@ -0,0 +1,102 @@
+"""K8s watcher database and k8s wrappers."""
+
+from __future__ import annotations
+
+from collections.abc import AsyncIterable, Callable
+
+import sqlalchemy
+from sqlalchemy import Select, bindparam, select
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from renku_data_services.errors import errors
+from renku_data_services.k8s.models import K8sObject, K8sObjectFilter, K8sObjectMeta
+from renku_data_services.k8s_watcher.orm import K8sObjectORM
+
+
+class K8sDbCache:
+    """Caching k8s objects in postgres."""
+
+    def __init__(self, session_maker: Callable[..., AsyncSession]) -> None:
+        self.__session_maker = session_maker
+
+    @staticmethod
+    def __get_where_clauses(_filter: K8sObjectFilter) -> Select[tuple[K8sObjectORM]]:
+        stmt = select(K8sObjectORM)
+        if _filter.name is not None:
+            stmt = stmt.where(K8sObjectORM.name == _filter.name)
+        if _filter.namespace is not None:
+            stmt = stmt.where(K8sObjectORM.namespace == _filter.namespace)
+        if _filter.cluster is not None:
+            stmt = stmt.where(K8sObjectORM.cluster == str(_filter.cluster))
+        if _filter.gvk is not None:
+            stmt = stmt.where(K8sObjectORM.kind_insensitive == _filter.gvk.kind)
+            stmt = stmt.where(K8sObjectORM.version_insensitive == _filter.gvk.version)
+            if _filter.gvk.group is None:
+                stmt = stmt.where(K8sObjectORM.group.is_(None))
+            else:
+                stmt = stmt.where(K8sObjectORM.group_insensitive == _filter.gvk.group)
+        if _filter.user_id is not None:
+            stmt = stmt.where(K8sObjectORM.user_id == _filter.user_id)
+        if _filter.label_selector is not None:
+            stmt = stmt.where(
+                # K8sObjectORM.manifest.comparator.contains({"metadata": {"labels": _filter.label_selector}})
+                sqlalchemy.text("manifest -> 'metadata' -> 'labels' @> :labels").bindparams(
+                    bindparam("labels", _filter.label_selector, type_=JSONB)
+                )
+            )
+        return stmt
+
+    async def __get(self, meta: K8sObjectMeta, session: AsyncSession) -> K8sObjectORM | None:
+        stmt = self.__get_where_clauses(meta.to_filter())
+        obj_orm = await session.scalar(stmt)
+        return obj_orm
+
+    async def upsert(self, obj: K8sObject) -> None:
+        """Insert or update an object in the cache."""
+        if obj.user_id is None:
+            raise errors.ValidationError(message="user_id is required to upsert k8s object.")
+        async with self.__session_maker() as session, session.begin():
+            obj_orm = await self.__get(obj, session)
+            if obj_orm is not None:
+                obj_orm.manifest = obj.manifest.to_dict()
+                await session.commit()
+                return
+            obj_orm = K8sObjectORM(
+                name=obj.name,
+                namespace=obj.namespace or "default",
+                group=obj.gvk.group,
+                kind=obj.gvk.kind,
+                version=obj.gvk.version,
+                manifest=obj.manifest.to_dict(),
+                cluster=str(obj.cluster),
+                user_id=obj.user_id,
+            )
+            session.add(obj_orm)
+            await session.commit()
+            return
+
+    async def delete(self, meta: K8sObjectMeta) -> None:
+        """Delete an object from the cache."""
+        async with self.__session_maker() as session, session.begin():
+            obj_orm = await self.__get(meta, session)
+            if obj_orm is not None:
+                await session.delete(obj_orm)
+
+    async def get(self, meta: K8sObjectMeta) -> K8sObject | None:
+        """Get a single object from the cache."""
+        async with self.__session_maker() as session, session.begin():
+            obj_orm = await self.__get(meta, session)
+            if obj_orm is not None:
+                return meta.with_manifest(obj_orm.manifest)
+
+        return None
+
+    async def list(self, _filter: K8sObjectFilter) -> AsyncIterable[K8sObject]:
+        """List objects from the cache."""
+        async with self.__session_maker() as session, session.begin():
+            stmt = self.__get_where_clauses(_filter)
+            async for res in await session.stream_scalars(stmt):
+                yield res.dump()
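
The `label_selector` branch above drops down to textual SQL because it needs Postgres JSONB containment (`@>`) on the nested `metadata.labels` path of the stored manifest. A hypothetical usage sketch; the GVK and the label key/value are illustrative:

    from renku_data_services.k8s.models import GVK, K8sObjectFilter
    from renku_data_services.k8s_watcher import K8sDbCache

    async def find_labelled(cache: K8sDbCache) -> None:
        # Selects rows where manifest -> 'metadata' -> 'labels' @> '{"example.io/owner": "user-1"}'
        flt = K8sObjectFilter(
            gvk=GVK(group="amalthea.dev", version="v1alpha1", kind="AmaltheaSession"),
            label_selector={"example.io/owner": "user-1"},
        )
        async for obj in cache.list(flt):
            print(obj.name)
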
+ """ + + def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] + return func.lower(self.__clause_element__()) == func.lower(other) + + +class CaseInsensitiveNullableComparator(Comparator[str | None]): + """Enables case insensitive comparison of nullable string fields.""" + + def __eq__(self, other: Any) -> ColumnElement[bool]: # type: ignore[override] + return func.lower(self.__clause_element__()) == func.lower(other) + + +class K8sObjectORM(BaseORM): + """Representation of a k8s resource.""" + + __tablename__ = "k8s_objects" + + id: Mapped[ULID] = mapped_column( + "id", + ULIDType, + primary_key=True, + init=False, + default_factory=lambda: str(ULID()), + server_default=text("generate_ulid()"), + ) + name: Mapped[str] = mapped_column("name", String(), index=True, unique=True) + namespace: Mapped[str] = mapped_column("namespace", String(), index=True) + creation_date: Mapped[datetime] = mapped_column( + "creation_date", + DateTime(timezone=True), + server_default=func.now(), + init=False, + default=None, + ) + updated_at: Mapped[datetime] = mapped_column( + "updated_at", + DateTime(timezone=True), + server_default=func.now(), + server_onupdate=func.now(), + init=False, + default=None, + ) + manifest: Mapped[dict[str, Any]] = mapped_column("manifest", JSONB) + deleted: Mapped[bool] = mapped_column(default=False, init=False, index=True) + group: Mapped[str | None] = mapped_column(index=True, nullable=True) + version: Mapped[str] = mapped_column(index=True) + kind: Mapped[str] = mapped_column(index=True) + cluster: Mapped[str] = mapped_column(index=True) + user_id: Mapped[str] = mapped_column(String(), index=True) + + @hybrid_property + def group_insensitive(self) -> str | None: + """Case insensitive version of group.""" + if self.group: + return self.group.lower() + return None + + @hybrid_property + def kind_insensitive(self) -> str: + """Case insensitive version of kind.""" + return self.kind.lower() + + @hybrid_property + def version_insensitive(self) -> str: + """Case insensitive version of version.""" + return self.version.lower() + + @group_insensitive.inplace.comparator + @classmethod + def _group_insensitive_comparator(cls) -> CaseInsensitiveNullableComparator: + if cls.group is None: + raise errors.ProgrammingError(message="Cannot compare group with = if group is None") + return CaseInsensitiveNullableComparator(cls.group) + + @kind_insensitive.inplace.comparator + @classmethod + def _kind_insensitive_comparator(cls) -> CaseInsensitiveComparator: + return CaseInsensitiveComparator(cls.kind) + + @version_insensitive.inplace.comparator + @classmethod + def _version_insensitive_comparator(cls) -> CaseInsensitiveComparator: + return CaseInsensitiveComparator(cls.version) + + def dump(self) -> K8sObject: + """Convert to a k8s object.""" + return K8sObject( + name=self.name, + namespace=self.namespace, + cluster=ClusterId(self.cluster), + gvk=GVK(group=self.group, version=self.version, kind=self.kind), + manifest=Box(self.manifest), + user_id=self.user_id, + ) diff --git a/components/renku_data_services/message_queue/api.spec.yaml b/components/renku_data_services/message_queue/api.spec.yaml deleted file mode 100644 index 77c64b3ad..000000000 --- a/components/renku_data_services/message_queue/api.spec.yaml +++ /dev/null @@ -1,111 +0,0 @@ -openapi: 3.0.2 -info: - title: Renku Data Services API - description: | - This service is the main backend for Renku. 
It provides information about users, projects, - cloud storage, access to compute resources and many other things. - version: v1 -servers: - - url: /api/data -paths: - /message_queue/reprovision: - post: - summary: Start a new reprovisioning - description: Only a single reprovisioning is active at any time - responses: - "201": - description: The reprovisioning is/will be started - content: - application/json: - schema: - $ref: "#/components/schemas/Reprovisioning" - "409": - description: A reprovisioning is already started - default: - $ref: "#/components/responses/Error" - tags: - - message_queue - get: - summary: Return status of reprovisioning - responses: - "200": - description: Status of reprovisioning if there's one in progress - content: - application/json: - schema: - $ref: "#/components/schemas/ReprovisioningStatus" - "404": - description: There's no active reprovisioning - content: - application/json: - schema: - $ref: "#/components/schemas/ErrorResponse" - default: - $ref: "#/components/responses/Error" - tags: - - message_queue - delete: - summary: Stop an active reprovisioning - responses: - "204": - description: The reprovisioning was stopped or there was no one in progress - default: - $ref: "#/components/responses/Error" - tags: - - message_queue - -components: - schemas: - Reprovisioning: - description: A reprovisioning - type: object - properties: - id: - $ref: "#/components/schemas/Ulid" - start_date: - description: The date and time the reprovisioning was started (in UTC and ISO-8601 format) - type: string - format: date-time - example: "2023-11-01T17:32:28Z" - required: - - id - - start_date - ReprovisioningStatus: - description: Status of a reprovisioning - allOf: - - $ref: "#/components/schemas/Reprovisioning" - Ulid: - description: ULID identifier - type: string - minLength: 26 - maxLength: 26 - pattern: "^[0-7][0-9A-HJKMNP-TV-Z]{25}$" # This is case-insensitive - ErrorResponse: - type: object - properties: - error: - type: object - properties: - code: - type: integer - minimum: 0 - exclusiveMinimum: true - example: 1404 - detail: - type: string - example: A more detailed optional message showing what the problem was - message: - type: string - example: Something went wrong - please try again later - required: - - code - - message - required: - - error - responses: - Error: - description: The schema for all 4xx and 5xx responses - content: - application/json: - schema: - $ref: "#/components/schemas/ErrorResponse" diff --git a/components/renku_data_services/message_queue/apispec.py b/components/renku_data_services/message_queue/apispec.py deleted file mode 100644 index 00c397552..000000000 --- a/components/renku_data_services/message_queue/apispec.py +++ /dev/null @@ -1,42 +0,0 @@ -# generated by datamodel-codegen: -# filename: api.spec.yaml -# timestamp: 2024-10-18T11:06:25+00:00 - -from __future__ import annotations - -from datetime import datetime -from typing import Optional - -from pydantic import Field -from renku_data_services.message_queue.apispec_base import BaseAPISpec - - -class Error(BaseAPISpec): - code: int = Field(..., example=1404, gt=0) - detail: Optional[str] = Field( - None, example="A more detailed optional message showing what the problem was" - ) - message: str = Field(..., example="Something went wrong - please try again later") - - -class ErrorResponse(BaseAPISpec): - error: Error - - -class Reprovisioning(BaseAPISpec): - id: str = Field( - ..., - description="ULID identifier", - max_length=26, - min_length=26, - 
pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", - ) - start_date: datetime = Field( - ..., - description="The date and time the reprovisioning was started (in UTC and ISO-8601 format)", - example="2023-11-01T17:32:28Z", - ) - - -class ReprovisioningStatus(Reprovisioning): - pass diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/__init__.py b/components/renku_data_services/message_queue/avro_models/io/renku/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/__init__.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/__init__.py deleted file mode 100644 index 4f85ab279..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from renku_data_services.message_queue.avro_models.io.renku.events.header import Header diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/header.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/header.py deleted file mode 100644 index 8d887fafb..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/header.py +++ /dev/null @@ -1,81 +0,0 @@ -from dataclasses import asdict, dataclass -from datetime import datetime -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class Header(AvroModel): - """ - common headers for messages - """ - source: str - type: str - dataContentType: str - schemaVersion: str - time: datetime # logicalType: timestamp-millis - requestId: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "Header", - "namespace": "io.renku.events", - "doc": "common headers for messages", - "fields": [ - { - "name": "source", - "type": "string" - }, - { - "name": "type", - "type": "string" - }, - { - "name": "dataContentType", - "type": "string" - }, - { - "name": "schemaVersion", - "type": "string" - }, - { - "name": "time", - "type": { - "type": "long", - "logicalType": "timestamp-millis" - } - }, - { - "name": "requestId", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'Header': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/__init__.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/__init__.py deleted file mode 100644 index 0b849d58f..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -from renku_data_services.message_queue.avro_models.io.renku.events.v1.header import Header -from renku_data_services.message_queue.avro_models.io.renku.events.v1.project_member_role import ProjectMemberRole -from renku_data_services.message_queue.avro_models.io.renku.events.v1.project_authorization_removed import ProjectAuthorizationRemoved -from renku_data_services.message_queue.avro_models.io.renku.events.v1.project_authorization_updated import ProjectAuthorizationUpdated -from renku_data_services.message_queue.avro_models.io.renku.events.v1.visibility import Visibility -from renku_data_services.message_queue.avro_models.io.renku.events.v1.project_authorization_added import ProjectAuthorizationAdded -from renku_data_services.message_queue.avro_models.io.renku.events.v1.project_removed import ProjectRemoved -from renku_data_services.message_queue.avro_models.io.renku.events.v1.project_updated import ProjectUpdated -from renku_data_services.message_queue.avro_models.io.renku.events.v1.user_added import UserAdded -from renku_data_services.message_queue.avro_models.io.renku.events.v1.user_removed import UserRemoved -from renku_data_services.message_queue.avro_models.io.renku.events.v1.user_updated import UserUpdated -from renku_data_services.message_queue.avro_models.io.renku.events.v1.project_created import ProjectCreated diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/header.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/header.py deleted file mode 100644 index 45c6db993..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/header.py +++ /dev/null @@ -1,81 +0,0 @@ -from dataclasses import asdict, dataclass -from datetime import datetime -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class Header(AvroModel): - """ - common headers for messages - """ - source: str - type: str - dataContentType: str - schemaVersion: str - time: datetime # logicalType: timestamp-millis - requestId: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "Header", - "namespace": "io.renku.events.v1", - "doc": "common headers for messages", - "fields": [ - { - "name": "source", - "type": "string" - }, - { - "name": "type", - "type": "string" - }, - { - "name": "dataContentType", - "type": "string" - }, - { - "name": "schemaVersion", - "type": "string" - }, - { - "name": "time", - "type": { - "type": "long", - "logicalType": "timestamp-millis" - } - }, - { - "name": "requestId", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. 
- """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'Header': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_added.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_added.py deleted file mode 100644 index 5611c6bba..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_added.py +++ /dev/null @@ -1,64 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from renku_data_services.message_queue.avro_models.io.renku.events.v1 import \ - ProjectMemberRole -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectAuthorizationAdded(AvroModel): - """ - Event raised when an authorization for a project is added for a user - """ - projectId: str - userId: str - role: ProjectMemberRole - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectAuthorizationAdded", - "namespace": "io.renku.events.v1", - "doc": "Event raised when an authorization for a project is added for a user", - "fields": [ - { - "name": "projectId", - "type": "string" - }, - { - "name": "userId", - "type": "string" - }, - { - "name": "role", - "type": "io.renku.events.v1.ProjectMemberRole" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectAuthorizationAdded': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_removed.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_removed.py deleted file mode 100644 index 2e1cf0085..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_removed.py +++ /dev/null @@ -1,57 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectAuthorizationRemoved(AvroModel): - """ - Event raised when an authorization for a project is removed for a user - """ - projectId: str - userId: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectAuthorizationRemoved", - "namespace": "io.renku.events.v1", - "doc": "Event raised when an authorization for a project is removed for a user", - "fields": [ - { - "name": "projectId", - "type": "string" - }, - { - "name": "userId", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. 
- """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectAuthorizationRemoved': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_updated.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_updated.py deleted file mode 100644 index be2eca0c8..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_authorization_updated.py +++ /dev/null @@ -1,64 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from renku_data_services.message_queue.avro_models.io.renku.events.v1 import \ - ProjectMemberRole -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectAuthorizationUpdated(AvroModel): - """ - Event raised when an authorization for a project is modified - """ - projectId: str - userId: str - role: ProjectMemberRole - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectAuthorizationUpdated", - "namespace": "io.renku.events.v1", - "doc": "Event raised when an authorization for a project is modified", - "fields": [ - { - "name": "projectId", - "type": "string" - }, - { - "name": "userId", - "type": "string" - }, - { - "name": "role", - "type": "io.renku.events.v1.ProjectMemberRole" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectAuthorizationUpdated': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_created.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_created.py deleted file mode 100644 index d08bee4c8..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_created.py +++ /dev/null @@ -1,109 +0,0 @@ -from dataclasses import asdict, dataclass -from datetime import datetime -from typing import ClassVar, Dict, List, Optional - -from dataclasses_avroschema import AvroModel -from renku_data_services.message_queue.avro_models.io.renku.events.v1 import \ - Visibility -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectCreated(AvroModel): - """ - Event raised when a new project is created - """ - id: str - name: str - slug: str - repositories: List[str] - visibility: Visibility - description: Optional[str] - keywords: List[str] - createdBy: str - creationDate: datetime # logicalType: timestamp-millis - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectCreated", - "namespace": "io.renku.events.v1", - "doc": "Event raised when a new project is created", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "name", - "type": "string" - }, - { - "name": "slug", - "type": "string" - }, - { - "name": "repositories", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "visibility", - "type": "io.renku.events.v1.Visibility" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "keywords", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "createdBy", - "type": "string" - }, - { - "name": "creationDate", - "type": { - "type": "long", - "logicalType": "timestamp-millis" - } - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectCreated': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_member_role.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_member_role.py deleted file mode 100644 index a98c4f0c2..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_member_role.py +++ /dev/null @@ -1,9 +0,0 @@ -from enum import Enum - - -class ProjectMemberRole(Enum): - """ - Access role of a project member - """ - MEMBER = 'MEMBER' - OWNER = 'OWNER' diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_removed.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_removed.py deleted file mode 100644 index 808254534..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_removed.py +++ /dev/null @@ -1,52 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectRemoved(AvroModel): - """ - Event raised when a project is removed - """ - id: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectRemoved", - "namespace": "io.renku.events.v1", - "doc": "Event raised when a project is removed", - "fields": [ - { - "name": "id", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectRemoved': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_updated.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_updated.py deleted file mode 100644 index e5768f994..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/project_updated.py +++ /dev/null @@ -1,95 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict, List, Optional - -from dataclasses_avroschema import AvroModel -from renku_data_services.message_queue.avro_models.io.renku.events.v1 import \ - Visibility -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectUpdated(AvroModel): - """ - Event raised when a project is updated - """ - id: str - name: str - slug: str - repositories: List[str] - visibility: Visibility - description: Optional[str] - keywords: List[str] - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectUpdated", - "namespace": "io.renku.events.v1", - "doc": "Event raised when a project is updated", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "name", - "type": "string" - }, - { - "name": "slug", - "type": "string" - }, - { - "name": "repositories", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "visibility", - "type": "io.renku.events.v1.Visibility" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "keywords", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectUpdated': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_added.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_added.py deleted file mode 100644 index 5ce4fe8a9..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_added.py +++ /dev/null @@ -1,76 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict, Optional - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class UserAdded(AvroModel): - """ - Event raised when a new user is added - """ - id: str - firstName: Optional[str] - lastName: Optional[str] - email: Optional[str] - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "UserAdded", - "namespace": "io.renku.events.v1", - "doc": "Event raised when a new user is added", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "firstName", - "type": [ - "null", - "string" - ] - }, - { - "name": "lastName", - "type": [ - "null", - "string" - ] - }, - { - "name": "email", - "type": [ - "null", - "string" - ] - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'UserAdded': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_removed.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_removed.py deleted file mode 100644 index 6e502c1f2..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_removed.py +++ /dev/null @@ -1,52 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class UserRemoved(AvroModel): - """ - Event raised when a user is removed - """ - id: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "UserRemoved", - "namespace": "io.renku.events.v1", - "doc": "Event raised when a user is removed", - "fields": [ - { - "name": "id", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'UserRemoved': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_updated.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_updated.py deleted file mode 100644 index 46a376637..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/user_updated.py +++ /dev/null @@ -1,76 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict, Optional - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class UserUpdated(AvroModel): - """ - Event raised when a user is updated - """ - id: str - firstName: Optional[str] - lastName: Optional[str] - email: Optional[str] - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "UserUpdated", - "namespace": "io.renku.events.v1", - "doc": "Event raised when a user is updated", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "firstName", - "type": [ - "null", - "string" - ] - }, - { - "name": "lastName", - "type": [ - "null", - "string" - ] - }, - { - "name": "email", - "type": [ - "null", - "string" - ] - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'UserUpdated': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/visibility.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/visibility.py deleted file mode 100644 index 1511235b5..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v1/visibility.py +++ /dev/null @@ -1,9 +0,0 @@ -from enum import Enum - - -class Visibility(Enum): - """ - Visibility setting - """ - PUBLIC = 'PUBLIC' - PRIVATE = 'PRIVATE' diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/__init__.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/__init__.py deleted file mode 100644 index 1d1afd3e4..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -from renku_data_services.message_queue.avro_models.io.renku.events.v2.group_added import GroupAdded -from renku_data_services.message_queue.avro_models.io.renku.events.v2.member_role import MemberRole -from renku_data_services.message_queue.avro_models.io.renku.events.v2.group_member_removed import GroupMemberRemoved -from renku_data_services.message_queue.avro_models.io.renku.events.v2.group_member_updated import GroupMemberUpdated -from renku_data_services.message_queue.avro_models.io.renku.events.v2.group_removed import GroupRemoved -from renku_data_services.message_queue.avro_models.io.renku.events.v2.group_updated import GroupUpdated -from renku_data_services.message_queue.avro_models.io.renku.events.v2.group_member_added import GroupMemberAdded -from renku_data_services.message_queue.avro_models.io.renku.events.v2.visibility import Visibility -from renku_data_services.message_queue.avro_models.io.renku.events.v2.project_member_added import ProjectMemberAdded -from renku_data_services.message_queue.avro_models.io.renku.events.v2.project_member_removed import ProjectMemberRemoved -from renku_data_services.message_queue.avro_models.io.renku.events.v2.project_member_updated import ProjectMemberUpdated -from renku_data_services.message_queue.avro_models.io.renku.events.v2.project_removed import ProjectRemoved -from renku_data_services.message_queue.avro_models.io.renku.events.v2.project_updated import ProjectUpdated -from renku_data_services.message_queue.avro_models.io.renku.events.v2.reprovisioning_finished import ReprovisioningFinished -from renku_data_services.message_queue.avro_models.io.renku.events.v2.reprovisioning_started import ReprovisioningStarted -from renku_data_services.message_queue.avro_models.io.renku.events.v2.user_added import UserAdded -from renku_data_services.message_queue.avro_models.io.renku.events.v2.user_removed import UserRemoved -from renku_data_services.message_queue.avro_models.io.renku.events.v2.user_updated import UserUpdated -from renku_data_services.message_queue.avro_models.io.renku.events.v2.project_created import ProjectCreated diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_added.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_added.py deleted file mode 100644 index fdfbbdb46..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_added.py +++ /dev/null @@ -1,70 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict, Optional - -from dataclasses_avroschema import AvroModel -from undictify import 
type_checked_constructor - - -@type_checked_constructor() -@dataclass -class GroupAdded(AvroModel): - """ - Event raised when a new group is added - """ - id: str - name: str - description: Optional[str] - namespace: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "GroupAdded", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a new group is added", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "name", - "type": "string" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "namespace", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'GroupAdded': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_added.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_added.py deleted file mode 100644 index 6a7a50af1..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_added.py +++ /dev/null @@ -1,64 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from renku_data_services.message_queue.avro_models.io.renku.events.v2 import \ - MemberRole -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class GroupMemberAdded(AvroModel): - """ - Event raised when a member is added to a group - """ - groupId: str - userId: str - role: MemberRole - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "GroupMemberAdded", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a member is added to a group", - "fields": [ - { - "name": "groupId", - "type": "string" - }, - { - "name": "userId", - "type": "string" - }, - { - "name": "role", - "type": "io.renku.events.v2.MemberRole" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'GroupMemberAdded': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_removed.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_removed.py deleted file mode 100644 index f15f99ccb..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_removed.py +++ /dev/null @@ -1,57 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class GroupMemberRemoved(AvroModel): - """ - Event raised when a member is removed from a group - """ - groupId: str - userId: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "GroupMemberRemoved", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a member is removed from a group", - "fields": [ - { - "name": "groupId", - "type": "string" - }, - { - "name": "userId", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'GroupMemberRemoved': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_updated.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_updated.py deleted file mode 100644 index 530cb3ab4..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_member_updated.py +++ /dev/null @@ -1,64 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from renku_data_services.message_queue.avro_models.io.renku.events.v2 import \ - MemberRole -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class GroupMemberUpdated(AvroModel): - """ - Event raised when a member is updated in a group - """ - groupId: str - userId: str - role: MemberRole - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "GroupMemberUpdated", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a member is updated in a group", - "fields": [ - { - "name": "groupId", - "type": "string" - }, - { - "name": "userId", - "type": "string" - }, - { - "name": "role", - "type": "io.renku.events.v2.MemberRole" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'GroupMemberUpdated': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_removed.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_removed.py deleted file mode 100644 index ae5d0921c..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_removed.py +++ /dev/null @@ -1,52 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class GroupRemoved(AvroModel): - """ - Event raised when a group is removed - """ - id: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "GroupRemoved", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a group is removed", - "fields": [ - { - "name": "id", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'GroupRemoved': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_updated.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_updated.py deleted file mode 100644 index b4c1c6970..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/group_updated.py +++ /dev/null @@ -1,70 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict, Optional - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class GroupUpdated(AvroModel): - """ - Event raised when a group is updated - """ - id: str - name: str - description: Optional[str] - namespace: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "GroupUpdated", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a group is updated", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "name", - "type": "string" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "namespace", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'GroupUpdated': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/member_role.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/member_role.py deleted file mode 100644 index 49a5557a4..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/member_role.py +++ /dev/null @@ -1,10 +0,0 @@ -from enum import Enum - - -class MemberRole(Enum): - """ - Access role of a member - """ - OWNER = 'OWNER' - EDITOR = 'EDITOR' - VIEWER = 'VIEWER' diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_created.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_created.py deleted file mode 100644 index 7045945d3..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_created.py +++ /dev/null @@ -1,114 +0,0 @@ -from dataclasses import asdict, dataclass -from datetime import datetime -from typing import ClassVar, Dict, List, Optional - -from dataclasses_avroschema import AvroModel -from renku_data_services.message_queue.avro_models.io.renku.events.v2 import \ - Visibility -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectCreated(AvroModel): - """ - Event raised when a new project is created - """ - id: str - name: str - namespace: str - slug: str - repositories: List[str] - visibility: Visibility - description: Optional[str] - keywords: List[str] - createdBy: str - creationDate: datetime # logicalType: timestamp-millis - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectCreated", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a new project is created", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "name", - "type": "string" - }, - { - "name": "namespace", - "type": "string" - }, - { - "name": "slug", - "type": "string" - }, - { - "name": "repositories", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "visibility", - "type": "io.renku.events.v2.Visibility" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "keywords", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "createdBy", - "type": "string" - }, - { - "name": "creationDate", - "type": { - "type": "long", - "logicalType": "timestamp-millis" - } - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectCreated': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_added.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_added.py deleted file mode 100644 index 006fff718..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_added.py +++ /dev/null @@ -1,64 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from renku_data_services.message_queue.avro_models.io.renku.events.v2 import \ - MemberRole -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectMemberAdded(AvroModel): - """ - Event raised when a user is added to a project - """ - projectId: str - userId: str - role: MemberRole - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectMemberAdded", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a user is added to a project", - "fields": [ - { - "name": "projectId", - "type": "string" - }, - { - "name": "userId", - "type": "string" - }, - { - "name": "role", - "type": "io.renku.events.v2.MemberRole" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectMemberAdded': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_removed.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_removed.py deleted file mode 100644 index b3dc32603..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_removed.py +++ /dev/null @@ -1,57 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectMemberRemoved(AvroModel): - """ - Event raised when a user is removed from a project - """ - projectId: str - userId: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectMemberRemoved", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a user is removed from a project", - "fields": [ - { - "name": "projectId", - "type": "string" - }, - { - "name": "userId", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectMemberRemoved': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_updated.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_updated.py deleted file mode 100644 index 0ae546770..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_member_updated.py +++ /dev/null @@ -1,64 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from renku_data_services.message_queue.avro_models.io.renku.events.v2 import \ - MemberRole -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectMemberUpdated(AvroModel): - """ - Event raised when user role on a project is modified - """ - projectId: str - userId: str - role: MemberRole - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectMemberUpdated", - "namespace": "io.renku.events.v2", - "doc": "Event raised when user role on a project is modified", - "fields": [ - { - "name": "projectId", - "type": "string" - }, - { - "name": "userId", - "type": "string" - }, - { - "name": "role", - "type": "io.renku.events.v2.MemberRole" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectMemberUpdated': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_removed.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_removed.py deleted file mode 100644 index ee9051be6..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_removed.py +++ /dev/null @@ -1,52 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectRemoved(AvroModel): - """ - Event raised when a project is removed - """ - id: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectRemoved", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a project is removed", - "fields": [ - { - "name": "id", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectRemoved': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. 
- """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_updated.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_updated.py deleted file mode 100644 index 4f6d30237..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/project_updated.py +++ /dev/null @@ -1,100 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict, List, Optional - -from dataclasses_avroschema import AvroModel -from renku_data_services.message_queue.avro_models.io.renku.events.v2 import \ - Visibility -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ProjectUpdated(AvroModel): - """ - Event raised when a project is updated - """ - id: str - name: str - namespace: str - slug: str - repositories: List[str] - visibility: Visibility - description: Optional[str] - keywords: List[str] - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ProjectUpdated", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a project is updated", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "name", - "type": "string" - }, - { - "name": "namespace", - "type": "string" - }, - { - "name": "slug", - "type": "string" - }, - { - "name": "repositories", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "visibility", - "type": "io.renku.events.v2.Visibility" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "keywords", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ProjectUpdated': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/reprovisioning_finished.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/reprovisioning_finished.py deleted file mode 100644 index f36fdc534..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/reprovisioning_finished.py +++ /dev/null @@ -1,52 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ReprovisioningFinished(AvroModel): - """ - Event raised when a reprovisioning of data events finished - """ - id: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ReprovisioningFinished", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a reprovisioning of data events finished", - "fields": [ - { - "name": "id", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. 
- """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ReprovisioningFinished': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/reprovisioning_started.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/reprovisioning_started.py deleted file mode 100644 index b14227084..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/reprovisioning_started.py +++ /dev/null @@ -1,52 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class ReprovisioningStarted(AvroModel): - """ - Event raised when a reprovisioning of data events started - """ - id: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "ReprovisioningStarted", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a reprovisioning of data events started", - "fields": [ - { - "name": "id", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'ReprovisioningStarted': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_added.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_added.py deleted file mode 100644 index 85ad2882c..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_added.py +++ /dev/null @@ -1,81 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict, Optional - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class UserAdded(AvroModel): - """ - Event raised when a new user is added - """ - id: str - firstName: Optional[str] - lastName: Optional[str] - email: Optional[str] - namespace: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "UserAdded", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a new user is added", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "firstName", - "type": [ - "null", - "string" - ] - }, - { - "name": "lastName", - "type": [ - "null", - "string" - ] - }, - { - "name": "email", - "type": [ - "null", - "string" - ] - }, - { - "name": "namespace", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. 
- """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'UserAdded': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_removed.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_removed.py deleted file mode 100644 index 3ab8dc619..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_removed.py +++ /dev/null @@ -1,52 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class UserRemoved(AvroModel): - """ - Event raised when a user is removed - """ - id: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "UserRemoved", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a user is removed", - "fields": [ - { - "name": "id", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. - """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'UserRemoved': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_updated.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_updated.py deleted file mode 100644 index c11929da3..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/user_updated.py +++ /dev/null @@ -1,81 +0,0 @@ -from dataclasses import asdict, dataclass -from typing import ClassVar, Dict, Optional - -from dataclasses_avroschema import AvroModel -from undictify import type_checked_constructor - - -@type_checked_constructor() -@dataclass -class UserUpdated(AvroModel): - """ - Event raised when a user is updated - """ - id: str - firstName: Optional[str] - lastName: Optional[str] - email: Optional[str] - namespace: str - - #: The Avro Schema associated to this class - _schema: ClassVar[str] = """{ - "type": "record", - "name": "UserUpdated", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a user is updated", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "firstName", - "type": [ - "null", - "string" - ] - }, - { - "name": "lastName", - "type": [ - "null", - "string" - ] - }, - { - "name": "email", - "type": [ - "null", - "string" - ] - }, - { - "name": "namespace", - "type": "string" - } - ] - }""" - - def serialize_json(self) -> str: - """ - Returns an Avro-json representation of this instance. - """ - return self.serialize(serialization_type='avro-json').decode('ascii') - - def to_dict(self) -> Dict: - """ - Returns a dictionary version of this instance. 
- """ - return asdict(self) - - @classmethod - def from_dict( - cls, - the_dict: Dict - ) -> 'UserUpdated': - """ - Returns an instance of this class from a dictionary. - - :param the_dict: The dictionary from which to create an instance of this class. - """ - return cls(**the_dict) diff --git a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/visibility.py b/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/visibility.py deleted file mode 100644 index 1511235b5..000000000 --- a/components/renku_data_services/message_queue/avro_models/io/renku/events/v2/visibility.py +++ /dev/null @@ -1,9 +0,0 @@ -from enum import Enum - - -class Visibility(Enum): - """ - Visibility setting - """ - PUBLIC = 'PUBLIC' - PRIVATE = 'PRIVATE' diff --git a/components/renku_data_services/message_queue/blueprints.py b/components/renku_data_services/message_queue/blueprints.py deleted file mode 100644 index 596751d07..000000000 --- a/components/renku_data_services/message_queue/blueprints.py +++ /dev/null @@ -1,80 +0,0 @@ -"""Search/reprovisioning blueprint.""" - -from collections.abc import Callable -from dataclasses import dataclass - -from sanic import HTTPResponse, Request, json -from sanic.response import JSONResponse -from sqlalchemy.ext.asyncio import AsyncSession - -import renku_data_services.base_models as base_models -from renku_data_services.authz.authz import Authz -from renku_data_services.base_api.auth import authenticate, only_admins -from renku_data_services.base_api.blueprint import BlueprintFactoryResponse, CustomBlueprint -from renku_data_services.message_queue.core import reprovision -from renku_data_services.message_queue.db import ReprovisioningRepository -from renku_data_services.namespace.db import GroupRepository -from renku_data_services.project.db import ProjectRepository -from renku_data_services.users.db import UserRepo - - -@dataclass(kw_only=True) -class MessageQueueBP(CustomBlueprint): - """Handlers for message queue.""" - - authenticator: base_models.Authenticator - session_maker: Callable[..., AsyncSession] - reprovisioning_repo: ReprovisioningRepository - user_repo: UserRepo - group_repo: GroupRepository - project_repo: ProjectRepository - authz: Authz - - def post(self) -> BlueprintFactoryResponse: - """Start a new reprovisioning.""" - - @authenticate(self.authenticator) - @only_admins - async def _post(request: Request, user: base_models.APIUser) -> HTTPResponse | JSONResponse: - reprovisioning = await self.reprovisioning_repo.start() - - request.app.add_task( - reprovision( - session_maker=self.session_maker, - requested_by=user, - reprovisioning=reprovisioning, - reprovisioning_repo=self.reprovisioning_repo, - user_repo=self.user_repo, - group_repo=self.group_repo, - project_repo=self.project_repo, - authz=self.authz, - ), - name=f"reprovisioning-{reprovisioning.id}", - ) - - return json({"id": str(reprovisioning.id), "start_date": reprovisioning.start_date.isoformat()}, 201) - - return "/message_queue/reprovision", ["POST"], _post - - def get_status(self) -> BlueprintFactoryResponse: - """Get reprovisioning status.""" - - @authenticate(self.authenticator) - async def _get_status(_: Request, __: base_models.APIUser) -> JSONResponse | HTTPResponse: - reprovisioning = await self.reprovisioning_repo.get_active_reprovisioning() - if not reprovisioning: - return HTTPResponse(status=404) - return json({"id": str(reprovisioning.id), "start_date": reprovisioning.start_date.isoformat()}) - - return "/message_queue/reprovision", 
["GET"], _get_status - - def delete(self) -> BlueprintFactoryResponse: - """Stop reprovisioning (if any).""" - - @authenticate(self.authenticator) - @only_admins - async def _delete(_: Request, __: base_models.APIUser) -> HTTPResponse: - await self.reprovisioning_repo.stop() - return HTTPResponse(status=204) - - return "/message_queue/reprovision", ["DELETE"], _delete diff --git a/components/renku_data_services/message_queue/config.py b/components/renku_data_services/message_queue/config.py deleted file mode 100644 index 09f25a864..000000000 --- a/components/renku_data_services/message_queue/config.py +++ /dev/null @@ -1,79 +0,0 @@ -"""Configuration for message queue client.""" - -import os -import random -from dataclasses import dataclass, field - -import redis.asyncio as redis - - -@dataclass -class RedisConfig: - """Message queue configuration.""" - - password: str = field(repr=False) - is_sentinel: bool = False - host: str = "renku-redis" - port: int = 6379 - database: int = 0 - sentinel_master_set: str = "mymaster" - - _connection: redis.Redis | None = None - - @classmethod - def from_env(cls, prefix: str = "") -> "RedisConfig": - """Create a config from environment variables.""" - is_sentinel = os.environ.get(f"{prefix}REDIS_IS_SENTINEL", "false") - host = os.environ.get(f"{prefix}REDIS_HOST", "localhost") - port = os.environ.get(f"{prefix}REDIS_PORT", 6379) - database = os.environ.get(f"{prefix}REDIS_DATABASE", 0) - sentinel_master_set = os.environ.get(f"{prefix}REDIS_MASTER_SET", "mymaster") - password = os.environ.get(f"{prefix}REDIS_PASSWORD", "") - - return cls( - host=host, - port=int(port), - database=int(database), - password=password, - sentinel_master_set=sentinel_master_set, - is_sentinel=is_sentinel.lower() == "true", - ) - - @classmethod - def fake(cls) -> "RedisConfig": - """Create a config using fake redis.""" - import fakeredis - - instance = cls(password="") # nosec B106 - # by default, fake redis shares instances across instantiations. We want a new instance per test, - # so we change the port. 
- instance._connection = fakeredis.FakeAsyncRedis(port=random.randint(1000, 65535)) # nosec: B311 - return instance - - @property - def redis_connection(self) -> redis.Redis: - """Get a redis connection.""" - if self._connection is None: - if self.is_sentinel: - sentinel = redis.Sentinel([(self.host, self.port)], sentinel_kwargs={"password": self.password}) - self._connection = sentinel.master_for( - self.sentinel_master_set, - db=self.database, - password=self.password, - retry_on_timeout=True, - health_check_interval=60, - ) - else: - self._connection = redis.Redis( - host=self.host, - port=self.port, - db=self.database, - password=self.password, - retry_on_timeout=True, - health_check_interval=60, - ) - return self._connection - - def reset_redis_connection(self) -> None: - """Forces a full reconnect to redis.""" - self._connection = None diff --git a/components/renku_data_services/message_queue/converters.py b/components/renku_data_services/message_queue/converters.py deleted file mode 100644 index e6c48d3bf..000000000 --- a/components/renku_data_services/message_queue/converters.py +++ /dev/null @@ -1,389 +0,0 @@ -"""Converter of models to Avro schemas for events.""" - -from typing import Final, TypeVar, cast - -from dataclasses_avroschema import AvroModel - -from renku_data_services.authz import models as authz_models -from renku_data_services.errors import errors -from renku_data_services.message_queue import events -from renku_data_services.message_queue.avro_models.io.renku.events import v2 -from renku_data_services.message_queue.models import Event -from renku_data_services.namespace import models as group_models -from renku_data_services.project import models as project_models -from renku_data_services.users import models as user_models - -QUEUE_NAME: Final[str] = "data_service.all_events" -EventType = TypeVar("EventType", type[AvroModel], type[events.AmbiguousEvent], covariant=True) - - -def make_event(message_type: str, payload: AvroModel) -> Event: - """Create an event.""" - return Event.create(QUEUE_NAME, message_type, payload) - - -def _make_project_member_added_event(member: authz_models.Member) -> Event: - """Create a ProjectMemberAdded event.""" - payload = v2.ProjectMemberAdded( - projectId=str(member.resource_id), userId=member.user_id, role=_convert_member_role(member.role) - ) - return make_event("projectAuth.added", payload) - - -def _make_group_member_added_event(member: authz_models.Member) -> Event: - """Create a GroupMemberAdded event.""" - payload = v2.GroupMemberAdded( - groupId=str(member.resource_id), userId=member.user_id, role=_convert_member_role(member.role) - ) - return make_event("memberGroup.added", payload) - - -class _ProjectEventConverter: - @staticmethod - def _convert_project_visibility(visibility: authz_models.Visibility) -> v2.Visibility: - match visibility: - case authz_models.Visibility.PUBLIC: - return v2.Visibility.PUBLIC - case authz_models.Visibility.PRIVATE: - return v2.Visibility.PRIVATE - case _: - raise errors.EventError( - message=f"Trying to convert an unknown project visibility {visibility} to message visibility" - ) - - @staticmethod - def to_events( - project: project_models.Project | project_models.DeletedProject, event_type: EventType - ) -> list[Event]: - if project.id is None: - raise errors.EventError( - message=f"Cannot create an event of type {event_type} for a project which has no ID" - ) - project_id_str = str(project.id) - match event_type: - case v2.ProjectCreated: - project = cast(project_models.Project, project) - 
return [ - make_event( - "project.created", - v2.ProjectCreated( - id=project_id_str, - name=project.name, - namespace=project.namespace.slug, - slug=project.slug, - repositories=project.repositories, - visibility=_ProjectEventConverter._convert_project_visibility(project.visibility), - description=project.description, - createdBy=project.created_by, - creationDate=project.creation_date, - keywords=project.keywords or [], - ), - ), - make_event( - "projectAuth.added", - v2.ProjectMemberAdded( - projectId=project_id_str, - userId=project.created_by, - role=v2.MemberRole.OWNER, - ), - ), - ] - case v2.ProjectUpdated: - project = cast(project_models.Project, project) - return [ - make_event( - "project.updated", - v2.ProjectUpdated( - id=project_id_str, - name=project.name, - namespace=project.namespace.slug, - slug=project.slug, - repositories=project.repositories, - visibility=_ProjectEventConverter._convert_project_visibility(project.visibility), - description=project.description, - keywords=project.keywords or [], - ), - ) - ] - case v2.ProjectRemoved: - return [make_event("project.removed", v2.ProjectRemoved(id=project_id_str))] - case _: - raise errors.EventError(message=f"Trying to convert a project to an unknown event type {event_type}") - - -class _UserEventConverter: - @staticmethod - def to_events( - user: user_models.UserInfo | user_models.UserInfoUpdate | user_models.DeletedUser, event_type: EventType - ) -> list[Event]: - match event_type: - case v2.UserAdded | events.InsertUserNamespace: - user = cast(user_models.UserInfo, user) - return [ - make_event( - "user.added", - v2.UserAdded( - id=user.id, - firstName=user.first_name, - lastName=user.last_name, - email=user.email, - namespace=user.namespace.slug, - ), - ) - ] - case v2.UserRemoved: - deleted_user = cast(user_models.DeletedUser, user) - return [make_event("user.removed", v2.UserRemoved(id=deleted_user.id))] - case events.UpdateOrInsertUser: - user = cast(user_models.UserInfoUpdate, user) - if user.old is None: - return [ - make_event( - "user.added", - v2.UserAdded( - id=user.new.id, - firstName=user.new.first_name, - lastName=user.new.last_name, - email=user.new.email, - namespace=user.new.namespace.slug, - ), - ) - ] - else: - return [ - make_event( - "user.updated", - v2.UserUpdated( - id=user.new.id, - firstName=user.new.first_name, - lastName=user.new.last_name, - email=user.new.email, - namespace=user.new.namespace.slug, - ), - ) - ] - case _: - raise errors.EventError( - message=f"Trying to convert a user of type {type(user)} to an unknown event type {event_type}" - ) - - -def _convert_member_role(role: authz_models.Role) -> v2.MemberRole: - match role: - case authz_models.Role.EDITOR: - return v2.MemberRole.EDITOR - case authz_models.Role.VIEWER: - return v2.MemberRole.VIEWER - case authz_models.Role.OWNER: - return v2.MemberRole.OWNER - case _: - raise errors.EventError(message=f"Cannot convert role {role} to an event") - - -class _ProjectAuthzEventConverter: - @staticmethod - def to_events(member_changes: list[authz_models.MembershipChange]) -> list[Event]: - output: list[Event] = [] - for change in member_changes: - resource_id = str(change.member.resource_id) - match change.change: - case authz_models.Change.UPDATE: - output.append( - make_event( - "projectAuth.updated", - v2.ProjectMemberUpdated( - projectId=resource_id, - userId=change.member.user_id, - role=_convert_member_role(change.member.role), - ), - ) - ) - case authz_models.Change.REMOVE: - output.append( - make_event( - "projectAuth.removed", - 
v2.ProjectMemberRemoved( - projectId=resource_id, - userId=change.member.user_id, - ), - ) - ) - case authz_models.Change.ADD: - output.append( - _make_project_member_added_event(change.member), - ) - case _: - raise errors.EventError( - message="Trying to convert a project membership change to an unknown event type with " - f"unknown change {change.change}" - ) - return output - - @staticmethod - def to_events_from_event_type(member: authz_models.Member, event_type: type[AvroModel]) -> list[Event]: - match event_type: - case v2.ProjectMemberAdded: - return [ - _make_project_member_added_event(member), - ] - case _: - raise errors.EventError( - message=f"Trying to convert a project member to an unknown event type {event_type}" - ) - - -class _GroupAuthzEventConverter: - @staticmethod - def to_events(member_changes: list[authz_models.MembershipChange]) -> list[Event]: - output: list[Event] = [] - for change in member_changes: - resource_id = str(change.member.resource_id) - match change.change: - case authz_models.Change.UPDATE: - output.append( - make_event( - "memberGroup.updated", - v2.GroupMemberUpdated( - groupId=resource_id, - userId=change.member.user_id, - role=_convert_member_role(change.member.role), - ), - ) - ) - case authz_models.Change.REMOVE: - output.append( - make_event( - "memberGroup.removed", - v2.GroupMemberRemoved( - groupId=resource_id, - userId=change.member.user_id, - ), - ) - ) - case authz_models.Change.ADD: - output.append( - _make_group_member_added_event(change.member), - ) - case _: - raise errors.EventError( - message="Trying to convert a group membership change to an unknown event type with " - f"unknown change {change.change}" - ) - return output - - @staticmethod - def to_events_from_event_type(member: authz_models.Member, event_type: type[AvroModel]) -> list[Event]: - match event_type: - case v2.GroupMemberAdded: - return [ - _make_group_member_added_event(member), - ] - case _: - raise errors.EventError( - message=f"Trying to convert a group member to an unknown event type {event_type}" - ) - - -class _GroupEventConverter: - @staticmethod - def to_events(group: group_models.Group | group_models.DeletedGroup, event_type: EventType) -> list[Event]: - if group.id is None: - raise errors.ProgrammingError( - message="Cannot send group events to the message queue for a group that does not have an ID" - ) - group_id = str(group.id) - match event_type: - case v2.GroupAdded: - group = cast(group_models.Group, group) - return [ - make_event( - "group.added", - v2.GroupAdded( - id=group_id, name=group.name, description=group.description, namespace=group.slug - ), - ), - make_event( - "memberGroup.added", - v2.GroupMemberAdded( - groupId=group_id, - userId=group.created_by, - role=v2.MemberRole.OWNER, - ), - ), - ] - case v2.GroupUpdated: - group = cast(group_models.Group, group) - return [ - make_event( - "group.updated", - v2.GroupUpdated( - id=group_id, name=group.name, description=group.description, namespace=group.slug - ), - ) - ] - case v2.GroupRemoved: - return [make_event("group.removed", v2.GroupRemoved(id=group_id))] - case _: - raise errors.ProgrammingError( - message=f"Received an unknown event type {event_type} when generating group events" - ) - - -_T = TypeVar("_T") - - -class EventConverter: - """Generates events from any type of data service models.""" - - @staticmethod - def to_events(input: _T, event_type: EventType) -> list[Event]: - """Generate an event for a data service model based on an event type.""" - if not input: - return [] - - match 
event_type: - case v2.ProjectCreated: - project = cast(project_models.Project, input) - return _ProjectEventConverter.to_events(project, event_type) - case v2.ProjectUpdated: - project_update = cast(project_models.ProjectUpdate, input) - project = project_update.new - return _ProjectEventConverter.to_events(project, event_type) - case v2.ProjectRemoved: - deleted_project = cast(project_models.DeletedProject, input) - return _ProjectEventConverter.to_events(deleted_project, event_type) - case events.ProjectMembershipChanged: - project_authz = cast(list[authz_models.MembershipChange], input) - return _ProjectAuthzEventConverter.to_events(project_authz) - case v2.ProjectMemberAdded: - project_member = cast(authz_models.Member, input) - return _ProjectAuthzEventConverter.to_events_from_event_type(project_member, event_type) - case v2.GroupAdded | v2.GroupUpdated: - group = cast(group_models.Group, input) - return _GroupEventConverter.to_events(group, event_type) - case v2.GroupRemoved: - deleted_group = cast(group_models.DeletedGroup, input) - return _GroupEventConverter.to_events(deleted_group, event_type) - case events.GroupMembershipChanged: - group_authz = cast(list[authz_models.MembershipChange], input) - return _GroupAuthzEventConverter.to_events(group_authz) - case v2.GroupMemberAdded: - group_member = cast(authz_models.Member, input) - return _GroupAuthzEventConverter.to_events_from_event_type(group_member, event_type) - case v2.UserAdded: - user_with_namespace = cast(user_models.UserInfo, input) - return _UserEventConverter.to_events(user_with_namespace, event_type) - case v2.UserRemoved: - deleted_user = cast(user_models.DeletedUser, input) - return _UserEventConverter.to_events(deleted_user, event_type) - case events.UpdateOrInsertUser: - user_with_namespace_update = cast(user_models.UserInfoUpdate, input) - return _UserEventConverter.to_events(user_with_namespace_update, event_type) - case events.InsertUserNamespace: - user_namespaces = cast(list[user_models.UserInfo], input) - output: list[Event] = [] - for namespace in user_namespaces: - output.extend(_UserEventConverter.to_events(namespace, event_type)) - return output - case _: - raise errors.EventError(message=f"Trying to convert an unknown event type {event_type}") diff --git a/components/renku_data_services/message_queue/core.py b/components/renku_data_services/message_queue/core.py deleted file mode 100644 index f7ed1bfde..000000000 --- a/components/renku_data_services/message_queue/core.py +++ /dev/null @@ -1,120 +0,0 @@ -"""Business logic for message queue and events.""" - -import json -from collections.abc import AsyncGenerator, Callable - -from sanic.log import logger -from sqlalchemy import text -from sqlalchemy.ext.asyncio import AsyncSession - -from renku_data_services.authz.authz import Authz, ResourceType -from renku_data_services.base_models import APIUser -from renku_data_services.message_queue.avro_models.io.renku.events import v2 -from renku_data_services.message_queue.converters import EventConverter, EventType, make_event -from renku_data_services.message_queue.db import ReprovisioningRepository -from renku_data_services.message_queue.models import Event, Reprovisioning -from renku_data_services.message_queue.orm import EventORM -from renku_data_services.namespace.db import GroupRepository -from renku_data_services.project.db import ProjectRepository -from renku_data_services.users.db import UserRepo - - -async def reprovision( - session_maker: Callable[..., AsyncSession], - requested_by: APIUser, - 
reprovisioning: Reprovisioning, - reprovisioning_repo: ReprovisioningRepository, - user_repo: UserRepo, - group_repo: GroupRepository, - project_repo: ProjectRepository, - authz: Authz, -) -> None: - """Create and send various data service events required for reprovisioning the message queue.""" - logger.info(f"Starting reprovisioning with ID {reprovisioning.id}") - - async def process_events(records: AsyncGenerator, event_type: EventType) -> None: - """Create and store an event.""" - count = 0 - - async for entity in records: - events = EventConverter.to_events(entity, event_type=event_type) - await store_event(events[0]) - count += 1 - - logger.info(f"Reprovisioned {count} {event_type.__name__} events") - - async def store_event(event: Event) -> None: - """Store an event in the temporary events table.""" - event_orm = EventORM.load(event) - - await session.execute( - text( - """ - INSERT INTO events_temp (queue, payload, timestamp_utc) - VALUES - (:queue, :payload ::JSONB, :timestamp_utc) - """ - ).bindparams( - queue=event_orm.queue, - payload=json.dumps(event_orm.payload), - timestamp_utc=event_orm.timestamp_utc, - ) - ) - - try: - async with session_maker() as session, session.begin(): - # NOTE: The table should be deleted at the end of the transaction. This is just a safety-net around - # (possible) bugs that a reprovisioning might not get cleared. - await session.execute(text("DROP TABLE IF EXISTS events_temp")) - # NOTE: The temporary table will be deleted once the transaction gets committed/aborted. - await session.execute( - text( - """ - CREATE TEMPORARY TABLE events_temp ON COMMIT DROP - AS - SELECT queue, payload, timestamp_utc FROM events.events WHERE FALSE WITH NO DATA - """ - ) - ) - - start_event = make_event( - message_type="reprovisioning.started", payload=v2.ReprovisioningStarted(id=str(reprovisioning.id)) - ) - await store_event(start_event) - - logger.info("Reprovisioning users") - all_users = user_repo.get_all_users(requested_by=requested_by) - await process_events(all_users, v2.UserAdded) - - all_groups = group_repo.get_all_groups(requested_by=requested_by) - await process_events(all_groups, v2.GroupAdded) - - all_groups_members = authz.get_all_members(ResourceType.group) - await process_events(all_groups_members, v2.GroupMemberAdded) - - all_projects = project_repo.get_all_projects(requested_by=requested_by) - await process_events(all_projects, v2.ProjectCreated) - - all_projects_members = authz.get_all_members(ResourceType.project) - await process_events(all_projects_members, v2.ProjectMemberAdded) - - finish_event = make_event( - message_type="reprovisioning.finished", payload=v2.ReprovisioningFinished(id=str(reprovisioning.id)) - ) - await store_event(finish_event) - - await session.execute( - text( - """ - INSERT INTO events.events (queue, payload, timestamp_utc) - SELECT queue, payload, timestamp_utc - FROM events_temp - """ - ) - ) - except Exception as e: - logger.exception(f"An error occurred during reprovisioning with ID {reprovisioning.id}: {e}") - else: - logger.info(f"Reprovisioning with ID {reprovisioning.id} is successfully finished") - finally: - await reprovisioning_repo.stop() diff --git a/components/renku_data_services/message_queue/db.py b/components/renku_data_services/message_queue/db.py index 44ccc49e9..bc87387fe 100644 --- a/components/renku_data_services/message_queue/db.py +++ b/components/renku_data_services/message_queue/db.py @@ -5,88 +5,15 @@ from collections.abc import Callable from datetime import UTC, datetime -from sanic.log import 
logger from sqlalchemy import delete, select from sqlalchemy.ext.asyncio import AsyncSession -from sqlalchemy.orm import Session from renku_data_services import errors +from renku_data_services.app_config import logging from renku_data_services.message_queue import orm as schemas -from renku_data_services.message_queue.interface import IMessageQueue -from renku_data_services.message_queue.models import Event, Reprovisioning +from renku_data_services.message_queue.models import Reprovisioning - -class EventRepository: - """Repository for events.""" - - def __init__( - self, - session_maker: Callable[..., AsyncSession], - message_queue: IMessageQueue, - ) -> None: - self.session_maker = session_maker - self.message_queue: IMessageQueue = message_queue - - async def get_pending_events(self) -> list[schemas.EventORM]: - """Get all pending events.""" - async with self.session_maker() as session: - stmt = select(schemas.EventORM).order_by(schemas.EventORM.timestamp_utc) - events_orm = await session.scalars(stmt) - return list(events_orm.all()) - - async def send_pending_events(self) -> None: - """Get all pending events and send them. - - We lock rows that get sent and keep sending until there are no more events. - """ - n_total_events = 0 - - while True: - async with self.session_maker() as session, session.begin(): - stmt = ( - select(schemas.EventORM) - # lock retrieved rows, skip already locked ones, to deal with concurrency - .with_for_update(skip_locked=True) - .limit(100) - .order_by(schemas.EventORM.timestamp_utc) - ) - result = await session.scalars(stmt) - events_orm = result.all() - - new_events_count = len(events_orm) - if new_events_count == 0: - break - - n_total_events += new_events_count - - for event in events_orm: - try: - await self.message_queue.send_message(event.dump()) - - await session.delete(event) # this has to be done in the same transaction to not get a deadlock - except Exception as e: - logger.warning(f"couldn't send event {event.payload} on queue {event.queue}: {e}") - - if n_total_events > 0: - logger.info(f"sent {n_total_events} events to the message queue") - - async def store_event(self, session: AsyncSession | Session, event: Event) -> int: - """Store an event.""" - event_orm = schemas.EventORM.load(event) - session.add(event_orm) - - return event_orm.id - - async def delete_event(self, id: int) -> None: - """Delete an event.""" - async with self.session_maker() as session, session.begin(): - stmt = delete(schemas.EventORM).where(schemas.EventORM.id == id) - await session.execute(stmt) - - async def delete_all_events(self) -> None: - """Delete all events. 
This is only used when testing reprovisioning.""" - async with self.session_maker() as session, session.begin(): - await session.execute(delete(schemas.EventORM)) +logger = logging.getLogger(__name__) class ReprovisioningRepository: diff --git a/components/renku_data_services/message_queue/events.py b/components/renku_data_services/message_queue/events.py deleted file mode 100644 index 88c204d53..000000000 --- a/components/renku_data_services/message_queue/events.py +++ /dev/null @@ -1,21 +0,0 @@ -"""Message queue classes.""" - - -class AmbiguousEvent: - """Indicates events that don't have a specific avro model.""" - - -class ProjectMembershipChanged(AmbiguousEvent): - """Event for changes in project members.""" - - -class GroupMembershipChanged(AmbiguousEvent): - """Event for changes in group members.""" - - -class UpdateOrInsertUser(AmbiguousEvent): - """Event for adding or updating users.""" - - -class InsertUserNamespace(AmbiguousEvent): - """Event for adding user namespaces.""" diff --git a/components/renku_data_services/message_queue/generate_models.py b/components/renku_data_services/message_queue/generate_models.py deleted file mode 100644 index 4b4d3864e..000000000 --- a/components/renku_data_services/message_queue/generate_models.py +++ /dev/null @@ -1,177 +0,0 @@ -"""Script to generate Python models from avro schemas. - -The library used here (pyavro_gen) is not very actively maintained and we need to patch quite a few things to get it to -work. -There really isn't a good avro -> Python code gen library that works with: -- namespaces -- references between schemas -- outputting somewhere other than the root of a project -Consider this library the least of all evils; we should switch it out as soon as possible. -If https://github.com/marcosschroh/dataclasses-avroschema/issues/552 ever gets addressed, this would be a good -contender.
-""" - -import pathlib -import shutil -from collections import OrderedDict - -import pyavro_gen.codewriters.core -import pyavro_gen.generation_classes -import pyavro_gen.modules.fields_collector -import pyavro_gen.schema_and_classes_container -from avro_preprocessor.avro_domain import Avro -from avro_preprocessor.preprocessor_module import PreprocessorModule -from pyavro_gen import generation_classes -from pyavro_gen.codewriters.utils import namespace_name -from pyavro_gen.modules.avsc_schema_dependency_checker import AvscSchemaDependenciesChecker - - -# monkey patch writer to get correct namespaces -def getv(self) -> str: # type: ignore[no-untyped-def] - """Fake getter.""" - return "renku_data_services.message_queue.avro_models" - - -def setv(self, value) -> None: # type: ignore[no-untyped-def] - """Fake setter.""" - pass - - -def deletev(self) -> None: # type: ignore[no-untyped-def] - """Fake delete.""" - pass - - -pyavro_gen.schema_and_classes_container.SchemaAndClassesContainer.output_prefix = property( - getv, setv, deletev, "output_prefix" -) - -original_get_from_name = pyavro_gen.modules.fields_collector.FieldsCollector.get_class_writer_from_name - - -def _patched_get_class_writer_from_name( - self: pyavro_gen.modules.fields_collector.FieldsCollector, - fully_qualified_name: str, - writer_type: generation_classes.GenerationClassesType = generation_classes.GenerationClassesType.RECORD_CLASS, -) -> pyavro_gen.codewriters.core.ClassWriter: - """Patched version that properly handles enum references.""" - if ( - fully_qualified_name in self.writers - and isinstance( - self.writers[fully_qualified_name], - pyavro_gen.generation_classes.GENERATION_CLASSES[ - pyavro_gen.generation_classes.GenerationClassesType.ENUM_CLASS - ], - ) - and writer_type == pyavro_gen.generation_classes.GenerationClassesType.RECORD_CLASS - ): - writer_type = pyavro_gen.generation_classes.GenerationClassesType.ENUM_CLASS - return original_get_from_name(self, fully_qualified_name, writer_type) - - -pyavro_gen.modules.fields_collector.FieldsCollector.get_class_writer_from_name = _patched_get_class_writer_from_name - - -class SchemaFixer(PreprocessorModule): # type: ignore[misc] - """Removes _schema property from enums, which breaks avro serialization.""" - - def __init__(self, schemas) -> None: # type: ignore[no-untyped-def] - super().__init__(schemas) - - self.writers = schemas.output_writers - - self.prefix = schemas.output_prefix - - def process(self) -> None: - """Process all schemas.""" - - for writer in self.writers.values(): - if not isinstance( - writer, - pyavro_gen.generation_classes.GENERATION_CLASSES[ - pyavro_gen.generation_classes.GenerationClassesType.ENUM_CLASS - ], - ): - continue - writer.attributes = [a for a in writer.attributes if a.name != "_schema"] - - -class DependencyChecker(AvscSchemaDependenciesChecker): # type: ignore[misc] - """Fixes dependency checks.""" - - def store_dependencies_of_field(self, node: Avro.Node) -> None: - """Store external_dependencies of other records in a node in a private dict.""" - - if isinstance(node, str) and self.ancestors and "." in node: - anc = self.ancestors[-1].key - if anc == Avro.Type or isinstance(anc, int): - dependent_ancestor = self._find_ancestor() - if dependent_ancestor: - self.record_dependencies_graph.add_edge(dependent_ancestor, node) - - if isinstance(node, OrderedDict) and Avro.Name in node: - if Avro.Namespace in node: - dep = node[Avro.Namespace] + "." + node[Avro.Name] - elif "." 
in node[Avro.Name]: - dep = node[Avro.Name] - elif Avro.Fields in node or Avro.Symbols in node: - dep = namespace_name(self.current_schema_name) + "." + node[Avro.Name] - else: - return - - dependent_ancestor = self._find_ancestor() - self.record_dependencies_graph.add_edge(dependent_ancestor, dep) - - def process(self) -> None: - """Detects all dependencies among schemas.""" - super().process() - - # sort schemas by dependencies - keys = list(self.schemas.output_writers.keys()) - keys = sorted(keys) - for idx, (record, dependencies) in enumerate(sorted(self.record_dependencies.items())): - if len(dependencies) == 0: - continue - record_index = keys.index(record) - for dep in dependencies: - dep_index = keys.index(dep) - if dep_index <= record_index: - continue - keys[dep_index], keys[record_index] = keys[record_index], keys[dep_index] - record_index = dep_index - - self.schemas.output_writers = OrderedDict((key, self.schemas.output_writers[key]) for key in keys) - - -def generate_schemas() -> None: - """Generate pythons files from avro.""" - - from avro_preprocessor.avro_paths import AvroPaths - from pyavro_gen.generator import AvroGenerator - - root = pathlib.Path(__file__).parent.resolve() - schema_folder = root / "schemas" - models_folder = root / "avro_models" - - generator: AvroGenerator = AvroGenerator( - AvroPaths( - input_path=str(schema_folder), - output_path=str(models_folder), - base_namespace="io.renku", - types_namespace=None, - rpc_namespace=None, - input_schema_file_extension="avsc", - ), - verbose=True, - ) - generator.preprocessing_modules.append(SchemaFixer) - generator.available_preprocessing_modules[SchemaFixer.__name__] = SchemaFixer - generator.preprocessing_modules.append(DependencyChecker) - generator.available_preprocessing_modules[DependencyChecker.__name__] = DependencyChecker - - generator.process(["FieldsCollector", "SchemaFixer", "DependencyChecker", "AvscSchemaDependenciesChecker"]) - # pyavro creates mock classes for tests that we don't need and that have broken imports anyways - shutil.rmtree(root / "avro_models_test", ignore_errors=True) - - -generate_schemas() diff --git a/components/renku_data_services/message_queue/interface.py b/components/renku_data_services/message_queue/interface.py deleted file mode 100644 index 158957541..000000000 --- a/components/renku_data_services/message_queue/interface.py +++ /dev/null @@ -1,13 +0,0 @@ -"""Interface for message queue client.""" - -from typing import Protocol - -from renku_data_services.message_queue.models import Event - - -class IMessageQueue(Protocol): - """Interface for message queue client.""" - - async def send_message(self, event: Event) -> None: - """Send a message on a channel.""" - ... 
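For reference, the IMessageQueue protocol removed above and the Event model removed in the next diff were used together roughly as follows. This is a minimal sketch reconstructed from the deleted code, not a definitive usage example: the model, id, and message type are illustrative choices (they match the generated v2.UserRemoved event and the "user.removed" type and "data_service.all_events" queue name found in the deleted converters.py).

    # Minimal sketch of the removed event flow (reconstructed from the deleted
    # models.py; queue name and message type come from the deleted converters.py).
    from renku_data_services.message_queue.avro_models.io.renku.events import v2
    from renku_data_services.message_queue.models import Event, deserialize_binary

    payload = v2.UserRemoved(id="user-123")  # any generated Avro model works here
    event = Event.create("data_service.all_events", "user.removed", payload)

    # The payload travels as Avro binary; it round-trips using the same schema.
    raw = event.serialize()["payload"]
    restored = deserialize_binary(raw, v2.UserRemoved)
    assert restored.id == "user-123"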
diff --git a/components/renku_data_services/message_queue/models.py b/components/renku_data_services/message_queue/models.py index ca9f427f4..634c52309 100644 --- a/components/renku_data_services/message_queue/models.py +++ b/components/renku_data_services/message_queue/models.py @@ -1,93 +1,10 @@ """Basic models used for communication with the message queue.""" -import glob -import json from dataclasses import dataclass, field from datetime import UTC, datetime -from io import BytesIO -from pathlib import Path -from typing import Any, Self, TypeVar, cast -from dataclasses_avroschema import AvroModel -from fastavro import parse_schema, schemaless_reader, schemaless_writer from ulid import ULID -from renku_data_services.message_queue.avro_models.io.renku.events.v1.header import Header - -_root: Path = Path(__file__).parent.resolve() -_filter = f"{_root}/schemas/**/*.avsc" -_schemas = {} -for file in glob.glob(_filter, recursive=True): - with open(file) as f: - _schema = json.load(f) - if "name" in _schema: - _name = _schema["name"] - _namespace = _schema.get("namespace") - if _namespace: - _name = f"{_namespace}.{_name}" - _schemas[_name] = _schema - - -def _serialize_binary(obj: AvroModel) -> bytes: - """Serialize a message with avro, making sure to use the original schema.""" - schema = parse_schema(schema=json.loads(getattr(obj, "_schema", obj.avro_schema())), named_schemas=_schemas) - fo = BytesIO() - schemaless_writer(fo, schema, obj.asdict()) - return fo.getvalue() - - -TAvro = TypeVar("TAvro", bound=AvroModel) - - -def deserialize_binary(data: bytes, model: type[TAvro]) -> TAvro: - """Deserialize an avro binary message, using the original schema.""" - input_stream = BytesIO(data) - schema = parse_schema(schema=json.loads(getattr(model, "_schema", model.avro_schema())), named_schemas=_schemas) - - payload = schemaless_reader(input_stream, schema, schema) - input_stream.flush() - obj = model.parse_obj(payload) # type: ignore - - return cast(TAvro, obj) - - -def _create_header( - message_type: str, content_type: str = "application/avro+binary", schema_version: str = "2" -) -> Header: - """Create a message header.""" - return Header( - type=message_type, - source="renku-data-services", - dataContentType=content_type, - schemaVersion=schema_version, - time=datetime.now(UTC), - requestId=ULID().hex, - ) - - -@dataclass -class Event: - """An event and the queue it is supposed to be sent to.""" - - queue: str - _payload: dict[str, Any] - - def serialize(self) -> dict[str, Any]: - """Return the event as avro payload.""" - return self._payload - - @classmethod - def create(cls, queue: str, message_type: str, payload: AvroModel) -> Self: - """Create a new event from an avro model.""" - message_id = ULID().hex - headers = _create_header(message_type, schema_version="2").serialize_json() - message: dict[str, Any] = { - "id": message_id, - "headers": headers, - "payload": _serialize_binary(payload), - } - return cls(queue, message) - @dataclass class Reprovisioning: diff --git a/components/renku_data_services/message_queue/orm.py b/components/renku_data_services/message_queue/orm.py index 407bea5e1..f1e86dc4c 100644 --- a/components/renku_data_services/message_queue/orm.py +++ b/components/renku_data_services/message_queue/orm.py @@ -1,17 +1,13 @@ -"""SQLAlchemy schemas for the CRC database.""" +"""SQLAlchemy schemas for the message queue database.""" -import base64 -import json -from copy import deepcopy -from datetime import UTC, datetime -from typing import Any, Optional +from datetime import 
datetime -from sqlalchemy import JSON, DateTime, Identity, Index, Integer, MetaData, String, text +from sqlalchemy import JSON, DateTime, Index, MetaData, text from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column from ulid import ULID -from renku_data_services.message_queue.models import Event, Reprovisioning +from renku_data_services.message_queue.models import Reprovisioning from renku_data_services.utils.sqlalchemy import ULIDType JSONVariant = JSON().with_variant(JSONB(), "postgresql") @@ -23,58 +19,6 @@ class BaseORM(MappedAsDataclass, DeclarativeBase): metadata = MetaData(schema="events") # Has to match alembic ini section name -class EventORM(BaseORM): - """Event table. - - This table is used to ensure message delivery. - When changes are made to the database, the corresponding event is written here in the same transaction. - After the changes are committed, the event is sent and the corresponding entry is deleted from this table. - If a change was stored in the DB but the service crashed before sending the corresponding event, a - left-over entry would remain here. - On startup, any left-over entry is resent and then deleted. This can result in duplicate events being sent, - and it is up to the receivers to deal with that, but it ensures that an event is sent at least once. - """ - - __tablename__ = "events" - - id: Mapped[int] = mapped_column(Integer, Identity(always=True), primary_key=True, default=None, init=False) - """Unique id of the event.""" - - timestamp_utc: Mapped[datetime] = mapped_column("timestamp_utc", DateTime(timezone=False), nullable=False) - - queue: Mapped[str] = mapped_column("queue", String()) - """The name of the queue to send the event to.""" - - payload: Mapped[dict[str, Any]] = mapped_column("payload", JSONVariant) - """The message payload.""" - - @classmethod - def load(cls, event: Event) -> "EventORM": - """Create an ORM object from an event.""" - message = event.serialize() - if "payload" in message and isinstance(message["payload"], bytes): - message["payload"] = base64.b64encode(message["payload"]).decode() - now_utc = datetime.now(UTC).replace(tzinfo=None) - return cls(timestamp_utc=now_utc, queue=event.queue, payload=message) - - def dump(self) -> Event: - """Create an event from the ORM object.""" - message = deepcopy(self.payload) - if "payload" in message and isinstance(message["payload"], str): - message["payload"] = base64.b64decode(message["payload"]) - return Event(self.queue, message) - - def get_message_type(self) -> Optional[str]: - """Return the message_type from the payload.""" - headers = self.payload.get("headers", "{}") - headers_json = json.loads(headers) - message_type = str(headers_json.get("type", "")) - if message_type == "": - return None - else: - return message_type - - class ReprovisioningORM(BaseORM): """Reprovisioning table.
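The EventORM docstring above describes a transactional-outbox pattern. Schematically, the removed EventRepository drained that table as sketched below; this is a condensed rewrite of the deleted send_pending_events, not a drop-in replacement, and it assumes session_maker is an async sessionmaker and queue implements the removed IMessageQueue protocol.

    # Condensed sketch of the deleted at-least-once delivery loop.
    from sqlalchemy import select

    from renku_data_services.message_queue.orm import EventORM  # the class removed above

    async def send_pending(session_maker, queue) -> None:
        while True:
            async with session_maker() as session, session.begin():
                stmt = (
                    select(EventORM)
                    .with_for_update(skip_locked=True)  # tolerate concurrent senders
                    .limit(100)
                    .order_by(EventORM.timestamp_utc)
                )
                events = (await session.scalars(stmt)).all()
                if not events:
                    break
                for event in events:
                    # Send and delete in one transaction: a crash after the send but
                    # before the commit only means the event is re-sent later.
                    await queue.send_message(event.dump())
                    await session.delete(event)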
diff --git a/components/renku_data_services/message_queue/redis_queue.py b/components/renku_data_services/message_queue/redis_queue.py deleted file mode 100644 index 58be9cc7e..000000000 --- a/components/renku_data_services/message_queue/redis_queue.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Message queue implementation for redis streams.""" - -import copy -from collections.abc import Awaitable, Callable -from dataclasses import dataclass -from functools import wraps -from typing import Concatenate, ParamSpec, Protocol, TypeVar - -from dataclasses_avroschema import AvroModel -from redis.asyncio.sentinel import MasterNotFoundError -from sqlalchemy.ext.asyncio import AsyncSession - -from renku_data_services.errors import errors -from renku_data_services.message_queue import events -from renku_data_services.message_queue.config import RedisConfig -from renku_data_services.message_queue.converters import EventConverter -from renku_data_services.message_queue.db import EventRepository -from renku_data_services.message_queue.interface import IMessageQueue -from renku_data_services.message_queue.models import Event - - -class WithMessageQueue(Protocol): - """The protocol required for a class to send messages to a message queue.""" - - @property - def event_repo(self) -> EventRepository: - """Returns the event repository.""" - ... - - -_P = ParamSpec("_P") -_T = TypeVar("_T") -_WithMessageQueue = TypeVar("_WithMessageQueue", bound=WithMessageQueue) -_EventType = TypeVar("_EventType", type[AvroModel], type[events.AmbiguousEvent], covariant=True) - - -def dispatch_message( - event_type: _EventType, -) -> Callable[ - [Callable[Concatenate[_WithMessageQueue, _P], Awaitable[_T]]], - Callable[Concatenate[_WithMessageQueue, _P], Awaitable[_T]], -]: - """Sends a message on the message queue. - - A message is created based on the event type and result of the wrapped method. - Messages are stored in the database in the same transaction as the changed entities, and are sent by a background - job to ensure delivery of messages and prevent messages being sent in case of failing transactions or due to - exceptions. - """ - - def decorator( - f: Callable[Concatenate[_WithMessageQueue, _P], Awaitable[_T]], - ) -> Callable[Concatenate[_WithMessageQueue, _P], Awaitable[_T]]: - @wraps(f) - async def message_wrapper(self: _WithMessageQueue, *args: _P.args, **kwargs: _P.kwargs) -> _T: - session = kwargs.get("session") - if not isinstance(session, AsyncSession): - raise errors.ProgrammingError( - message="The decorator that populates the message queue expects a valid database session " - f"in the keyword arguments instead it got {type(session)}." 
- ) - result = await f(self, *args, **kwargs) - if result is None: - return result # type: ignore[unreachable] - events = EventConverter.to_events(result, event_type) - - for event in events: - await self.event_repo.store_event(session, event) - return result - - return message_wrapper - - return decorator - - -@dataclass -class RedisQueue(IMessageQueue): - """Redis streams queue implementation.""" - - config: RedisConfig - - async def send_message(self, event: Event) -> None: - """Send a message on a channel.""" - message = copy.copy(event.serialize()) - - try: - await self.config.redis_connection.xadd(event.queue, message) - except MasterNotFoundError: - self.config.reset_redis_connection() # force redis reconnection - await self.config.redis_connection.xadd(event.queue, message) diff --git a/components/renku_data_services/message_queue/schemas/.github/workflows/ci.yml b/components/renku_data_services/message_queue/schemas/.github/workflows/ci.yml deleted file mode 100644 index 4ba0e85bd..000000000 --- a/components/renku_data_services/message_queue/schemas/.github/workflows/ci.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: Run CI -on: - pull_request: - branches: -jobs: - test: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - with: - fetch-depth: 0 - - name: Setup Avrodisiac - run: | - curl -L -o avrodisiac.tar.gz https://github.com/SwissDataScienceCenter/avrodisiac/releases/download/v0.1.3/avrodisiac-x86_64-unknown-linux-gnu.tar.gz - tar xf avrodisiac.tar.gz --directory=/tmp/ - - name: Check formatting - run: /tmp/avrodisiac lint . - - name: Check compatibility - run: | - git worktree add /tmp/old ${{ github.event.pull_request.base.ref }} - /tmp/avrodisiac compat /tmp/old . - diff --git a/components/renku_data_services/message_queue/schemas/LICENSE b/components/renku_data_services/message_queue/schemas/LICENSE deleted file mode 100644 index 261eeb9e9..000000000 --- a/components/renku_data_services/message_queue/schemas/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/components/renku_data_services/message_queue/schemas/README.md b/components/renku_data_services/message_queue/schemas/README.md deleted file mode 100644 index d85be3301..000000000 --- a/components/renku_data_services/message_queue/schemas/README.md +++ /dev/null @@ -1,4 +0,0 @@ -# renku-schema -Repository for all message queue schemas - -Folder Structure is `/v/asyncapi.yaml` for AsyncAPI and `/v/events/.avsc` for Avro schemas. 
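
The *.avsc files removed below are what the deleted models.py code globbed into its _schemas registry and fed to fastavro. As a reference for the wire format, a schemaless round trip with fastavro, using a Header record abbreviated from the deleted common/v1/headers.avsc (the full record also carries dataContentType, schemaVersion and a timestamp-millis time field):

from io import BytesIO

from fastavro import parse_schema, schemaless_reader, schemaless_writer

# Abbreviated copy of the deleted io.renku.events.v1.Header record.
HEADER = parse_schema(
    {
        "type": "record",
        "name": "Header",
        "namespace": "io.renku.events.v1",
        "fields": [
            {"name": "source", "type": "string"},
            {"name": "type", "type": "string"},
            {"name": "requestId", "type": "string"},
        ],
    }
)


def serialize(record: dict) -> bytes:
    # schemaless_writer emits only the encoded fields -- no embedded schema
    # or fingerprint -- so readers must obtain the writer schema out of band,
    # which is why the deleted code kept a registry of parsed schemas.
    buffer = BytesIO()
    schemaless_writer(buffer, HEADER, record)
    return buffer.getvalue()


def deserialize(data: bytes) -> dict:
    return schemaless_reader(BytesIO(data), HEADER)


message = serialize({"source": "renku-data-services", "type": "project.created", "requestId": "01A"})
assert deserialize(message)["type"] == "project.created"
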
diff --git a/components/renku_data_services/message_queue/schemas/common/v1/headers.avsc b/components/renku_data_services/message_queue/schemas/common/v1/headers.avsc deleted file mode 100644 index 506341cae..000000000 --- a/components/renku_data_services/message_queue/schemas/common/v1/headers.avsc +++ /dev/null @@ -1,35 +0,0 @@ -{ - "type": "record", - "name": "Header", - "namespace":"io.renku.events.v1", - "doc":"common headers for messages", - "fields": [ - { - "name": "source", - "type": "string" - }, - { - "name": "type", - "type": "string" - }, - { - "name": "dataContentType", - "type": "string" - }, - { - "name": "schemaVersion", - "type": "string" - }, - { - "name": "time", - "type": { - "type": "long", - "logicalType": "timestamp-millis" - } - }, - { - "name": "requestId", - "type": "string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/common/v1/visibility.avsc b/components/renku_data_services/message_queue/schemas/common/v1/visibility.avsc deleted file mode 100644 index e92743ea0..000000000 --- a/components/renku_data_services/message_queue/schemas/common/v1/visibility.avsc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "type": "enum", - "name": "Visibility", - "doc": "Visibility setting", - "namespace":"io.renku.events.v1", - "symbols": [ - "PUBLIC", - "PRIVATE" - ] -} diff --git a/components/renku_data_services/message_queue/schemas/header/headers.avsc b/components/renku_data_services/message_queue/schemas/header/headers.avsc deleted file mode 100644 index d545c4aaa..000000000 --- a/components/renku_data_services/message_queue/schemas/header/headers.avsc +++ /dev/null @@ -1,35 +0,0 @@ -{ - "type": "record", - "name": "Header", - "namespace":"io.renku.events", - "doc":"common headers for messages", - "fields": [ - { - "name": "source", - "type": "string" - }, - { - "name": "type", - "type": "string" - }, - { - "name": "dataContentType", - "type": "string" - }, - { - "name": "schemaVersion", - "type": "string" - }, - { - "name": "time", - "type": { - "type": "long", - "logicalType": "timestamp-millis" - } - }, - { - "name": "requestId", - "type": "string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/project/v1/asyncapi.yaml b/components/renku_data_services/message_queue/schemas/project/v1/asyncapi.yaml deleted file mode 100644 index d581b621f..000000000 --- a/components/renku_data_services/message_queue/schemas/project/v1/asyncapi.yaml +++ /dev/null @@ -1,92 +0,0 @@ -asyncapi: 3.0.0 -info: - title: Project Events - version: 0.0.1 -servers: - redis: - url: renku-redis - protocol: redis - description: Renku Redis Instance -channels: - project.created: - publish: - messages: - projectCreated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/created.avsc#/ProjectCreated' - traits: - - $ref: '#/components/messageTraits/headers' - project.updated: - publish: - messages: - projectUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/updated.avsc#/ProjectUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - project.removed: - publish: - messages: - projectRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/removed.avsc#/ProjectRemoved' - traits: - - $ref: '#/components/messageTraits/headers' - projectAuth.added: - publish: - messages: - projectAuthAdded: - schemaFormat: 
"application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/authorization_added.avsc#/ProjectAuthorizationAdded' - traits: - - $ref: '#/components/messageTraits/headers' - projectAuth.updated: - publish: - messages: - projectAuthUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/authorization_updated.avsc#/ProjectAuthorizationUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - projectAuth.removed: - publish: - messages: - projectAuthRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/authorization_removed.avsc#/ProjectAuthorizationRemoved' - traits: - - $ref: '#/components/messageTraits/headers' -components: - messageTraits: - headers: - payload: - type: object - properties: - id: - type: string - headers: - - $ref: '../../common/v1/headers.avsc#/Header' diff --git a/components/renku_data_services/message_queue/schemas/project/v1/events/authorization_added.avsc b/components/renku_data_services/message_queue/schemas/project/v1/events/authorization_added.avsc deleted file mode 100644 index 68b07e262..000000000 --- a/components/renku_data_services/message_queue/schemas/project/v1/events/authorization_added.avsc +++ /dev/null @@ -1,20 +0,0 @@ -{ - "type":"record", - "name":"ProjectAuthorizationAdded", - "namespace":"io.renku.events.v1", - "doc":"Event raised when an authorization for a project is added for a user", - "fields":[ - { - "name":"projectId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - }, - { - "name": "role", - "type": "io.renku.events.v1.ProjectMemberRole" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/project/v1/events/authorization_removed.avsc b/components/renku_data_services/message_queue/schemas/project/v1/events/authorization_removed.avsc deleted file mode 100644 index 9e5d3cbeb..000000000 --- a/components/renku_data_services/message_queue/schemas/project/v1/events/authorization_removed.avsc +++ /dev/null @@ -1,16 +0,0 @@ -{ - "type":"record", - "name":"ProjectAuthorizationRemoved", - "namespace":"io.renku.events.v1", - "doc":"Event raised when an authorization for a project is removed for a user", - "fields":[ - { - "name":"projectId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/project/v1/events/authorization_updated.avsc b/components/renku_data_services/message_queue/schemas/project/v1/events/authorization_updated.avsc deleted file mode 100644 index 74e808e1f..000000000 --- a/components/renku_data_services/message_queue/schemas/project/v1/events/authorization_updated.avsc +++ /dev/null @@ -1,20 +0,0 @@ -{ - "type":"record", - "name":"ProjectAuthorizationUpdated", - "namespace":"io.renku.events.v1", - "doc":"Event raised when an authorization for a project is modified", - "fields":[ - { - "name":"projectId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - }, - { - "name": "role", - "type": "io.renku.events.v1.ProjectMemberRole" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/project/v1/events/created.avsc b/components/renku_data_services/message_queue/schemas/project/v1/events/created.avsc deleted file mode 100644 index 8b1fc1c4f..000000000 --- a/components/renku_data_services/message_queue/schemas/project/v1/events/created.avsc +++ 
/dev/null @@ -1,58 +0,0 @@ -{ - "type": "record", - "name": "ProjectCreated", - "namespace": "io.renku.events.v1", - "doc": "Event raised when a new project is created", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "name", - "type": "string" - }, - { - "name": "slug", - "type": "string" - }, - { - "name": "repositories", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "visibility", - "type": "io.renku.events.v1.Visibility" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "keywords", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "createdBy", - "type": "string" - }, - { - "name": "creationDate", - "type": { - "type": "long", - "logicalType": "timestamp-millis" - } - } - ] -} \ No newline at end of file diff --git a/components/renku_data_services/message_queue/schemas/project/v1/events/removed.avsc b/components/renku_data_services/message_queue/schemas/project/v1/events/removed.avsc deleted file mode 100644 index 213a35281..000000000 --- a/components/renku_data_services/message_queue/schemas/project/v1/events/removed.avsc +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "record", - "name": "ProjectRemoved", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a project is removed", - "fields": [ - { - "name":"id", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/project/v1/events/role.avsc b/components/renku_data_services/message_queue/schemas/project/v1/events/role.avsc deleted file mode 100644 index b26e8ae9c..000000000 --- a/components/renku_data_services/message_queue/schemas/project/v1/events/role.avsc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "type":"enum", - "name":"ProjectMemberRole", - "namespace":"io.renku.events.v1", - "doc":"Access role of a project member", - "symbols":[ - "MEMBER", - "OWNER" - ] -} diff --git a/components/renku_data_services/message_queue/schemas/project/v1/events/updated.avsc b/components/renku_data_services/message_queue/schemas/project/v1/events/updated.avsc deleted file mode 100644 index 6bdf629c4..000000000 --- a/components/renku_data_services/message_queue/schemas/project/v1/events/updated.avsc +++ /dev/null @@ -1,47 +0,0 @@ -{ - "type": "record", - "name": "ProjectUpdated", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a project is updated", - "fields": [ - { - "name":"id", - "type":"string" - }, - { - "name": "name", - "type": "string" - }, - { - "name": "slug", - "type": "string" - }, - { - "name": "repositories", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "visibility", - "type": "io.renku.events.v1.Visibility" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "keywords", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/user/v1/asyncapi.yaml b/components/renku_data_services/message_queue/schemas/user/v1/asyncapi.yaml deleted file mode 100644 index 7919f21a7..000000000 --- a/components/renku_data_services/message_queue/schemas/user/v1/asyncapi.yaml +++ /dev/null @@ -1,56 +0,0 @@ -asyncapi: 3.0.0 -info: - title: Project Events - version: 0.0.1 -servers: - redis: - url: renku-redis - protocol: redis - description: Renku Redis Instance -channels: - user.added: - publish: - messages: - userAdded: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - 
payload: - type: object - properties: - payload: - - $ref: './events/added.avsc#/UserAdded' - traits: - - $ref: '#/components/messageTraits/headers' - user.updated: - publish: - messages: - userUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/created.avsc#/UserUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - user.removed: - publish: - messages: - userRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/created.avsc#/UserRemoved' - traits: - - $ref: '#/components/messageTraits/headers' -components: - messageTraits: - headers: - payload: - type: object - properties: - id: - type: string - headers: - - $ref: '../../common/v1/headers.avsc#/Header' diff --git a/components/renku_data_services/message_queue/schemas/user/v1/events/added.avsc b/components/renku_data_services/message_queue/schemas/user/v1/events/added.avsc deleted file mode 100644 index 0571168ee..000000000 --- a/components/renku_data_services/message_queue/schemas/user/v1/events/added.avsc +++ /dev/null @@ -1,27 +0,0 @@ -{ - "type": "record", - "name":"UserAdded", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a new user is added", - "fields":[ - { - "name":"id", - "type":"string" - }, - { - "name": "firstName", - "type":[ - "null", - "string" - ] - }, - { - "name":"lastName", - "type":["null","string"] - }, - { - "name":"email", - "type":["null","string"] - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/user/v1/events/removed.avsc b/components/renku_data_services/message_queue/schemas/user/v1/events/removed.avsc deleted file mode 100644 index 90518fd20..000000000 --- a/components/renku_data_services/message_queue/schemas/user/v1/events/removed.avsc +++ /dev/null @@ -1,13 +0,0 @@ - -{ - "type": "record", - "name":"UserRemoved", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a user is removed", - "fields":[ - { - "name":"id", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/user/v1/events/updated.avsc b/components/renku_data_services/message_queue/schemas/user/v1/events/updated.avsc deleted file mode 100644 index 8af8c4b0b..000000000 --- a/components/renku_data_services/message_queue/schemas/user/v1/events/updated.avsc +++ /dev/null @@ -1,24 +0,0 @@ -{ - "type": "record", - "name":"UserUpdated", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a user is updated", - "fields":[ - { - "name":"id", - "type":"string" - }, - { - "name": "firstName", - "type":["null","string"] - }, - { - "name":"lastName", - "type":["null","string"] - }, - { - "name":"email", - "type":["null","string"] - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v1/common/headers.avsc b/components/renku_data_services/message_queue/schemas/v1/common/headers.avsc deleted file mode 100644 index 506341cae..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/common/headers.avsc +++ /dev/null @@ -1,35 +0,0 @@ -{ - "type": "record", - "name": "Header", - "namespace":"io.renku.events.v1", - "doc":"common headers for messages", - "fields": [ - { - "name": "source", - "type": "string" - }, - { - "name": "type", - "type": "string" - }, - { - "name": "dataContentType", - "type": "string" - }, - { - "name": "schemaVersion", - "type": "string" - }, - { - "name": "time", - "type": { - "type": "long", - "logicalType": 
"timestamp-millis" - } - }, - { - "name": "requestId", - "type": "string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v1/common/visibility.avsc b/components/renku_data_services/message_queue/schemas/v1/common/visibility.avsc deleted file mode 100644 index e92743ea0..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/common/visibility.avsc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "type": "enum", - "name": "Visibility", - "doc": "Visibility setting", - "namespace":"io.renku.events.v1", - "symbols": [ - "PUBLIC", - "PRIVATE" - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v1/project/asyncapi.yaml b/components/renku_data_services/message_queue/schemas/v1/project/asyncapi.yaml deleted file mode 100644 index 74edd4441..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/project/asyncapi.yaml +++ /dev/null @@ -1,92 +0,0 @@ -asyncapi: 3.0.0 -info: - title: Project Events - version: 0.0.1 -servers: - redis: - url: renku-redis - protocol: redis - description: Renku Redis Instance -channels: - project.created: - publish: - messages: - projectCreated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/created.avsc#/ProjectCreated' - traits: - - $ref: '#/components/messageTraits/headers' - project.updated: - publish: - messages: - projectUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/updated.avsc#/ProjectUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - project.removed: - publish: - messages: - projectRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/removed.avsc#/ProjectRemoved' - traits: - - $ref: '#/components/messageTraits/headers' - projectAuth.added: - publish: - messages: - projectAuthAdded: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/authorization_added.avsc#/ProjectAuthorizationAdded' - traits: - - $ref: '#/components/messageTraits/headers' - projectAuth.updated: - publish: - messages: - projectAuthUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/authorization_updated.avsc#/ProjectAuthorizationUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - projectAuth.removed: - publish: - messages: - projectAuthRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/authorization_removed.avsc#/ProjectAuthorizationRemoved' - traits: - - $ref: '#/components/messageTraits/headers' -components: - messageTraits: - headers: - payload: - type: object - properties: - id: - type: string - headers: - - $ref: '../common/headers.avsc#/Header' diff --git a/components/renku_data_services/message_queue/schemas/v1/project/events/authorization_added.avsc b/components/renku_data_services/message_queue/schemas/v1/project/events/authorization_added.avsc deleted file mode 100644 index 68b07e262..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/project/events/authorization_added.avsc +++ /dev/null @@ -1,20 +0,0 @@ -{ - "type":"record", - "name":"ProjectAuthorizationAdded", - "namespace":"io.renku.events.v1", - "doc":"Event raised when an authorization for a 
project is added for a user", - "fields":[ - { - "name":"projectId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - }, - { - "name": "role", - "type": "io.renku.events.v1.ProjectMemberRole" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v1/project/events/authorization_removed.avsc b/components/renku_data_services/message_queue/schemas/v1/project/events/authorization_removed.avsc deleted file mode 100644 index 9e5d3cbeb..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/project/events/authorization_removed.avsc +++ /dev/null @@ -1,16 +0,0 @@ -{ - "type":"record", - "name":"ProjectAuthorizationRemoved", - "namespace":"io.renku.events.v1", - "doc":"Event raised when an authorization for a project is removed for a user", - "fields":[ - { - "name":"projectId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v1/project/events/authorization_updated.avsc b/components/renku_data_services/message_queue/schemas/v1/project/events/authorization_updated.avsc deleted file mode 100644 index 74e808e1f..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/project/events/authorization_updated.avsc +++ /dev/null @@ -1,20 +0,0 @@ -{ - "type":"record", - "name":"ProjectAuthorizationUpdated", - "namespace":"io.renku.events.v1", - "doc":"Event raised when an authorization for a project is modified", - "fields":[ - { - "name":"projectId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - }, - { - "name": "role", - "type": "io.renku.events.v1.ProjectMemberRole" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v1/project/events/created.avsc b/components/renku_data_services/message_queue/schemas/v1/project/events/created.avsc deleted file mode 100644 index 8b1fc1c4f..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/project/events/created.avsc +++ /dev/null @@ -1,58 +0,0 @@ -{ - "type": "record", - "name": "ProjectCreated", - "namespace": "io.renku.events.v1", - "doc": "Event raised when a new project is created", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "name", - "type": "string" - }, - { - "name": "slug", - "type": "string" - }, - { - "name": "repositories", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "visibility", - "type": "io.renku.events.v1.Visibility" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "keywords", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "createdBy", - "type": "string" - }, - { - "name": "creationDate", - "type": { - "type": "long", - "logicalType": "timestamp-millis" - } - } - ] -} \ No newline at end of file diff --git a/components/renku_data_services/message_queue/schemas/v1/project/events/removed.avsc b/components/renku_data_services/message_queue/schemas/v1/project/events/removed.avsc deleted file mode 100644 index 213a35281..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/project/events/removed.avsc +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "record", - "name": "ProjectRemoved", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a project is removed", - "fields": [ - { - "name":"id", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v1/project/events/role.avsc 
b/components/renku_data_services/message_queue/schemas/v1/project/events/role.avsc deleted file mode 100644 index b26e8ae9c..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/project/events/role.avsc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "type":"enum", - "name":"ProjectMemberRole", - "namespace":"io.renku.events.v1", - "doc":"Access role of a project member", - "symbols":[ - "MEMBER", - "OWNER" - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v1/project/events/updated.avsc b/components/renku_data_services/message_queue/schemas/v1/project/events/updated.avsc deleted file mode 100644 index 6bdf629c4..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/project/events/updated.avsc +++ /dev/null @@ -1,47 +0,0 @@ -{ - "type": "record", - "name": "ProjectUpdated", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a project is updated", - "fields": [ - { - "name":"id", - "type":"string" - }, - { - "name": "name", - "type": "string" - }, - { - "name": "slug", - "type": "string" - }, - { - "name": "repositories", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "visibility", - "type": "io.renku.events.v1.Visibility" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "keywords", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v1/user/asyncapi.yaml b/components/renku_data_services/message_queue/schemas/v1/user/asyncapi.yaml deleted file mode 100644 index bdfedc59c..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/user/asyncapi.yaml +++ /dev/null @@ -1,56 +0,0 @@ -asyncapi: 3.0.0 -info: - title: Project Events - version: 0.0.1 -servers: - redis: - url: renku-redis - protocol: redis - description: Renku Redis Instance -channels: - user.added: - publish: - messages: - userAdded: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/added.avsc#/UserAdded' - traits: - - $ref: '#/components/messageTraits/headers' - user.updated: - publish: - messages: - userUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/created.avsc#/UserUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - user.removed: - publish: - messages: - userRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/created.avsc#/UserRemoved' - traits: - - $ref: '#/components/messageTraits/headers' -components: - messageTraits: - headers: - payload: - type: object - properties: - id: - type: string - headers: - - $ref: '../common/headers.avsc#/Header' diff --git a/components/renku_data_services/message_queue/schemas/v1/user/events/added.avsc b/components/renku_data_services/message_queue/schemas/v1/user/events/added.avsc deleted file mode 100644 index 0571168ee..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/user/events/added.avsc +++ /dev/null @@ -1,27 +0,0 @@ -{ - "type": "record", - "name":"UserAdded", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a new user is added", - "fields":[ - { - "name":"id", - "type":"string" - }, - { - "name": "firstName", - "type":[ - "null", - "string" - ] - }, - { - "name":"lastName", - "type":["null","string"] - }, - { - "name":"email", - 
"type":["null","string"] - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v1/user/events/removed.avsc b/components/renku_data_services/message_queue/schemas/v1/user/events/removed.avsc deleted file mode 100644 index 90518fd20..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/user/events/removed.avsc +++ /dev/null @@ -1,13 +0,0 @@ - -{ - "type": "record", - "name":"UserRemoved", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a user is removed", - "fields":[ - { - "name":"id", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v1/user/events/updated.avsc b/components/renku_data_services/message_queue/schemas/v1/user/events/updated.avsc deleted file mode 100644 index 8af8c4b0b..000000000 --- a/components/renku_data_services/message_queue/schemas/v1/user/events/updated.avsc +++ /dev/null @@ -1,24 +0,0 @@ -{ - "type": "record", - "name":"UserUpdated", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a user is updated", - "fields":[ - { - "name":"id", - "type":"string" - }, - { - "name": "firstName", - "type":["null","string"] - }, - { - "name":"lastName", - "type":["null","string"] - }, - { - "name":"email", - "type":["null","string"] - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/asyncapi.yaml b/components/renku_data_services/message_queue/schemas/v2/asyncapi.yaml deleted file mode 100644 index 576da0fc0..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/asyncapi.yaml +++ /dev/null @@ -1,46 +0,0 @@ -asyncapi: 3.0.0 -info: - title: Search Sync Events - version: 0.0.1 -servers: - redis: - host: renku-redis - protocol: redis - description: Renku Redis Instance -channels: - data_service.all_events: - messages: - syncEvent: - payload: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - oneOf: - - $ref: './user/events/added.avsc#/UserAdded' - - $ref: './user/events/created.avsc#/UserUpdated' - - $ref: './user/events/removed.avsc#/UserRemoved' - - $ref: './group/events/added.avsc#/GroupAdded' - - $ref: './group/events/created.avsc#/GroupUpdated' - - $ref: './group/events/removed.avsc#/GroupRemoved' - - $ref: './group/events/member_added.avsc#/GroupMemberAdded' - - $ref: './group/events/member_added.avsc#/GroupMemberUpdated' - - $ref: './group/events/member_removed.avsc#/GroupMemberRemoved' - - $ref: './project/events/created.avsc#/ProjectCreated' - - $ref: './project/events/updated.avsc#/ProjectUpdated' - - $ref: './project/events/removed.avsc#/ProjectRemoved' - - $ref: './project/events/member_added.avsc#/ProjectMemberAdded' - - $ref: './project/events/member_updated.avsc#/ProjectMemberUpdated' - - $ref: './project/events/member_removed.avsc#/ProjectMemberRemoved' - - $ref: './notify/events/reprovisioning_started.asvc#/ReprovisioningStarted' - - $ref: './notify/events/reprovisioning_finished.asvc#/ReprovisioningFinished' - traits: - - $ref: '#/components/messageTraits/headers' - -components: - messageTraits: - headers: - payload: - type: object - properties: - id: - type: string - headers: - - $ref: '../header/headers.avsc#/Header' diff --git a/components/renku_data_services/message_queue/schemas/v2/common/role.avsc b/components/renku_data_services/message_queue/schemas/v2/common/role.avsc deleted file mode 100644 index 18d89278b..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/common/role.avsc +++ /dev/null @@ -1,11 +0,0 @@ -{ - "type":"enum", - "name":"MemberRole", - 
"namespace":"io.renku.events.v2", - "doc":"Access role of a member", - "symbols":[ - "OWNER", - "EDITOR", - "VIEWER" - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/common/visibility.avsc b/components/renku_data_services/message_queue/schemas/v2/common/visibility.avsc deleted file mode 100644 index 66c71909e..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/common/visibility.avsc +++ /dev/null @@ -1,10 +0,0 @@ -{ - "type": "enum", - "name": "Visibility", - "doc": "Visibility setting", - "namespace":"io.renku.events.v2", - "symbols": [ - "PUBLIC", - "PRIVATE" - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/group/asyncapi.yaml b/components/renku_data_services/message_queue/schemas/v2/group/asyncapi.yaml deleted file mode 100644 index 21fa6594d..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/group/asyncapi.yaml +++ /dev/null @@ -1,92 +0,0 @@ -asyncapi: 3.0.0 -info: - title: Group Events - version: 0.0.2 -servers: - redis: - url: renku-redis - protocol: redis - description: Renku Redis Instance -channels: - group.added: - publish: - messages: - groupAdded: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/added.avsc#/GroupAdded' - traits: - - $ref: '#/components/messageTraits/headers' - group.updated: - publish: - messages: - groupUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/created.avsc#/GroupUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - group.removed: - publish: - messages: - groupRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/removed.avsc#/GroupRemoved' - traits: - - $ref: '#/components/messageTraits/headers' - memberGroup.added: - publish: - messages: - memberGroupAdded: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/member_added.avsc#/GroupMemberAdded' - traits: - - $ref: '#/components/messageTraits/headers' - memberGroup.updated: - publish: - messages: - memberGroupUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/member_added.avsc#/GroupMemberUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - memberGroup.removed: - publish: - messages: - memberGroupRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/member_removed.avsc#/GroupMemberRemoved' - traits: - - $ref: '#/components/messageTraits/headers' -components: - messageTraits: - headers: - payload: - type: object - properties: - id: - type: string - headers: - - $ref: '../../header/headers.avsc#/Header' diff --git a/components/renku_data_services/message_queue/schemas/v2/group/events/added.avsc b/components/renku_data_services/message_queue/schemas/v2/group/events/added.avsc deleted file mode 100644 index 74a74cbc9..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/group/events/added.avsc +++ /dev/null @@ -1,24 +0,0 @@ -{ - "type": "record", - "name":"GroupAdded", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a new group is added", - "fields":[ - { - "name":"id", - "type":"string" - }, - { - "name":"name", - "type":"string" - }, - { - 
"name": "description", - "type": ["null", "string"] - }, - { - "name":"namespace", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/group/events/member_added.avsc b/components/renku_data_services/message_queue/schemas/v2/group/events/member_added.avsc deleted file mode 100644 index 71ea06d0c..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/group/events/member_added.avsc +++ /dev/null @@ -1,20 +0,0 @@ -{ - "type":"record", - "name":"GroupMemberAdded", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a member is added to a group", - "fields":[ - { - "name":"groupId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - }, - { - "name": "role", - "type": "io.renku.events.v2.MemberRole" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/group/events/member_removed.avsc b/components/renku_data_services/message_queue/schemas/v2/group/events/member_removed.avsc deleted file mode 100644 index 983d18391..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/group/events/member_removed.avsc +++ /dev/null @@ -1,16 +0,0 @@ -{ - "type":"record", - "name":"GroupMemberRemoved", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a member is removed from a group", - "fields":[ - { - "name":"groupId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/group/events/member_updated.avsc b/components/renku_data_services/message_queue/schemas/v2/group/events/member_updated.avsc deleted file mode 100644 index 4305d2850..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/group/events/member_updated.avsc +++ /dev/null @@ -1,20 +0,0 @@ -{ - "type":"record", - "name":"GroupMemberUpdated", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a member is updated in a group", - "fields":[ - { - "name":"groupId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - }, - { - "name": "role", - "type": "io.renku.events.v2.MemberRole" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/group/events/removed.avsc b/components/renku_data_services/message_queue/schemas/v2/group/events/removed.avsc deleted file mode 100644 index 605865583..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/group/events/removed.avsc +++ /dev/null @@ -1,13 +0,0 @@ - -{ - "type": "record", - "name":"GroupRemoved", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a group is removed", - "fields":[ - { - "name":"id", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/group/events/updated.avsc b/components/renku_data_services/message_queue/schemas/v2/group/events/updated.avsc deleted file mode 100644 index 01d75954d..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/group/events/updated.avsc +++ /dev/null @@ -1,24 +0,0 @@ -{ - "type": "record", - "name":"GroupUpdated", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a group is updated", - "fields":[ - { - "name":"id", - "type":"string" - }, - { - "name":"name", - "type":"string" - }, - { - "name": "description", - "type": ["null", "string"] - }, - { - "name":"namespace", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/notify/events/reprovisioning_finished.avsc 
b/components/renku_data_services/message_queue/schemas/v2/notify/events/reprovisioning_finished.avsc deleted file mode 100644 index 7e3d665cb..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/notify/events/reprovisioning_finished.avsc +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "record", - "name":"ReprovisioningFinished", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a reprovisioning of data events finished", - "fields":[ - { - "name":"id", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/notify/events/reprovisioning_started.avsc b/components/renku_data_services/message_queue/schemas/v2/notify/events/reprovisioning_started.avsc deleted file mode 100644 index 70ed8f0c7..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/notify/events/reprovisioning_started.avsc +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "record", - "name":"ReprovisioningStarted", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a reprovisioning of data events started", - "fields":[ - { - "name":"id", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/project/asyncapi.yaml b/components/renku_data_services/message_queue/schemas/v2/project/asyncapi.yaml deleted file mode 100644 index d1e70e4cf..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/project/asyncapi.yaml +++ /dev/null @@ -1,92 +0,0 @@ -asyncapi: 3.0.0 -info: - title: Project Events - version: 0.0.2 -servers: - redis: - url: renku-redis - protocol: redis - description: Renku Redis Instance -channels: - project.created: - publish: - messages: - projectCreated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/created.avsc#/ProjectCreated' - traits: - - $ref: '#/components/messageTraits/headers' - project.updated: - publish: - messages: - projectUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/updated.avsc#/ProjectUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - project.removed: - publish: - messages: - projectRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/removed.avsc#/ProjectRemoved' - traits: - - $ref: '#/components/messageTraits/headers' - projectMember.added: - publish: - messages: - projectMemberAdded: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/member_added.avsc#/ProjectMemberAdded' - traits: - - $ref: '#/components/messageTraits/headers' - projectMember.updated: - publish: - messages: - projectMemberUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/member_updated.avsc#/ProjectMemberUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - projectMember.removed: - publish: - messages: - projectMemberRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/member_removed.avsc#/ProjectMemberRemoved' - traits: - - $ref: '#/components/messageTraits/headers' -components: - messageTraits: - headers: - payload: - type: object - properties: - id: - type: string - headers: - - $ref: '../../header/headers.avsc#/Header' diff --git 
a/components/renku_data_services/message_queue/schemas/v2/project/events/created.avsc b/components/renku_data_services/message_queue/schemas/v2/project/events/created.avsc deleted file mode 100644 index 86e31a008..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/project/events/created.avsc +++ /dev/null @@ -1,62 +0,0 @@ -{ - "type": "record", - "name": "ProjectCreated", - "namespace": "io.renku.events.v2", - "doc": "Event raised when a new project is created", - "fields": [ - { - "name": "id", - "type": "string" - }, - { - "name": "name", - "type": "string" - }, - { - "name":"namespace", - "type":"string" - }, - { - "name": "slug", - "type": "string" - }, - { - "name": "repositories", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "visibility", - "type": "io.renku.events.v2.Visibility" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "keywords", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "createdBy", - "type": "string" - }, - { - "name": "creationDate", - "type": { - "type": "long", - "logicalType": "timestamp-millis" - } - } - ] -} \ No newline at end of file diff --git a/components/renku_data_services/message_queue/schemas/v2/project/events/member_added.avsc b/components/renku_data_services/message_queue/schemas/v2/project/events/member_added.avsc deleted file mode 100644 index 1211e569d..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/project/events/member_added.avsc +++ /dev/null @@ -1,20 +0,0 @@ -{ - "type":"record", - "name":"ProjectMemberAdded", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a user is added to a project", - "fields":[ - { - "name":"projectId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - }, - { - "name": "role", - "type": "io.renku.events.v2.MemberRole" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/project/events/member_removed.avsc b/components/renku_data_services/message_queue/schemas/v2/project/events/member_removed.avsc deleted file mode 100644 index be9ce3ae1..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/project/events/member_removed.avsc +++ /dev/null @@ -1,16 +0,0 @@ -{ - "type":"record", - "name":"ProjectMemberRemoved", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a user is removed from a project", - "fields":[ - { - "name":"projectId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/project/events/member_updated.avsc b/components/renku_data_services/message_queue/schemas/v2/project/events/member_updated.avsc deleted file mode 100644 index 4eceaa0ea..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/project/events/member_updated.avsc +++ /dev/null @@ -1,20 +0,0 @@ -{ - "type":"record", - "name":"ProjectMemberUpdated", - "namespace":"io.renku.events.v2", - "doc":"Event raised when user role on a project is modified", - "fields":[ - { - "name":"projectId", - "type":"string" - }, - { - "name":"userId", - "type":"string" - }, - { - "name": "role", - "type": "io.renku.events.v2.MemberRole" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/project/events/removed.avsc b/components/renku_data_services/message_queue/schemas/v2/project/events/removed.avsc deleted file mode 100644 index a22dd353a..000000000 --- 
a/components/renku_data_services/message_queue/schemas/v2/project/events/removed.avsc +++ /dev/null @@ -1,12 +0,0 @@ -{ - "type": "record", - "name": "ProjectRemoved", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a project is removed", - "fields": [ - { - "name":"id", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/project/events/updated.avsc b/components/renku_data_services/message_queue/schemas/v2/project/events/updated.avsc deleted file mode 100644 index d0dc55c2a..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/project/events/updated.avsc +++ /dev/null @@ -1,51 +0,0 @@ -{ - "type": "record", - "name": "ProjectUpdated", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a project is updated", - "fields": [ - { - "name":"id", - "type":"string" - }, - { - "name": "name", - "type": "string" - }, - { - "name":"namespace", - "type":"string" - }, - { - "name": "slug", - "type": "string" - }, - { - "name": "repositories", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - }, - { - "name": "visibility", - "type": "io.renku.events.v2.Visibility" - }, - { - "name": "description", - "type": [ - "null", - "string" - ] - }, - { - "name": "keywords", - "type": { - "type": "array", - "items": "string" - }, - "default": [] - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/user/asyncapi.yaml b/components/renku_data_services/message_queue/schemas/v2/user/asyncapi.yaml deleted file mode 100644 index f1673a85e..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/user/asyncapi.yaml +++ /dev/null @@ -1,56 +0,0 @@ -asyncapi: 3.0.0 -info: - title: User Events - version: 0.0.2 -servers: - redis: - url: renku-redis - protocol: redis - description: Renku Redis Instance -channels: - user.added: - publish: - messages: - userAdded: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/added.avsc#/UserAdded' - traits: - - $ref: '#/components/messageTraits/headers' - user.updated: - publish: - messages: - userUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/created.avsc#/UserUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - user.removed: - publish: - messages: - userRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/removed.avsc#/UserRemoved' - traits: - - $ref: '#/components/messageTraits/headers' -components: - messageTraits: - headers: - payload: - type: object - properties: - id: - type: string - headers: - - $ref: '../../header/headers.avsc#/Header' diff --git a/components/renku_data_services/message_queue/schemas/v2/user/events/added.avsc b/components/renku_data_services/message_queue/schemas/v2/user/events/added.avsc deleted file mode 100644 index 2318c605c..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/user/events/added.avsc +++ /dev/null @@ -1,31 +0,0 @@ -{ - "type": "record", - "name":"UserAdded", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a new user is added", - "fields":[ - { - "name":"id", - "type":"string" - }, - { - "name": "firstName", - "type":[ - "null", - "string" - ] - }, - { - "name":"lastName", - "type":["null","string"] - }, - { - "name":"email", - "type":["null","string"] - }, - { - "name":"namespace", - 
"type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/user/events/removed.avsc b/components/renku_data_services/message_queue/schemas/v2/user/events/removed.avsc deleted file mode 100644 index 8b3f5d311..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/user/events/removed.avsc +++ /dev/null @@ -1,13 +0,0 @@ - -{ - "type": "record", - "name":"UserRemoved", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a user is removed", - "fields":[ - { - "name":"id", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/message_queue/schemas/v2/user/events/updated.avsc b/components/renku_data_services/message_queue/schemas/v2/user/events/updated.avsc deleted file mode 100644 index 4766d06c8..000000000 --- a/components/renku_data_services/message_queue/schemas/v2/user/events/updated.avsc +++ /dev/null @@ -1,28 +0,0 @@ -{ - "type": "record", - "name":"UserUpdated", - "namespace":"io.renku.events.v2", - "doc":"Event raised when a user is updated", - "fields":[ - { - "name":"id", - "type":"string" - }, - { - "name": "firstName", - "type":["null","string"] - }, - { - "name":"lastName", - "type":["null","string"] - }, - { - "name":"email", - "type":["null","string"] - }, - { - "name":"namespace", - "type":"string" - } - ] -} diff --git a/components/renku_data_services/metrics/__init__.py b/components/renku_data_services/metrics/__init__.py new file mode 100644 index 000000000..247c23a7f --- /dev/null +++ b/components/renku_data_services/metrics/__init__.py @@ -0,0 +1 @@ +"""Metrics service namespace.""" diff --git a/components/renku_data_services/metrics/core.py b/components/renku_data_services/metrics/core.py new file mode 100644 index 000000000..7275fc5ff --- /dev/null +++ b/components/renku_data_services/metrics/core.py @@ -0,0 +1,95 @@ +"""Implementation of staging metrics service.""" + +from renku_data_services.base_models.core import APIUser +from renku_data_services.base_models.metrics import MetricsEvent, MetricsMetadata, MetricsService +from renku_data_services.metrics.db import MetricsRepository +from renku_data_services.metrics.utils import anonymize_user_id + + +class StagingMetricsService(MetricsService): + """A metrics service implementation that stores events in a staging table. + + This service stores metrics events in a database table, which are then processed by a background task that sends + them to the actual metrics service. 
+ """ + + def __init__(self, enabled: bool, metrics_repo: MetricsRepository) -> None: + self.enabled = enabled + self._metrics_repo = metrics_repo + + async def _store_event(self, event: MetricsEvent, user: APIUser, metadata: MetricsMetadata | None = None) -> None: + """Store a metrics event in the staging table.""" + if not self.enabled: + return + + anonymous_user_id = anonymize_user_id(user) + await self._metrics_repo.store_event(event=event.value, anonymous_user_id=anonymous_user_id, metadata=metadata) + + async def session_started(self, user: APIUser, metadata: MetricsMetadata) -> None: + """Store session started event in staging table.""" + metadata["authenticated"] = user.is_authenticated + await self._store_event(MetricsEvent.session_started, user, metadata) + + async def session_resumed(self, user: APIUser, metadata: MetricsMetadata) -> None: + """Store session resumed event in staging table.""" + await self._store_event(MetricsEvent.session_resumed, user, metadata) + + async def session_hibernated(self, user: APIUser, metadata: MetricsMetadata) -> None: + """Store session hibernated event in staging table.""" + await self._store_event(MetricsEvent.session_hibernated, user, metadata) + + async def session_stopped(self, user: APIUser, metadata: MetricsMetadata) -> None: + """Store session stopped event in staging table.""" + metadata["authenticated"] = user.is_authenticated + await self._store_event(MetricsEvent.session_stopped, user, metadata) + + async def session_launcher_created( + self, user: APIUser, environment_kind: str, environment_image_source: str + ) -> None: + """Store session launcher created event in staging table.""" + await self._store_event( + MetricsEvent.session_launcher_created, + user, + {"environment_kind": environment_kind, "environment_image_source": environment_image_source}, + ) + + async def project_created(self, user: APIUser, metadata: MetricsMetadata) -> None: + """Store project created event in staging table.""" + await self._store_event(MetricsEvent.project_created, user, metadata) + + async def code_repo_linked_to_project(self, user: APIUser) -> None: + """Store code repo linked to project event in staging table.""" + await self._store_event(MetricsEvent.code_repo_linked_to_project, user) + + async def data_connector_created(self, user: APIUser) -> None: + """Store data connector created event in staging table.""" + await self._store_event(MetricsEvent.data_connector_created, user) + + async def data_connector_linked(self, user: APIUser) -> None: + """Store data connector linked event in staging table.""" + await self._store_event(MetricsEvent.data_connector_linked, user) + + async def project_member_added(self, user: APIUser) -> None: + """Store project member added event in staging table.""" + await self._store_event(MetricsEvent.project_member_added, user) + + async def group_created(self, user: APIUser) -> None: + """Store group created event in staging table.""" + await self._store_event(MetricsEvent.group_created, user) + + async def group_member_added(self, user: APIUser) -> None: + """Store group member added event in staging table.""" + await self._store_event(MetricsEvent.group_member_added, user) + + async def search_queried(self, user: APIUser) -> None: + """Store search queried event in staging table.""" + metadata: MetricsMetadata = {"authenticated": user.is_authenticated} + await self._store_event(MetricsEvent.search_queried, user, metadata) + + async def user_requested_session_launch(self, user: APIUser, metadata: MetricsMetadata) 
-> None: + """Send event about user requesting session launch.""" + await self._store_event(MetricsEvent.user_requested_session_launch, user, metadata) + + async def user_requested_session_resume(self, user: APIUser, metadata: MetricsMetadata) -> None: + """Send event about user requesting session resume.""" + await self._store_event(MetricsEvent.user_requested_session_resume, user, metadata) diff --git a/components/renku_data_services/metrics/db.py b/components/renku_data_services/metrics/db.py new file mode 100644 index 000000000..ff47162f3 --- /dev/null +++ b/components/renku_data_services/metrics/db.py @@ -0,0 +1,39 @@ +"""Repository for the metrics staging table.""" + +from collections.abc import AsyncGenerator, Callable +from typing import Any + +from sqlalchemy import delete, select +from sqlalchemy.ext.asyncio import AsyncSession +from ulid import ULID + +from renku_data_services.metrics.orm import MetricsORM + + +class MetricsRepository: + """Repository for the metrics staging table.""" + + def __init__(self, session_maker: Callable[..., AsyncSession]) -> None: + """Initialize a new metrics repository.""" + self.session_maker = session_maker + + async def store_event(self, event: str, anonymous_user_id: str, metadata: dict[str, Any] | None) -> None: + """Store a metrics event in the staging table.""" + metric_orm = MetricsORM(event=event, anonymous_user_id=anonymous_user_id, metadata_=metadata) + + async with self.session_maker() as session, session.begin(): + session.add(metric_orm) + + async def get_unprocessed_metrics(self) -> AsyncGenerator[MetricsORM, None]: + """Get unprocessed metrics events from the staging table.""" + async with self.session_maker() as session: + result = await session.stream_scalars(select(MetricsORM)) + async for metrics in result: + yield metrics + + async def delete_processed_metrics(self, metrics_ids: list[ULID]) -> None: + """Delete metrics events from the staging table.""" + if not metrics_ids: + return + async with self.session_maker() as session, session.begin(): + await session.execute(delete(MetricsORM).where(MetricsORM.id.in_(metrics_ids))) diff --git a/components/renku_data_services/metrics/orm.py b/components/renku_data_services/metrics/orm.py new file mode 100644 index 000000000..7ea87364c --- /dev/null +++ b/components/renku_data_services/metrics/orm.py @@ -0,0 +1,45 @@ +"""SQLAlchemy schemas for the metrics database.""" + +from datetime import datetime +from typing import Any, Optional + +from sqlalchemy import JSON, DateTime, MetaData, String, func, text +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column +from ulid import ULID + +from renku_data_services.utils.sqlalchemy import ULIDType + +JSONVariant = JSON().with_variant(JSONB(), "postgresql") + + +class BaseORM(MappedAsDataclass, DeclarativeBase): + """Base class for all ORM classes.""" + + metadata = MetaData(schema="metrics") + + +class MetricsORM(BaseORM): + """Metrics staging table. + + Events are stored in this table and then processed by a background task that sends them to the actual metrics + service. 
+ """ + + __tablename__ = "metrics" + + id: Mapped[ULID] = mapped_column( + "id", ULIDType, server_default=text("generate_ulid()"), primary_key=True, init=False + ) + + event: Mapped[str] = mapped_column("event", String(), nullable=False) + """The type of the metrics (e.g., session_started, project_created, etc.).""" + + anonymous_user_id: Mapped[str] = mapped_column("anonymous_user_id", String(), nullable=False) + + timestamp: Mapped[datetime] = mapped_column( + "timestamp", DateTime(timezone=True), init=False, server_default=func.now(), nullable=False + ) + + metadata_: Mapped[Optional[dict[str, Any]]] = mapped_column("metadata", JSONVariant, default=None, nullable=True) + """The metrics metadata.""" diff --git a/components/renku_data_services/message_queue/avro_models/io/__init__.py b/components/renku_data_services/metrics/py.typed similarity index 100% rename from components/renku_data_services/message_queue/avro_models/io/__init__.py rename to components/renku_data_services/metrics/py.typed diff --git a/components/renku_data_services/metrics/utils.py b/components/renku_data_services/metrics/utils.py new file mode 100644 index 000000000..096e7584a --- /dev/null +++ b/components/renku_data_services/metrics/utils.py @@ -0,0 +1,12 @@ +"""Utility functions.""" + +import hashlib + +from renku_data_services.base_models.core import APIUser + + +def anonymize_user_id(user: APIUser) -> str: + """Anonymize a user's id.""" + return ( + hashlib.md5(user.id.encode("utf-8"), usedforsecurity=False).hexdigest() if user.id is not None else "anonymous" + ) diff --git a/components/renku_data_services/migrations/core.py b/components/renku_data_services/migrations/core.py index 0fd7fafcf..d948e3f46 100644 --- a/components/renku_data_services/migrations/core.py +++ b/components/renku_data_services/migrations/core.py @@ -15,7 +15,7 @@ def run_migrations_for_app(name: str, revision: str = "heads") -> None: """Perform migrations for app `name`. 
From: https://alembic.sqlalchemy.org/en/latest/cookbook.html#programmatic-api-use-connection-sharing-with-asyncio - """ # noqa: E501 + """ cfg = get_alembic_config(name) command.upgrade(cfg, revision) diff --git a/components/renku_data_services/migrations/env.py b/components/renku_data_services/migrations/env.py index 70370b961..08c5d9f17 100644 --- a/components/renku_data_services/migrations/env.py +++ b/components/renku_data_services/migrations/env.py @@ -1,31 +1,30 @@ """Database migrations for Alembic.""" -from logging.config import dictConfig - from renku_data_services.authz.orm import BaseORM as authz from renku_data_services.connected_services.orm import BaseORM as connected_services from renku_data_services.crc.orm import BaseORM as crc from renku_data_services.data_connectors.orm import BaseORM as data_connectors +from renku_data_services.k8s_watcher import BaseORM as k8s_cache from renku_data_services.message_queue.orm import BaseORM as events -from renku_data_services.migrations.utils import logging_config, run_migrations +from renku_data_services.metrics.orm import BaseORM as metrics +from renku_data_services.migrations.utils import run_migrations from renku_data_services.namespace.orm import BaseORM as namespaces from renku_data_services.platform.orm import BaseORM as platform from renku_data_services.project.orm import BaseORM as project +from renku_data_services.search.orm import BaseORM as search from renku_data_services.secrets.orm import BaseORM as secrets from renku_data_services.session.orm import BaseORM as sessions from renku_data_services.storage.orm import BaseORM as storage from renku_data_services.users.orm import BaseORM as users -# Interpret the config file for Python logging. -# This line sets up loggers basically. -dictConfig(logging_config) - all_metadata = [ authz.metadata, crc.metadata, connected_services.metadata, data_connectors.metadata, events.metadata, + k8s_cache.metadata, + metrics.metadata, namespaces.metadata, platform.metadata, project.metadata, @@ -33,6 +32,7 @@ sessions.metadata, storage.metadata, users.metadata, + search.metadata, ] run_migrations(all_metadata) diff --git a/components/renku_data_services/migrations/utils.py b/components/renku_data_services/migrations/utils.py index 8de33eced..5065b1a05 100644 --- a/components/renku_data_services/migrations/utils.py +++ b/components/renku_data_services/migrations/utils.py @@ -200,19 +200,3 @@ def __del__(self) -> None: self._thread.join() del self._loop del self._thread - - -logging_config: dict = { - "version": 1, - "disable_existing_loggers": False, - "loggers": { - "alembic": { - "level": "INFO", - "qualname": "alembic", - }, - "sqlalchemy": { - "level": "WARN", - "qualname": "sqlalchemy.engine", - }, - }, -} diff --git a/components/renku_data_services/migrations/versions/042eeb50cd8e_add_dismmiss_migration_banner_user_.py b/components/renku_data_services/migrations/versions/042eeb50cd8e_add_dismmiss_migration_banner_user_.py new file mode 100644 index 000000000..1f707100b --- /dev/null +++ b/components/renku_data_services/migrations/versions/042eeb50cd8e_add_dismmiss_migration_banner_user_.py @@ -0,0 +1,32 @@ +"""add dismiss migration banner user preference + +Revision ID: 042eeb50cd8e +Revises: ee719a5fabf6 +Create Date: 2025-04-03 08:14:23.416178 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic.
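Taken together, the metrics pieces above imply a simple consumer: StagingMetricsService writes staged events and MetricsRepository reads and deletes them. A minimal sketch of the background drain loop the docstrings describe, where send_to_backend is a hypothetical stand-in for the real metrics sink, could look like:

from ulid import ULID

from renku_data_services.metrics.db import MetricsRepository
from renku_data_services.metrics.orm import MetricsORM


async def send_to_backend(event: MetricsORM) -> None:
    """Hypothetical stand-in for the call that forwards one event to the metrics service."""
    ...


async def drain_staged_metrics(repo: MetricsRepository) -> None:
    """Forward staged events, then delete only the ones that were actually sent."""
    processed: list[ULID] = []
    async for event in repo.get_unprocessed_metrics():
        await send_to_backend(event)
        processed.append(event.id)
    # Deleting by collected ids keeps events that failed mid-loop in the staging table.
    await repo.delete_processed_metrics(processed)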
+revision = "042eeb50cd8e" +down_revision = "ee719a5fabf6" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "user_preferences", + sa.Column("show_project_migration_banner", sa.Boolean(), nullable=False, server_default=sa.text("true")), + schema="users", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("user_preferences", "show_project_migration_banner", schema="users") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/0c205e28f053_support_env_variables_in_sessions.py b/components/renku_data_services/migrations/versions/0c205e28f053_support_env_variables_in_sessions.py new file mode 100644 index 000000000..1e704a332 --- /dev/null +++ b/components/renku_data_services/migrations/versions/0c205e28f053_support_env_variables_in_sessions.py @@ -0,0 +1,37 @@ +"""support env variables in sessions + +Revision ID: 0c205e28f053 +Revises: ca87e5b43a44 +Create Date: 2025-03-31 09:12:08.245036 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "0c205e28f053" +down_revision = "ca87e5b43a44" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "launchers", + sa.Column( + "env_variables", + sa.JSON().with_variant(postgresql.JSONB(astext_type=sa.Text()), "postgresql"), + nullable=True, + ), + schema="sessions", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("launchers", "env_variables", schema="sessions") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/1d2f64a405aa_create_the_cluster_table.py b/components/renku_data_services/migrations/versions/1d2f64a405aa_create_the_cluster_table.py new file mode 100644 index 000000000..cee152258 --- /dev/null +++ b/components/renku_data_services/migrations/versions/1d2f64a405aa_create_the_cluster_table.py @@ -0,0 +1,71 @@ +"""create the cluster table + +Revision ID: 1d2f64a405aa +Revises: 8d67347dcbec +Create Date: 2025-03-12 14:48:19.156390 + +""" + +import sqlalchemy as sa +from alembic import op + +from renku_data_services.utils.sqlalchemy import ULIDType + +# revision identifiers, used by Alembic. +revision = "1d2f64a405aa" +down_revision = "8d67347dcbec" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "clusters", + sa.Column("id", ULIDType(), nullable=False), + sa.Column("name", sa.String(length=40), nullable=False), + sa.Column("config_name", sa.String(length=40), nullable=False), + sa.PrimaryKeyConstraint("id"), + schema="resource_pools", + ) + op.create_index(op.f("ix_resource_pools_clusters_name"), "clusters", ["name"], unique=True, schema="resource_pools") + op.create_index( + op.f("ix_resource_pools_clusters_config_name"), + "clusters", + ["config_name"], + unique=True, + schema="resource_pools", + ) + op.add_column("resource_pools", sa.Column("cluster_id", ULIDType(), nullable=True), schema="resource_pools") + op.create_index( + op.f("ix_resource_pools_resource_pools_cluster_id"), + "resource_pools", + ["cluster_id"], + unique=False, + schema="resource_pools", + ) + op.create_foreign_key( + "resource_pools_resource_pools_cluster_id_fk", + "resource_pools", + "clusters", + ["cluster_id"], + ["id"], + source_schema="resource_pools", + referent_schema="resource_pools", + ondelete="SET NULL", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint( + "resource_pools_resource_pools_cluster_id_fk", "resource_pools", schema="resource_pools", type_="foreignkey" + ) + op.drop_index( + op.f("ix_resource_pools_resource_pools_cluster_id"), table_name="resource_pools", schema="resource_pools" + ) + op.drop_column("resource_pools", "cluster_id", schema="resource_pools") + op.drop_index(op.f("ix_resource_pools_clusters_name"), table_name="clusters", schema="resource_pools") + op.drop_table("clusters", schema="resource_pools") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/1e296d744eac_update_for_custom_environment_build.py b/components/renku_data_services/migrations/versions/1e296d744eac_update_for_custom_environment_build.py new file mode 100644 index 000000000..d36708694 --- /dev/null +++ b/components/renku_data_services/migrations/versions/1e296d744eac_update_for_custom_environment_build.py @@ -0,0 +1,70 @@ +"""Update for custom environment build + +Revision ID: 1e296d744eac +Revises: 64edf7ac0de0 +Create Date: 2025-02-03 23:09:31.954635 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "1e296d744eac" +down_revision = "64edf7ac0de0" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "build_parameters", + sa.Column("id", sa.String(length=26), nullable=False), + sa.Column("repository", sa.String(length=500), nullable=False), + sa.Column("builder_variant", sa.String(length=99), nullable=False), + sa.Column("frontend_variant", sa.String(length=99), nullable=False), + sa.PrimaryKeyConstraint("id"), + schema="sessions", + ) + + op.execute("CREATE TYPE environmentimagesource AS ENUM ('image', 'build')") + + op.add_column( + "environments", + sa.Column( + "environment_image_source", + sa.Enum("image", "build", name="environmentimagesource"), + nullable=False, + server_default="image", + ), + schema="sessions", + ) + op.add_column( + "environments", + sa.Column("build_parameters_id", sa.String(length=26), nullable=True, server_default=None), + schema="sessions", + ) + op.create_foreign_key( + "environments_build_parameters_id_fk", + "environments", + "build_parameters", + ["build_parameters_id"], + ["id"], + ondelete="CASCADE", + source_schema="sessions", + referent_schema="sessions", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint("environments_build_parameters_id_fk", "environments", schema="sessions", type_="foreignkey") + op.drop_column("environments", "build_parameters_id", schema="sessions") + op.drop_column("environments", "environment_image_source", schema="sessions") + + op.execute("DROP TYPE environmentimagesource") + + op.drop_table("build_parameters", schema="sessions") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/239854e7ea77_add_non_public_read_relation.py b/components/renku_data_services/migrations/versions/239854e7ea77_add_non_public_read_relation.py new file mode 100644 index 000000000..d69c41354 --- /dev/null +++ b/components/renku_data_services/migrations/versions/239854e7ea77_add_non_public_read_relation.py @@ -0,0 +1,37 @@ +"""Add non-public-read relation + +Revision ID: 239854e7ea77 +Revises: 75c83dd9d619 +Create Date: 2025-01-17 14:34:47.305393 + +""" + +from renku_data_services.app_config import logging +from renku_data_services.authz.config import AuthzConfig +from renku_data_services.authz.schemas import v5 + +logger = logging.getLogger(__name__) + +# revision identifiers, used by Alembic. 
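This revision, like 6538ba654104, 5335b8548c79, and 483af0d70cf4 elsewhere in this series, changes the SpiceDB authorization schema through the authz client rather than emitting SQL. A condensed sketch of the shared shape, using only the calls these revisions themselves make (the schemas module version differs per revision):

from renku_data_services.app_config import logging
from renku_data_services.authz.config import AuthzConfig
from renku_data_services.authz.schemas import v5  # v3, v4 and v6 follow the same interface

logger = logging.getLogger(__name__)


def apply_schema_change(upgrade: bool) -> None:
    """Build the SpiceDB client from the environment and apply (or roll back) one schema version."""
    client = AuthzConfig.from_env().authz_client()
    responses = v5.upgrade(client) if upgrade else v5.downgrade(client)
    logger.info(f"Applied authz schema change, response: {responses}")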
+revision = "239854e7ea77" +down_revision = "75c83dd9d619" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + config = AuthzConfig.from_env() + client = config.authz_client() + responses = v5.upgrade(client) + logger.info( + f"Finished upgrading the Authz schema to version 5 in Alembic revision {revision}, response: {responses}" + ) + + +def downgrade() -> None: + config = AuthzConfig.from_env() + client = config.authz_client() + responses = v5.downgrade(client) + logger.info( + f"Finished downgrading the Authz schema from version 5 in Alembic revision {revision}, response: {responses}" + ) diff --git a/components/renku_data_services/migrations/versions/2d3bf387ef9a_set_check_constraint_for_data_.py b/components/renku_data_services/migrations/versions/2d3bf387ef9a_set_check_constraint_for_data_.py new file mode 100644 index 000000000..6d879e6fb --- /dev/null +++ b/components/renku_data_services/migrations/versions/2d3bf387ef9a_set_check_constraint_for_data_.py @@ -0,0 +1,33 @@ +"""set check constraint for data connectors and projects in entity slugs table + +Revision ID: 2d3bf387ef9a +Revises: 322f8c5f4eb0 +Create Date: 2025-03-19 09:51:46.505682 + +""" + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "2d3bf387ef9a" +down_revision = "322f8c5f4eb0" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_check_constraint( + "one_or_both_project_id_or_group_id_are_set", + "entity_slugs", + "(project_id IS NOT NULL) OR (data_connector_id IS NOT NULL)", + schema="common", + ) + + +def downgrade() -> None: + op.drop_constraint( + "one_or_both_project_id_or_group_id_are_set", + "entity_slugs", + schema="common", + type_="check", + ) diff --git a/components/renku_data_services/migrations/versions/322f8c5f4eb0_migrate_slug_entity_tables.py b/components/renku_data_services/migrations/versions/322f8c5f4eb0_migrate_slug_entity_tables.py new file mode 100644 index 000000000..4cb7381d6 --- /dev/null +++ b/components/renku_data_services/migrations/versions/322f8c5f4eb0_migrate_slug_entity_tables.py @@ -0,0 +1,86 @@ +"""migrate slug entity tables + +Revision ID: 322f8c5f4eb0 +Revises: b0a52ff8335a +Create Date: 2025-03-11 22:19:04.256797 + +""" + +import sqlalchemy as sa +from alembic import op + +from renku_data_services.utils.sqlalchemy import ULIDType + +# revision identifiers, used by Alembic. +revision = "322f8c5f4eb0" +down_revision = "b0a52ff8335a" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.add_column( + "entity_slugs_old", + sa.Column("project_id", ULIDType(), nullable=True), + schema="common", + ) + op.add_column( + "entity_slugs_old", + sa.Column("data_connector_id", ULIDType(), nullable=True), + schema="common", + ) + op.create_index( + op.f("ix_common_entity_slugs_old_data_connector_id"), + "entity_slugs_old", + ["data_connector_id"], + unique=False, + schema="common", + ) + op.create_index( + op.f("ix_common_entity_slugs_old_project_id"), "entity_slugs_old", ["project_id"], unique=False, schema="common" + ) + op.create_foreign_key( + "entity_slugs_data_connector_id_fk", + "entity_slugs_old", + "data_connectors", + ["data_connector_id"], + ["id"], + source_schema="common", + referent_schema="storage", + ondelete="CASCADE", + ) + op.create_foreign_key( + "entity_slugs_project_id_fk", + "entity_slugs_old", + "projects", + ["project_id"], + ["id"], + source_schema="common", + referent_schema="projects", + ondelete="CASCADE", + ) + # ### end Alembic commands ### + # Carry over the project ID and data connector ID from the current table. + # We need these because we need to distinguish between a data connector slug being migrated + # from /user1/dc1 to /user2/project3/dc1; in this case the actual slug is the same but the + # path to it from /user1 to /user2/project3 has changed. So the entry in the OldEntitySlug + # will have a namespace_id, data_connector_id and a slug and point to a slug in the EntitySlug table with + # a namespace_id, project_id, data_connector_id and slug + op.execute( + "UPDATE common.entity_slugs_old " + "SET project_id = common.entity_slugs.project_id, data_connector_id = common.entity_slugs.data_connector_id " + "FROM common.entity_slugs " + "WHERE common.entity_slugs_old.latest_slug_id = common.entity_slugs.id " + ) + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint("entity_slugs_project_id_fk", "entity_slugs_old", schema="common", type_="foreignkey") + op.drop_constraint("entity_slugs_data_connector_id_fk", "entity_slugs_old", schema="common", type_="foreignkey") + op.drop_index(op.f("ix_common_entity_slugs_old_project_id"), table_name="entity_slugs_old", schema="common") + op.drop_index(op.f("ix_common_entity_slugs_old_data_connector_id"), table_name="entity_slugs_old", schema="common") + op.drop_column("entity_slugs_old", "data_connector_id", schema="common") + op.drop_column("entity_slugs_old", "project_id", schema="common") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/450ae3930996_create_initial_global_environments.py b/components/renku_data_services/migrations/versions/450ae3930996_create_initial_global_environments.py new file mode 100644 index 000000000..916d11596 --- /dev/null +++ b/components/renku_data_services/migrations/versions/450ae3930996_create_initial_global_environments.py @@ -0,0 +1,151 @@ +"""bootstrap initial global environments + +Mainly used for CI deployments so they have environments for testing. + +Revision ID: 450ae3930996 +Revises: d71f0f795d30 +Create Date: 2025-02-07 02:34:53.408066 + +""" + +from dataclasses import dataclass + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects.postgresql import JSONB + +from renku_data_services.app_config import logging +from renku_data_services.base_models.core import InternalServiceAdmin + +logger = logging.getLogger(__name__) + +JSONVariant = sa.JSON().with_variant(JSONB(), "postgresql") +# revision identifiers, used by Alembic.
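# A worked illustration of the slug carry-over performed in revision 322f8c5f4eb0
# above, with hypothetical IDs: suppose a data connector moved from /user1/dc1 to
# /user2/project3/dc1.
#
#   Before the backfill:
#     common.entity_slugs      -> namespace_id=user2, project_id=project3, data_connector_id=dc1, slug='dc1'
#     common.entity_slugs_old  -> namespace_id=user1, slug='dc1', latest_slug_id -> the row above
#
#   After the UPDATE, the old row also carries project_id=project3 and
#   data_connector_id=dc1, copied from the current row, so the old path can
#   identify the data connector directly even though its namespace/project
#   prefix has changed.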
+revision = "450ae3930996" +down_revision = "d71f0f795d30" +branch_labels = None +depends_on = None + + +@dataclass +class Environment: + name: str + container_image: str + default_url: str + port: int = 8888 + description: str = "" + working_directory: str | None = None + mount_directory: str | None = None + uid: int = 1000 + gid: int = 1000 + args: list[str] | None = None + command: list[str] | None = None + + +GLOBAL_ENVIRONMENTS = [ + Environment( + name="Python/Jupyter", + description="Standard python environment", + container_image="renku/renkulab-py:latest", + default_url="/lab", + working_directory="/home/jovyan/work", + mount_directory="/home/jovyan/work", + port=8888, + uid=1000, + gid=100, + command=["sh", "-c"], + args=[ + '/entrypoint.sh jupyter server --ServerApp.ip=0.0.0.0 --ServerApp.port=8888 --ServerApp.base_url=$RENKU_BASE_URL_PATH --ServerApp.token="" --ServerApp.password="" --ServerApp.allow_remote_access=true --ContentsManager.allow_hidden=true --ServerApp.allow_origin=* --ServerApp.root_dir="/home/jovyan/work"' + ], + ), + Environment( + name="Rstudio", + description="Standard R environment", + container_image="renku/renkulab-r:latest", + default_url="/rstudio", + working_directory="/home/jovyan/work", + mount_directory="/home/jovyan/work", + port=8888, + uid=1000, + gid=100, + command=["sh", "-c"], + args=[ + '/entrypoint.sh jupyter server --ServerApp.ip=0.0.0.0 --ServerApp.port=8888 --ServerApp.base_url=$RENKU_BASE_URL_PATH --ServerApp.token="" --ServerApp.password="" --ServerApp.allow_remote_access=true --ContentsManager.allow_hidden=true --ServerApp.allow_origin=* --ServerApp.root_dir="/home/jovyan/work"' + ], + ), +] + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + try: + connection = op.get_bind() + + logger.info("creating global environments") + env_stmt = sa.select(sa.column("id", type_=sa.String)).select_from(sa.table("environments", schema="sessions")) + existing_envs = connection.execute(env_stmt).all() + if existing_envs: + logger.info("skipping environment creation as there already are existing environments") + return + for env in GLOBAL_ENVIRONMENTS: + op.execute( + sa.text( + """INSERT INTO sessions.environments( + id, + name, description, + created_by_id, + creation_date, + container_image, + default_url, + port, + working_directory, + mount_directory, + uid, + gid, + args, + command, + environment_kind + )VALUES ( + generate_ulid(), + :name, + :description, + :created_by_id, + now(), + :container_image, + :default_url, + :port, + :working_directory, + :mount_directory, + :uid, + :gid, + :args, + :command, + 'GLOBAL' + )""" # nosec: B608 + ).bindparams( + sa.bindparam("name", value=env.name, type_=sa.Text), + sa.bindparam("description", value=env.description, type_=sa.Text), + sa.bindparam("created_by_id", value=InternalServiceAdmin.id, type_=sa.Text), + sa.bindparam("container_image", value=env.container_image, type_=sa.Text), + sa.bindparam("default_url", value=env.default_url, type_=sa.Text), + sa.bindparam("port", value=env.port, type_=sa.Integer), + sa.bindparam("working_directory", value=env.working_directory, type_=sa.Text), + sa.bindparam("mount_directory", value=env.mount_directory, type_=sa.Text), + sa.bindparam("uid", value=env.uid, type_=sa.Integer), + sa.bindparam("gid", value=env.gid, type_=sa.Integer), + sa.bindparam("args", value=env.args, type_=JSONVariant), + sa.bindparam("command", value=env.command, type_=JSONVariant), + ) + ) + logger.info(f"created global environment {env.name}") + + 
except Exception: + logger.exception("creation of initial global environments failed") + + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + pass + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/483af0d70cf4_migrate_authz_schema_to_v6.py b/components/renku_data_services/migrations/versions/483af0d70cf4_migrate_authz_schema_to_v6.py new file mode 100644 index 000000000..7ed6c3569 --- /dev/null +++ b/components/renku_data_services/migrations/versions/483af0d70cf4_migrate_authz_schema_to_v6.py @@ -0,0 +1,37 @@ +"""Migrate authz schema to v6 + +Revision ID: 483af0d70cf4 +Revises: 559b1fc46cfe +Create Date: 2025-01-22 10:37:40.218992 + +""" + +from renku_data_services.app_config import logging +from renku_data_services.authz.config import AuthzConfig +from renku_data_services.authz.schemas import v6 + +logger = logging.getLogger(__name__) + +# revision identifiers, used by Alembic. +revision = "483af0d70cf4" +down_revision = "559b1fc46cfe" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + config = AuthzConfig.from_env() + client = config.authz_client() + responses = v6.upgrade(client) + logger.info( + f"Finished upgrading the Authz schema to version 6 in Alembic revision {revision}, response: {responses}" + ) + + +def downgrade() -> None: + config = AuthzConfig.from_env() + client = config.authz_client() + responses = v6.downgrade(client) + logger.info( + f"Finished downgrading the Authz schema from version 6 in Alembic revision {revision}, response: {responses}" + ) diff --git a/components/renku_data_services/migrations/versions/5335b8548c79_add_authorization_for_data_connectors.py b/components/renku_data_services/migrations/versions/5335b8548c79_add_authorization_for_data_connectors.py index 4dbd9323e..32ec3887b 100644 --- a/components/renku_data_services/migrations/versions/5335b8548c79_add_authorization_for_data_connectors.py +++ b/components/renku_data_services/migrations/versions/5335b8548c79_add_authorization_for_data_connectors.py @@ -6,14 +6,15 @@ """ -import logging - import sqlalchemy as sa from alembic import op +from renku_data_services.app_config import logging from renku_data_services.authz.config import AuthzConfig from renku_data_services.authz.schemas import generate_v4 +logger = logging.getLogger(__name__) + # revision identifiers, used by Alembic.
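The hunk above is one of several in this series applying the same logging cleanup: module-level calls on the root logger are replaced by a named logger from the project's logging wrapper. A minimal sketch of the pattern, assuming (as the hunks suggest) that renku_data_services.app_config.logging mirrors the stdlib getLogger API:

from renku_data_services.app_config import logging

logger = logging.getLogger(__name__)


def upgrade() -> None:
    # Messages are now attributed to this module instead of the root logger,
    # so handler and level configuration can target migrations specifically.
    logger.info("running migration")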
revision = "5335b8548c79" down_revision = "3cf2adf9896b" @@ -36,7 +37,7 @@ def upgrade() -> None: v4 = generate_v4(project_ids) responses = v4.upgrade(client) tx.commit() - logging.info( + logger.info( f"Finished upgrading the Authz schema to version 4 in Alembic revision {revision}, response: {responses}" ) @@ -56,6 +57,6 @@ def downgrade() -> None: v4 = generate_v4(project_ids) responses = v4.downgrade(client) tx.commit() - logging.info( + logger.info( f"Finished downgrading the Authz schema from version 4 in Alembic revision {revision}, response: {responses}" ) diff --git a/components/renku_data_services/migrations/versions/559b1fc46cfe_add_project_migrations.py b/components/renku_data_services/migrations/versions/559b1fc46cfe_add_project_migrations.py new file mode 100644 index 000000000..492efb9ca --- /dev/null +++ b/components/renku_data_services/migrations/versions/559b1fc46cfe_add_project_migrations.py @@ -0,0 +1,47 @@ +"""add project migrations + +Revision ID: 559b1fc46cfe +Revises: 71ef5efe740f +Create Date: 2025-03-03 13:58:07.450665 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "559b1fc46cfe" +down_revision = "71ef5efe740f" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "project_migrations", + sa.Column("id", sa.String(length=26), nullable=False), + sa.Column("project_v1_id", sa.Integer(), nullable=False), + sa.Column("project_id", sa.String(length=26), nullable=False), + sa.Column("launcher_id", sa.String(length=26), nullable=True), + sa.ForeignKeyConstraint(["project_id"], ["projects.projects.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("project_v1_id"), + sa.UniqueConstraint("project_v1_id", name="uq_project_v1_id"), + schema="projects", + ) + op.create_index( + op.f("ix_projects_project_migrations_project_id"), + "project_migrations", + ["project_id"], + unique=False, + schema="projects", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f("ix_projects_project_migrations_project_id"), table_name="project_migrations", schema="projects") + op.drop_table("project_migrations", schema="projects") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/64edf7ac0de0_create_search_update_table.py b/components/renku_data_services/migrations/versions/64edf7ac0de0_create_search_update_table.py new file mode 100644 index 000000000..06320c4c5 --- /dev/null +++ b/components/renku_data_services/migrations/versions/64edf7ac0de0_create_search_update_table.py @@ -0,0 +1,48 @@ +"""create search update table + +Revision ID: 64edf7ac0de0 +Revises: 239854e7ea77 +Create Date: 2025-02-20 11:55:42.824506 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +from renku_data_services.utils.sqlalchemy import ULIDType + +# revision identifiers, used by Alembic. +revision = "64edf7ac0de0" +down_revision = "239854e7ea77" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "search_updates", + sa.Column("id", ULIDType(), server_default=sa.text("generate_ulid()"), nullable=False), + sa.Column("entity_id", sa.String(length=100), nullable=False), + sa.Column("entity_type", sa.String(length=100), nullable=False), + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.Column( + "payload", sa.JSON().with_variant(postgresql.JSONB(astext_type=sa.Text()), "postgresql"), nullable=False + ), + sa.Column("state", sa.Enum("Locked", "Failed", name="recordstate"), nullable=True), + sa.PrimaryKeyConstraint("id"), + schema="events", + ) + op.create_index( + op.f("ix_events_search_updates_entity_id"), "search_updates", ["entity_id"], unique=True, schema="events" + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f("ix_events_search_updates_entity_id"), table_name="search_updates", schema="events") + op.drop_table("search_updates", schema="events") + op.execute("drop type if exists recordstate") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/6538ba654104_authz_add_public_viewer_to_group.py b/components/renku_data_services/migrations/versions/6538ba654104_authz_add_public_viewer_to_group.py index adea12bdf..3af6201a3 100644 --- a/components/renku_data_services/migrations/versions/6538ba654104_authz_add_public_viewer_to_group.py +++ b/components/renku_data_services/migrations/versions/6538ba654104_authz_add_public_viewer_to_group.py @@ -6,11 +6,12 @@ """ -import logging - +from renku_data_services.app_config import logging from renku_data_services.authz.config import AuthzConfig from renku_data_services.authz.schemas import v3 +logger = logging.getLogger(__name__) + # revision identifiers, used by Alembic. revision = "6538ba654104" down_revision = "57dfd69ea814" @@ -22,7 +23,7 @@ def upgrade() -> None: config = AuthzConfig.from_env() client = config.authz_client() responses = v3.upgrade(client) - logging.info( + logger.info( f"Finished upgrading the Authz schema to version 3 in Alembic revision {revision}, response: {responses}" ) @@ -31,6 +32,6 @@ def downgrade() -> None: config = AuthzConfig.from_env() client = config.authz_client() responses = v3.downgrade(client) - logging.info( + logger.info( f"Finished downgrading the Authz schema from version 3 in Alembic revision {revision}, response: {responses}" ) diff --git a/components/renku_data_services/migrations/versions/71ef5efe740f_add_builds.py b/components/renku_data_services/migrations/versions/71ef5efe740f_add_builds.py new file mode 100644 index 000000000..746fb017d --- /dev/null +++ b/components/renku_data_services/migrations/versions/71ef5efe740f_add_builds.py @@ -0,0 +1,45 @@ +"""Add builds + +Revision ID: 71ef5efe740f +Revises: 1e296d744eac +Create Date: 2025-02-07 14:33:31.895125 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "71ef5efe740f" +down_revision = "1e296d744eac" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "builds", + sa.Column("id", sa.String(length=26), nullable=False), + sa.Column("environment_id", sa.String(length=26), nullable=False), + sa.Column( + "status", sa.Enum("in_progress", "failed", "cancelled", "succeeded", name="buildstatus"), nullable=False + ), + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("result_image", sa.String(length=500), nullable=True), + sa.Column("completed_at", sa.DateTime(timezone=True), nullable=True), + sa.Column("result_repository_url", sa.String(length=500), nullable=True), + sa.Column("result_repository_git_commit_sha", sa.String(length=100), nullable=True), + sa.Column("error_reason", sa.String(length=500), nullable=True), + sa.ForeignKeyConstraint(["environment_id"], ["sessions.environments.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + schema="sessions", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("builds", schema="sessions") + op.execute("DROP TYPE buildstatus") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/75c83dd9d619_migrate_copied_custom_environments_to_.py b/components/renku_data_services/migrations/versions/75c83dd9d619_migrate_copied_custom_environments_to_.py new file mode 100644 index 000000000..2074ffcb7 --- /dev/null +++ b/components/renku_data_services/migrations/versions/75c83dd9d619_migrate_copied_custom_environments_to_.py @@ -0,0 +1,106 @@ +"""migrate copied custom environments to make them copies + +Revision ID: 75c83dd9d619 +Revises: 450ae3930996 +Create Date: 2025-02-18 10:17:45.657261 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "75c83dd9d619" +down_revision = "450ae3930996" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + col_names = [ + "name", + "created_by_id", + "description", + "container_image", + "default_url", + "port", + "working_directory", + "mount_directory", + "uid", + "gid", + "environment_kind", + "args", + "command", + "creation_date", + "is_archived", + ] + col_names_pure = ", ".join(col_names) + col_names_w_prefix = ", ".join(["sessions.environments." + col_name for col_name in col_names]) + + all_duplicate_env_ids_query = ( + "SELECT sessions.launchers.environment_id " + "FROM sessions.launchers " + "INNER JOIN sessions.environments ON sessions.environments.id = sessions.launchers.environment_id " + "WHERE sessions.environments.environment_kind = 'CUSTOM' " + "GROUP BY sessions.launchers.environment_id " + "HAVING COUNT(*) > 1 " + ) + + # NOTE: The two queries that are injected into one another here are fully hardcoded. So to avoid + # having one really long string we disable bandit query injection for the two lines below. + # NOTE: We want to make copies of the environments for N-1 launchers if the total number including + # duplicates is N; row_num below enumerates the duplicates in each launcher/environment group.
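    # To make the window function concrete (launcher and environment IDs here are
    # hypothetical): if launchers L1, L2 and L3 all reference environment E1, then
    #   row_number() OVER (PARTITION BY environment_id ORDER BY id)
    # numbers them (L1, E1, 1), (L2, E1, 2), (L3, E1, 3). The row_num > 1 filter
    # below selects L2 and L3, which receive fresh copies of E1, while L1 keeps
    # the original environment.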
+ enumerated_duplicate_launchers = ( + "SELECT sessions.launchers.id, sessions.launchers.environment_id, " + "row_number() OVER (PARTITION BY sessions.launchers.environment_id ORDER BY sessions.launchers.id) AS row_num " + "FROM sessions.launchers " + f"WHERE sessions.launchers.environment_id IN ({all_duplicate_env_ids_query}) " # nosec B608 + "ORDER BY sessions.launchers.environment_id, sessions.launchers.id " + ) + launcher_ids_to_decouple = f"SELECT a.id FROM ({enumerated_duplicate_launchers}) a WHERE a.row_num > 1 " # nosec B608 + + # Add temporary column in the environments table to store session launcher ids + op.add_column( + "environments", + sa.Column("tmp_launcher_id", sa.VARCHAR(length=30), autoincrement=False, nullable=True), + schema="sessions", + ) + # Make copies of the environments + op.execute( + sa.text( + f"INSERT INTO sessions.environments(id, tmp_launcher_id, {col_names_pure}) " + + f"SELECT generate_ulid(), sessions.launchers.id, {col_names_w_prefix} " + + "FROM sessions.environments " + + "INNER JOIN sessions.launchers ON sessions.launchers.environment_id = sessions.environments.id " + + f"WHERE sessions.launchers.id IN ({launcher_ids_to_decouple}) " + ) + ) + # Update the session launchers to use the copied environments + op.execute( + sa.text( + "UPDATE sessions.launchers " + + "SET environment_id = sessions.environments.id " + + "FROM sessions.environments " + + "WHERE sessions.launchers.id = sessions.environments.tmp_launcher_id " + + "AND sessions.environments.tmp_launcher_id IS NOT NULL " + ) + ) + # Update the environments created_by_id field to be the same as the project + op.execute( + sa.text( + "UPDATE sessions.environments envs " + + "SET created_by_id = prjs.created_by_id " + + "FROM sessions.launchers ls " + + "INNER JOIN projects.projects prjs ON ls.project_id = prjs.id " + + "WHERE envs.id = ls.environment_id " + + "AND envs.tmp_launcher_id IS NOT NULL" + ) + ) + # Drop the temporary column from environments + op.drop_column("environments", "tmp_launcher_id", schema="sessions") + + +def downgrade() -> None: + # NOTE: This just moves and copies data in the DB to fix a bug. So there is no + # need to code the downgrade. + pass diff --git a/components/renku_data_services/migrations/versions/77f46efc541d_add_session_secrets.py b/components/renku_data_services/migrations/versions/77f46efc541d_add_session_secrets.py new file mode 100644 index 000000000..c53ce5b29 --- /dev/null +++ b/components/renku_data_services/migrations/versions/77f46efc541d_add_session_secrets.py @@ -0,0 +1,84 @@ +"""Add session secrets + +Revision ID: 77f46efc541d +Revises: 08ac2714e8e2 +Create Date: 2024-11-13 08:48:32.180897 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "77f46efc541d" +down_revision = "08ac2714e8e2" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.create_table( + "session_secret_slots", + sa.Column("id", sa.String(length=26), nullable=False), + sa.Column("project_id", sa.String(length=26), nullable=False), + sa.Column("name", sa.String(length=99), nullable=False), + sa.Column("description", sa.String(length=500), nullable=True), + sa.Column("filename", sa.String(length=200), nullable=False), + sa.Column("created_by_id", sa.String(length=36), nullable=False), + sa.Column("creation_date", sa.DateTime(timezone=True), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False), + sa.ForeignKeyConstraint( + ["created_by_id"], + ["users.users.keycloak_id"], + ), + sa.ForeignKeyConstraint(["project_id"], ["projects.projects.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("project_id", "filename", name="_unique_project_id_filename"), + schema="projects", + ) + op.create_index( + op.f("ix_projects_session_secret_slots_created_by_id"), + "session_secret_slots", + ["created_by_id"], + unique=False, + schema="projects", + ) + op.create_index( + op.f("ix_projects_session_secret_slots_project_id"), + "session_secret_slots", + ["project_id"], + unique=False, + schema="projects", + ) + op.create_table( + "session_secrets", + sa.Column("id", sa.String(length=26), nullable=False), + sa.Column("user_id", sa.String(length=36), nullable=False), + sa.Column("secret_slot_id", sa.String(length=26), nullable=False), + sa.Column("secret_id", sa.String(length=26), nullable=False), + sa.ForeignKeyConstraint(["secret_id"], ["secrets.secrets.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["secret_slot_id"], ["projects.session_secret_slots.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["user_id"], ["users.users.keycloak_id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("secret_slot_id", "user_id", name="_unique_secret_slot_id_user_id"), + schema="projects", + ) + op.create_index( + op.f("ix_projects_session_secrets_user_id"), "session_secrets", ["user_id"], unique=False, schema="projects" + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f("ix_projects_session_secrets_user_id"), table_name="session_secrets", schema="projects") + op.drop_table("session_secrets", schema="projects") + op.drop_index( + op.f("ix_projects_session_secret_slots_project_id"), table_name="session_secret_slots", schema="projects" + ) + op.drop_index( + op.f("ix_projects_session_secret_slots_created_by_id"), table_name="session_secret_slots", schema="projects" + ) + op.drop_table("session_secret_slots", schema="projects") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/8413f10ef77f_modify_stored_procedures.py b/components/renku_data_services/migrations/versions/8413f10ef77f_modify_stored_procedures.py new file mode 100644 index 000000000..bbfa8126d --- /dev/null +++ b/components/renku_data_services/migrations/versions/8413f10ef77f_modify_stored_procedures.py @@ -0,0 +1,49 @@ +"""modify stored procedures + +Revision ID: 8413f10ef77f +Revises: fe3b7470d226 +Create Date: 2025-07-09 14:09:38.801974 + +""" + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "8413f10ef77f" +down_revision = "fe3b7470d226" +branch_labels = None +depends_on = None + +# NOTE: This combines the procedures introduced in migrations 89aa4573cfa9 and f4ad62b7b323. 
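# The practical effect of the consolidated trigger defined below, sketched with
# a hypothetical ID:
#
#   DELETE FROM common.entity_slugs WHERE project_id = '01EXAMPLE' AND data_connector_id IS NULL;
#   -- fires cleanup_after_slug_deletion for each deleted row, which in turn runs
#   -- DELETE FROM projects.projects WHERE id = '01EXAMPLE';
#
# Deleted slug rows that reference a data connector instead trigger the
# corresponding DELETE FROM storage.data_connectors.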
+ + +def upgrade() -> None: + op.execute("""CREATE OR REPLACE FUNCTION cleanup_after_slug_deletion() +RETURNS TRIGGER AS +$$ +BEGIN + IF OLD.project_id IS NOT NULL AND OLD.data_connector_id IS NULL THEN + DELETE FROM projects.projects WHERE projects.id = OLD.project_id; + ELSIF old.data_connector_id IS NOT NULL THEN + DELETE FROM storage.data_connectors WHERE data_connectors.id = OLD.data_connector_id; + END IF; + RETURN OLD; +END; +$$ +LANGUAGE plpgsql;""") + op.execute("""CREATE OR REPLACE TRIGGER cleanup_after_slug_deletion +AFTER DELETE ON common.entity_slugs +FOR EACH ROW +EXECUTE FUNCTION cleanup_after_slug_deletion();""") + op.execute("DROP TRIGGER IF EXISTS delete_project_after_slug_deletion ON common.entity_slugs;") + # NOTE: The original slug table was in projects and then was renamed to common.entity_slugs, when + # the table was renamed/moved to common the trigger was not updated but it still worked. + op.execute("DROP TRIGGER IF EXISTS delete_project_after_slug_deletion ON projects.project_slugs;") + op.execute("DROP FUNCTION IF EXISTS delete_project_after_slug_deletion;") + op.execute("DROP TRIGGER IF EXISTS delete_data_connector_after_slug_deletion ON common.entity_slugs;") + op.execute("DROP FUNCTION IF EXISTS delete_data_connector_after_slug_deletion;") + + +def downgrade() -> None: + # NOTE: The procedures from previous versions have bugs so there is no point in re-applying them here. + pass diff --git a/components/renku_data_services/migrations/versions/8d67347dcbec_add_global_slug_to_data_connectors.py b/components/renku_data_services/migrations/versions/8d67347dcbec_add_global_slug_to_data_connectors.py new file mode 100644 index 000000000..f5ee4ec10 --- /dev/null +++ b/components/renku_data_services/migrations/versions/8d67347dcbec_add_global_slug_to_data_connectors.py @@ -0,0 +1,37 @@ +"""Add global_slug to data connectors + +Revision ID: 8d67347dcbec +Revises: 0c205e28f053 +Create Date: 2025-04-08 13:37:03.408870 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "8d67347dcbec" +down_revision = "0c205e28f053" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column("data_connectors", sa.Column("global_slug", sa.String(length=99), nullable=True), schema="storage") + op.create_index( + op.f("ix_storage_data_connectors_global_slug"), + "data_connectors", + ["global_slug"], + unique=True, + schema="storage", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.execute("DELETE FROM storage.data_connectors WHERE data_connectors.global_slug IS NOT NULL") + op.drop_index(op.f("ix_storage_data_connectors_global_slug"), table_name="data_connectors", schema="storage") + op.drop_column("data_connectors", "global_slug", schema="storage") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/939c7c649bef_add_disk_storage_to_session_launchers.py b/components/renku_data_services/migrations/versions/939c7c649bef_add_disk_storage_to_session_launchers.py new file mode 100644 index 000000000..3164d5c13 --- /dev/null +++ b/components/renku_data_services/migrations/versions/939c7c649bef_add_disk_storage_to_session_launchers.py @@ -0,0 +1,28 @@ +"""Add disk storage to session launchers + +Revision ID: 939c7c649bef +Revises: d1cdcbb2adc3 +Create Date: 2024-12-20 15:06:01.937878 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "939c7c649bef" +down_revision = "d1cdcbb2adc3" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column("launchers", sa.Column("disk_storage", sa.BigInteger(), nullable=True), schema="sessions") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("launchers", "disk_storage", schema="sessions") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/9ec3d97e3b3d_update_sessions_build_parameters_table.py b/components/renku_data_services/migrations/versions/9ec3d97e3b3d_update_sessions_build_parameters_table.py new file mode 100644 index 000000000..68f7d39ac --- /dev/null +++ b/components/renku_data_services/migrations/versions/9ec3d97e3b3d_update_sessions_build_parameters_table.py @@ -0,0 +1,32 @@ +"""update sessions.build_parameters table + +Revision ID: 9ec3d97e3b3d +Revises: b402b9d584bf +Create Date: 2025-07-21 13:33:06.079306 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "9ec3d97e3b3d" +down_revision = "b402b9d584bf" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "build_parameters", sa.Column("repository_revision", sa.String(length=500), nullable=True), schema="sessions" + ) + op.add_column("build_parameters", sa.Column("context_dir", sa.String(length=500), nullable=True), schema="sessions") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("build_parameters", "context_dir", schema="sessions") + op.drop_column("build_parameters", "repository_revision", schema="sessions") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/a1f7f5fbec9a_make_data_connector_ids_unique_in_.py b/components/renku_data_services/migrations/versions/a1f7f5fbec9a_make_data_connector_ids_unique_in_.py new file mode 100644 index 000000000..955dbd479 --- /dev/null +++ b/components/renku_data_services/migrations/versions/a1f7f5fbec9a_make_data_connector_ids_unique_in_.py @@ -0,0 +1,37 @@ +"""make data connector ids unique in entity slug table + +Revision ID: a1f7f5fbec9a +Revises: 2d3bf387ef9a +Create Date: 2025-03-19 22:07:18.165355 + +""" + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "a1f7f5fbec9a" +down_revision = "2d3bf387ef9a" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index("ix_common_entity_slugs_data_connector_id", table_name="entity_slugs", schema="common") + op.create_index( + op.f("ix_common_entity_slugs_data_connector_id"), + "entity_slugs", + ["data_connector_id"], + unique=True, + schema="common", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f("ix_common_entity_slugs_data_connector_id"), table_name="entity_slugs", schema="common") + op.create_index( + "ix_common_entity_slugs_data_connector_id", "entity_slugs", ["data_connector_id"], unique=False, schema="common" + ) + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/a59e60e0338f_update_user_secrets.py b/components/renku_data_services/migrations/versions/a59e60e0338f_update_user_secrets.py new file mode 100644 index 000000000..65f362b67 --- /dev/null +++ b/components/renku_data_services/migrations/versions/a59e60e0338f_update_user_secrets.py @@ -0,0 +1,62 @@ +"""Update user secrets + +Changes the `secrets.secrets` table: +* Rename `name` to `default_filename` +* Add new `name` column + +Revision ID: a59e60e0338f +Revises: 77f46efc541d +Create Date: 2024-11-19 14:12:54.752176 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "a59e60e0338f" +down_revision = "77f46efc541d" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + connection = op.get_bind() + op.drop_constraint("_unique_name_user", "secrets", schema="secrets", type_="unique") + op.alter_column("secrets", column_name="name", new_column_name="default_filename", schema="secrets") + op.add_column("secrets", sa.Column("name", sa.String(length=99), nullable=True), schema="secrets") + op.create_unique_constraint( + "_unique_user_id_default_filename", "secrets", ["user_id", "default_filename"], schema="secrets" + ) + + # Set the "name" value for existing user secrets to be equal to "default_filename" + op.execute(sa.text("LOCK TABLE secrets.secrets IN EXCLUSIVE MODE")) + secrets_stmt = sa.select( + sa.column("id", type_=sa.String), sa.column("default_filename", type_=sa.String) + ).select_from(sa.table("secrets", schema="secrets")) + secrets = connection.execute(secrets_stmt).all() + for secret in secrets: + (secret_id, default_filename) = secret.tuple() + update_stmt = ( + sa.update( + sa.table( + "secrets", sa.column("id", type_=sa.String), sa.column("name", type_=sa.String), schema="secrets" + ) + ) + .where(sa.column("id", type_=sa.String) == secret_id) + .values(name=default_filename) + ) + connection.execute(update_stmt) + + op.alter_column("secrets", column_name="name", nullable=False, schema="secrets") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
###
+    op.drop_constraint("_unique_user_id_default_filename", "secrets", schema="secrets", type_="unique")
+    op.drop_column("secrets", "name", schema="secrets")
+    op.alter_column("secrets", column_name="default_filename", new_column_name="name", schema="secrets")
+    op.create_unique_constraint("_unique_name_user", "secrets", ["user_id", "name"], schema="secrets")
+    # ### end Alembic commands ###
diff --git a/components/renku_data_services/migrations/versions/b0a52ff8335a_slugs_for_data_connector_which_are_.py b/components/renku_data_services/migrations/versions/b0a52ff8335a_slugs_for_data_connector_which_are_.py
new file mode 100644
index 000000000..b0528096e
--- /dev/null
+++ b/components/renku_data_services/migrations/versions/b0a52ff8335a_slugs_for_data_connector_which_are_.py
@@ -0,0 +1,45 @@
+"""slugs for data connectors which are owned by projects
+
+Revision ID: b0a52ff8335a
+Revises: 483af0d70cf4
+Create Date: 2025-01-22 16:38:57.220486
+
+"""
+
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "b0a52ff8335a"
+down_revision = "483af0d70cf4"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index("entity_slugs_unique_slugs", table_name="entity_slugs", schema="common")
+    op.create_index(
+        "entity_slugs_unique_slugs",
+        "entity_slugs",
+        ["namespace_id", "project_id", "data_connector_id", "slug"],
+        unique=True,
+        schema="common",
+        postgresql_nulls_not_distinct=True,
+    )
+    op.drop_constraint("either_project_id_or_data_connector_id_is_set", "entity_slugs", schema="common", type_="check")
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_index(
+        "entity_slugs_unique_slugs", table_name="entity_slugs", schema="common", postgresql_nulls_not_distinct=True
+    )
+    op.create_index("entity_slugs_unique_slugs", "entity_slugs", ["namespace_id", "slug"], unique=True, schema="common")
+    op.create_check_constraint(
+        "either_project_id_or_data_connector_id_is_set",
+        "entity_slugs",
+        "CAST (project_id IS NOT NULL AS int) + CAST (data_connector_id IS NOT NULL AS int) BETWEEN 0 AND 1",
+        schema="common",
+    )
+    # ### end Alembic commands ###
diff --git a/components/renku_data_services/migrations/versions/b402b9d584bf_add_ingress_parameters_for_remote_clusters.py b/components/renku_data_services/migrations/versions/b402b9d584bf_add_ingress_parameters_for_remote_clusters.py
new file mode 100644
index 000000000..dbb3faf36
--- /dev/null
+++ b/components/renku_data_services/migrations/versions/b402b9d584bf_add_ingress_parameters_for_remote_clusters.py
@@ -0,0 +1,55 @@
+"""Add ingress parameters for remote clusters
+
+Revision ID: b402b9d584bf
+Revises: 8413f10ef77f
+Create Date: 2025-06-20 11:05:22.238236
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "b402b9d584bf"
+down_revision = "8413f10ef77f"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust!
### + op.add_column( + "clusters", sa.Column("session_protocol", sa.String(length=10), nullable=False), schema="resource_pools" + ) + op.add_column("clusters", sa.Column("session_host", sa.String(length=256), nullable=False), schema="resource_pools") + op.add_column("clusters", sa.Column("session_port", sa.Integer(), nullable=False), schema="resource_pools") + op.add_column("clusters", sa.Column("session_path", sa.String(), nullable=False), schema="resource_pools") + op.add_column( + "clusters", + sa.Column( + "session_ingress_annotations", + sa.JSON().with_variant(postgresql.JSONB(astext_type=sa.Text()), "postgresql"), + nullable=False, + ), + schema="resource_pools", + ) + op.add_column( + "clusters", sa.Column("session_tls_secret_name", sa.String(length=256), nullable=False), schema="resource_pools" + ) + op.add_column( + "clusters", sa.Column("session_storage_class", sa.String(length=256), nullable=True), schema="resource_pools" + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("clusters", "session_storage_class", schema="resource_pools") + op.drop_column("clusters", "session_tls_secret_name", schema="resource_pools") + op.drop_column("clusters", "session_ingress_annotations", schema="resource_pools") + op.drop_column("clusters", "session_path", schema="resource_pools") + op.drop_column("clusters", "session_port", schema="resource_pools") + op.drop_column("clusters", "session_host", schema="resource_pools") + op.drop_column("clusters", "session_protocol", schema="resource_pools") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/ca3731b65787_add_service_account_name_to_cluster.py b/components/renku_data_services/migrations/versions/ca3731b65787_add_service_account_name_to_cluster.py new file mode 100644 index 000000000..fe273bc1d --- /dev/null +++ b/components/renku_data_services/migrations/versions/ca3731b65787_add_service_account_name_to_cluster.py @@ -0,0 +1,30 @@ +"""add service account name to cluster + +Revision ID: ca3731b65787 +Revises: 9ec3d97e3b3d +Create Date: 2025-07-07 13:25:48.254099 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "ca3731b65787" +down_revision = "9ec3d97e3b3d" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "clusters", sa.Column("service_account_name", sa.String(length=256), nullable=True), schema="resource_pools" + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("clusters", "service_account_name", schema="resource_pools") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/ca87e5b43a44_create_k8s_cache_tables.py b/components/renku_data_services/migrations/versions/ca87e5b43a44_create_k8s_cache_tables.py new file mode 100644 index 000000000..f42eeb28e --- /dev/null +++ b/components/renku_data_services/migrations/versions/ca87e5b43a44_create_k8s_cache_tables.py @@ -0,0 +1,60 @@ +"""create k8s cache tables + +Revision ID: ca87e5b43a44 +Revises: a1f7f5fbec9a +Create Date: 2025-04-04 14:19:00.340544 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. 
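+# NOTE: The k8s_objects table created below caches Kubernetes manifests as JSONB.
+# A minimal read sketch follows; it is hypothetical, not part of this migration,
+# and assumes a synchronous SQLAlchemy engine pointed at the same database:
+#
+#     import sqlalchemy as sa
+#
+#     def live_objects(engine: sa.Engine, kind: str, user_id: str) -> list[sa.Row]:
+#         """List cached, not-yet-deleted objects of one kind for one user."""
+#         stmt = sa.text(
+#             "SELECT name, namespace, manifest FROM common.k8s_objects"
+#             " WHERE kind = :kind AND user_id = :user_id AND NOT deleted"
+#         )
+#         with engine.connect() as connection:
+#             return list(connection.execute(stmt, {"kind": kind, "user_id": user_id}))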
+revision = "ca87e5b43a44" +down_revision = "a1f7f5fbec9a" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "k8s_objects", + sa.Column("id", sa.String(26), server_default=sa.text("generate_ulid()"), nullable=False), + sa.Column("name", sa.String(), nullable=False), + sa.Column("namespace", sa.String(), nullable=False), + sa.Column("creation_date", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False), + sa.Column("manifest", postgresql.JSONB(), nullable=False), + sa.Column("deleted", sa.Boolean(), nullable=False), + sa.Column("version", sa.String(), nullable=False), + sa.Column("kind", sa.String(), nullable=False), + sa.Column("cluster", sa.String(), nullable=False), + sa.Column("user_id", sa.String(), nullable=False), + sa.PrimaryKeyConstraint("id"), + schema="common", + ) + op.create_index(op.f("ix_common_k8s_objects_cluster"), "k8s_objects", ["cluster"], unique=False, schema="common") + op.create_index(op.f("ix_common_k8s_objects_deleted"), "k8s_objects", ["deleted"], unique=False, schema="common") + op.create_index(op.f("ix_common_k8s_objects_kind"), "k8s_objects", ["kind"], unique=False, schema="common") + op.create_index(op.f("ix_common_k8s_objects_name"), "k8s_objects", ["name"], unique=True, schema="common") + op.create_index( + op.f("ix_common_k8s_objects_namespace"), "k8s_objects", ["namespace"], unique=False, schema="common" + ) + op.create_index(op.f("ix_common_k8s_objects_user_id"), "k8s_objects", ["user_id"], unique=False, schema="common") + op.create_index(op.f("ix_common_k8s_objects_version"), "k8s_objects", ["version"], unique=False, schema="common") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f("ix_common_k8s_objects_version"), table_name="k8s_objects", schema="common") + op.drop_index(op.f("ix_common_k8s_objects_user_id"), table_name="k8s_objects", schema="common") + op.drop_index(op.f("ix_common_k8s_objects_namespace"), table_name="k8s_objects", schema="common") + op.drop_index(op.f("ix_common_k8s_objects_name"), table_name="k8s_objects", schema="common") + op.drop_index(op.f("ix_common_k8s_objects_kind"), table_name="k8s_objects", schema="common") + op.drop_index(op.f("ix_common_k8s_objects_deleted"), table_name="k8s_objects", schema="common") + op.drop_index(op.f("ix_common_k8s_objects_cluster"), table_name="k8s_objects", schema="common") + op.drop_table("k8s_objects", schema="common") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/ce1a111d6694_authz_add_initial_schema.py b/components/renku_data_services/migrations/versions/ce1a111d6694_authz_add_initial_schema.py index f8e68ddfa..ed23dc096 100644 --- a/components/renku_data_services/migrations/versions/ce1a111d6694_authz_add_initial_schema.py +++ b/components/renku_data_services/migrations/versions/ce1a111d6694_authz_add_initial_schema.py @@ -6,11 +6,12 @@ """ -import logging - +from renku_data_services.app_config import logging from renku_data_services.authz.config import AuthzConfig from renku_data_services.authz.schemas import v1 +logger = logging.getLogger(__name__) + # revision identifiers, used by Alembic. 
revision = "ce1a111d6694" down_revision = "89aa4573cfa9" @@ -22,7 +23,7 @@ def upgrade() -> None: config = AuthzConfig.from_env() client = config.authz_client() responses = v1.upgrade(client) - logging.info( + logger.info( f"Finished upgrading the Authz schema to version 1 in Alembic revision {revision}, response: {responses}" ) @@ -31,6 +32,6 @@ def downgrade() -> None: config = AuthzConfig.from_env() client = config.authz_client() responses = v1.downgrade(client) - logging.info( + logger.info( f"Finished downgrading the Authz schema from version 1 in Alembic revision {revision}, response: {responses}" ) diff --git a/components/renku_data_services/migrations/versions/cfda91a3a6a6_add_exclusive_role_relations.py b/components/renku_data_services/migrations/versions/cfda91a3a6a6_add_exclusive_role_relations.py new file mode 100644 index 000000000..0ab44c012 --- /dev/null +++ b/components/renku_data_services/migrations/versions/cfda91a3a6a6_add_exclusive_role_relations.py @@ -0,0 +1,37 @@ +"""Add exclusive role relations + +Revision ID: cfda91a3a6a6 +Revises: f4ad62b7b323 +Create Date: 2025-06-13 16:06:26.421053 + +""" + +from renku_data_services.app_config import logging +from renku_data_services.authz.config import AuthzConfig +from renku_data_services.authz.schemas import v7 + +# revision identifiers, used by Alembic. +revision = "cfda91a3a6a6" +down_revision = "f4ad62b7b323" +branch_labels = None +depends_on = None + +logger = logging.getLogger(__name__) + + +def upgrade() -> None: + config = AuthzConfig.from_env() + client = config.authz_client() + responses = v7.upgrade(client) + logger.info( + f"Finished upgrading the Authz schema to version 7 in Alembic revision {revision}, response: {responses}" + ) + + +def downgrade() -> None: + config = AuthzConfig.from_env() + client = config.authz_client() + responses = v7.downgrade(client) + logger.info( + f"Finished downgrading the Authz schema from version 7 in Alembic revision {revision}, response: {responses}" + ) diff --git a/components/renku_data_services/migrations/versions/d1cdcbb2adc3_add_secrets_mount_directory_to_projects.py b/components/renku_data_services/migrations/versions/d1cdcbb2adc3_add_secrets_mount_directory_to_projects.py new file mode 100644 index 000000000..178f78f00 --- /dev/null +++ b/components/renku_data_services/migrations/versions/d1cdcbb2adc3_add_secrets_mount_directory_to_projects.py @@ -0,0 +1,40 @@ +"""Add secrets_mount_directory to projects + +Revision ID: d1cdcbb2adc3 +Revises: a59e60e0338f +Create Date: 2024-11-27 14:34:45.594157 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "d1cdcbb2adc3" +down_revision = "a59e60e0338f" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + connection = op.get_bind() + op.add_column( + "projects", + sa.Column("secrets_mount_directory", sa.String(), nullable=False, server_default="/secrets"), + schema="projects", + ) + + # Force the `updated_at` column to be updated on all projects. This is done to invalidate all ETags. + op.execute(sa.text("LOCK TABLE projects.projects IN EXCLUSIVE MODE")) + touch_stmt = sa.update(sa.table("projects", sa.Column("updated_at"), schema="projects")).values( + updated_at=sa.func.now() + ) + connection.execute(touch_stmt) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column("projects", "secrets_mount_directory", schema="projects") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/d71f0f795d30_allow_environments_to_be_archived.py b/components/renku_data_services/migrations/versions/d71f0f795d30_allow_environments_to_be_archived.py new file mode 100644 index 000000000..927919266 --- /dev/null +++ b/components/renku_data_services/migrations/versions/d71f0f795d30_allow_environments_to_be_archived.py @@ -0,0 +1,32 @@ +"""allow environments to be archived + +Revision ID: d71f0f795d30 +Revises: 939c7c649bef +Create Date: 2025-01-10 07:50:44.144549 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "d71f0f795d30" +down_revision = "939c7c649bef" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "environments", + sa.Column("is_archived", sa.Boolean(), server_default=sa.text("false"), nullable=False), + schema="sessions", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_column("environments", "is_archived", schema="sessions") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/dcb9648c3c15_make_group_a_separate_field.py b/components/renku_data_services/migrations/versions/dcb9648c3c15_make_group_a_separate_field.py new file mode 100644 index 000000000..6c4d9431f --- /dev/null +++ b/components/renku_data_services/migrations/versions/dcb9648c3c15_make_group_a_separate_field.py @@ -0,0 +1,36 @@ +"""make group a separate field + +Revision ID: dcb9648c3c15 +Revises: 042eeb50cd8e +Create Date: 2025-05-13 01:16:39.076648 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "dcb9648c3c15" +down_revision = "042eeb50cd8e" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column("k8s_objects", sa.Column("group", sa.String(), nullable=True), schema="common") + op.create_index(op.f("ix_common_k8s_objects_group"), "k8s_objects", ["group"], unique=False, schema="common") + op.execute( + "UPDATE common.k8s_objects " + "SET \"group\" = SPLIT_PART(k8s_objects.version, '/', 1), " + "version = SPLIT_PART(k8s_objects.version, '/', 2) " + "WHERE POSITION('/' IN k8s_objects.version) > 0" + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index(op.f("ix_common_k8s_objects_group"), table_name="k8s_objects", schema="common") + op.drop_column("k8s_objects", "group", schema="common") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/ee719a5fabf6_create_staging_table_for_metrics.py b/components/renku_data_services/migrations/versions/ee719a5fabf6_create_staging_table_for_metrics.py new file mode 100644 index 000000000..a3cbfdbef --- /dev/null +++ b/components/renku_data_services/migrations/versions/ee719a5fabf6_create_staging_table_for_metrics.py @@ -0,0 +1,42 @@ +"""create staging table for metrics + +Revision ID: ee719a5fabf6 +Revises: 1d2f64a405aa +Create Date: 2025-03-28 13:59:48.127757 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +from renku_data_services.utils.sqlalchemy import ULIDType + +# revision identifiers, used by Alembic. +revision = "ee719a5fabf6" +down_revision = "1d2f64a405aa" +branch_labels = ("metrics",) +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "metrics", + sa.Column("id", ULIDType(), server_default=sa.text("generate_ulid()"), nullable=False), + sa.Column("anonymous_user_id", sa.String(), nullable=False), + sa.Column("event", sa.String(), nullable=False), + sa.Column("timestamp", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False), + sa.Column( + "metadata", sa.JSON().with_variant(postgresql.JSONB(astext_type=sa.Text()), "postgresql"), nullable=True + ), + sa.PrimaryKeyConstraint("id"), + schema="metrics", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("metrics", schema="metrics") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/f34b87ddd954_migrate_namespaces_to_authzed.py b/components/renku_data_services/migrations/versions/f34b87ddd954_migrate_namespaces_to_authzed.py index 792529335..e1cf32014 100644 --- a/components/renku_data_services/migrations/versions/f34b87ddd954_migrate_namespaces_to_authzed.py +++ b/components/renku_data_services/migrations/versions/f34b87ddd954_migrate_namespaces_to_authzed.py @@ -10,11 +10,6 @@ from alembic import op from sqlalchemy.orm import Session -from renku_data_services.app_config.config import Config as AppConfig -from renku_data_services.message_queue.avro_models.io.renku.events import v2 -from renku_data_services.message_queue.converters import EventConverter -from renku_data_services.message_queue.models import Event -from renku_data_services.migrations.utils import UtilityEventLoop from renku_data_services.namespace.orm import GroupORM # revision identifiers, used by Alembic. @@ -24,21 +19,12 @@ depends_on = None -async def add_events(session: Session, app_config: AppConfig, events: list[Event]) -> None: - for event in events: - await app_config.event_repo.store_event(session, event) - - def upgrade() -> None: - app_config = AppConfig.from_env() with Session(bind=op.get_bind()) as session, session.begin(): # Delete all groups groups = session.scalars(sa.select(GroupORM)).all() for group in groups: session.delete(group) - group_model = group.dump() - events = EventConverter.to_events(group_model, v2.GroupRemoved) - UtilityEventLoop.run(add_events(session, app_config, events)) # ### commands auto generated by Alembic - please adjust! 
###
     op.drop_index("ix_common_group_members_group_id", table_name="group_members", schema="common")
diff --git a/components/renku_data_services/migrations/versions/f4ad62b7b323_cleanup_data_connector_when_slug_is_.py b/components/renku_data_services/migrations/versions/f4ad62b7b323_cleanup_data_connector_when_slug_is_.py
new file mode 100644
index 000000000..7f6863dfd
--- /dev/null
+++ b/components/renku_data_services/migrations/versions/f4ad62b7b323_cleanup_data_connector_when_slug_is_.py
@@ -0,0 +1,50 @@
+"""Cleanup data connector when slug is removed
+
+Revision ID: f4ad62b7b323
+Revises: dcb9648c3c15
+Create Date: 2025-05-19 07:15:11.989650
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "f4ad62b7b323"
+down_revision = "dcb9648c3c15"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    """Register a trigger and function to remove a data connector when its slug is removed.
+
+    This is necessary because we only have a foreign key on the slugs table pointing to data connectors, so
+    we remove slugs when a data connector is removed. But we also want to remove data connectors when a slug
+    is removed, because this can occur when you delete a group and all data connectors within the group should
+    also be deleted."""
+    op.execute(sa.text("LOCK TABLE common.entity_slugs IN EXCLUSIVE MODE"))
+    op.execute(sa.text("LOCK TABLE storage.data_connectors IN EXCLUSIVE MODE"))
+    # NOTE: The OLD variable holds the row that is being deleted (i.e. the row that fired the trigger)
+    op.execute("""CREATE OR REPLACE FUNCTION delete_data_connector_after_slug_deletion()
+RETURNS TRIGGER AS
+$$
+BEGIN
+    DELETE FROM storage.data_connectors WHERE data_connectors.id = OLD.data_connector_id;
+    RETURN OLD;
+END;
+$$
+LANGUAGE plpgsql;""")
+    op.execute("""CREATE OR REPLACE TRIGGER delete_data_connector_after_slug_deletion
+AFTER DELETE ON common.entity_slugs
+FOR EACH ROW
+EXECUTE FUNCTION delete_data_connector_after_slug_deletion();""")
+    # NOTE: Here we clean up the data connectors which have neither a namespaced slug nor a global slug.
+    op.execute("""DELETE FROM storage.data_connectors
+WHERE data_connectors.id NOT IN (
+    SELECT entity_slugs.data_connector_id FROM common.entity_slugs WHERE entity_slugs.data_connector_id IS NOT NULL
+) AND data_connectors.global_slug IS NULL""")
+
+
+def downgrade() -> None:
+    op.execute("DROP TRIGGER IF EXISTS delete_data_connector_after_slug_deletion ON common.entity_slugs CASCADE;")
+    op.execute("DROP FUNCTION IF EXISTS delete_data_connector_after_slug_deletion CASCADE;")
diff --git a/components/renku_data_services/migrations/versions/f6203f71982a_authz_add_groups_and_namespaces_in_.py b/components/renku_data_services/migrations/versions/f6203f71982a_authz_add_groups_and_namespaces_in_.py
index 2fec2e15d..cef77f220 100644
--- a/components/renku_data_services/migrations/versions/f6203f71982a_authz_add_groups_and_namespaces_in_.py
+++ b/components/renku_data_services/migrations/versions/f6203f71982a_authz_add_groups_and_namespaces_in_.py
@@ -6,11 +6,12 @@
 
 """
 
-import logging
-
+from renku_data_services.app_config import logging
 from renku_data_services.authz.config import AuthzConfig
 from renku_data_services.authz.schemas import v2
 
+logger = logging.getLogger(__name__)
+
 # revision identifiers, used by Alembic.
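+# NOTE: The delete_data_connector_after_slug_deletion trigger from revision
+# f4ad62b7b323 (above) makes a slug deletion cascade to the data connector row
+# itself. A minimal sketch of how that behaviour could be exercised against a
+# test database; this helper is hypothetical and not part of either revision:
+#
+#     import sqlalchemy as sa
+#
+#     def delete_connector_via_slug(engine: sa.Engine, dc_id: str) -> None:
+#         """Deleting the slug row lets the trigger remove the connector too."""
+#         with engine.begin() as connection:
+#             connection.execute(
+#                 sa.text("DELETE FROM common.entity_slugs WHERE data_connector_id = :id"),
+#                 {"id": dc_id},
+#             )
+#             remaining = connection.execute(
+#                 sa.text("SELECT COUNT(*) FROM storage.data_connectors WHERE id = :id"),
+#                 {"id": dc_id},
+#             ).scalar_one()
+#             assert remaining == 0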
revision = "f6203f71982a" down_revision = "7e5edc3b84b9" @@ -22,7 +23,7 @@ def upgrade() -> None: config = AuthzConfig.from_env() client = config.authz_client() responses = v2.upgrade(client) - logging.info( + logger.info( f"Finished upgrading the Authz schema to version 2 in Alembic revision {revision}, response: {responses}" ) @@ -31,6 +32,6 @@ def downgrade() -> None: config = AuthzConfig.from_env() client = config.authz_client() responses = v2.downgrade(client) - logging.info( + logger.info( f"Finished downgrading the Authz schema from version 2 in Alembic revision {revision}, response: {responses}" ) diff --git a/components/renku_data_services/migrations/versions/fe3b7470d226_remove_redis_message_queue.py b/components/renku_data_services/migrations/versions/fe3b7470d226_remove_redis_message_queue.py new file mode 100644 index 000000000..ce15dd832 --- /dev/null +++ b/components/renku_data_services/migrations/versions/fe3b7470d226_remove_redis_message_queue.py @@ -0,0 +1,43 @@ +"""remove redis message queue + +Revision ID: fe3b7470d226 +Revises: cfda91a3a6a6 +Create Date: 2025-06-30 16:28:18.919140 + +""" + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +# revision identifiers, used by Alembic. +revision = "fe3b7470d226" +down_revision = "cfda91a3a6a6" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_table("events", schema="events") + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "events", + sa.Column( + "id", + sa.INTEGER(), + sa.Identity(always=True, start=1, increment=1, minvalue=1, maxvalue=2147483647, cycle=False, cache=1), + autoincrement=True, + nullable=False, + ), + sa.Column("queue", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column("payload", postgresql.JSONB(astext_type=sa.Text()), autoincrement=False, nullable=False), + sa.Column("timestamp_utc", postgresql.TIMESTAMP(), autoincrement=False, nullable=False), + sa.PrimaryKeyConstraint("id", name="events_pkey"), + schema="events", + ) + # ### end Alembic commands ### diff --git a/components/renku_data_services/namespace/api.spec.yaml b/components/renku_data_services/namespace/api.spec.yaml index 78393eac3..e38715378 100644 --- a/components/renku_data_services/namespace/api.spec.yaml +++ b/components/renku_data_services/namespace/api.spec.yaml @@ -12,13 +12,9 @@ paths: get: summary: Get all groups parameters: - - in: query - description: query parameters - name: params - style: form - explode: true - schema: - $ref: "#/components/schemas/GroupsGetQuery" + - $ref: "#/components/parameters/PaginationRequestPage" + - $ref: "#/components/parameters/PaginationRequestPerPage" + - $ref: "#/components/parameters/OnlyDirectMember" responses: "200": description: List of groups @@ -260,13 +256,10 @@ paths: get: summary: Get all namespaces parameters: - - in: query - description: query parameters - name: params - style: form - explode: true - schema: - $ref: "#/components/schemas/NamespaceGetQuery" + - $ref: "#/components/parameters/PaginationRequestPage" + - $ref: "#/components/parameters/PaginationRequestPerPage" + - $ref: "#/components/parameters/MinimumRole" + - $ref: "#/components/parameters/NamespaceKind" responses: "200": description: List of namespaces @@ -325,6 +318,37 @@ paths: $ref: "#/components/responses/Error" tags: - namespaces + /namespaces/{first_slug}/{second_slug}: + 
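+    # NOTE: A minimal client-side sketch for this two-level lookup; the base URL is
+    # hypothetical (any /api/data style prefix is an assumption about the deployment
+    # and is not defined in this spec):
+    #
+    #     import httpx
+    #
+    #     def get_project_namespace(base_url: str, first: str, second: str) -> dict:
+    #         response = httpx.get(f"{base_url}/namespaces/{first}/{second}")
+    #         response.raise_for_status()
+    #         return response.json()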
get: + summary: Get a namespace + parameters: + - in: path + name: first_slug + required: true + schema: + $ref: "#/components/schemas/Slug" + - in: path + name: second_slug + required: true + schema: + $ref: "#/components/schemas/Slug" + responses: + "200": + description: The namespace + content: + application/json: + schema: + $ref: "#/components/schemas/NamespaceResponse" + "404": + description: The namespace does not exist + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - namespaces components: schemas: GroupResponseList: @@ -341,7 +365,7 @@ components: name: $ref: "#/components/schemas/NamespaceName" slug: - $ref: "#/components/schemas/Slug" + $ref: "#/components/schemas/SlugResponse" creation_date: $ref: "#/components/schemas/CreationDate" created_by: @@ -411,6 +435,11 @@ components: # - cannot contain uppercase characters pattern: '^(?!.*\.git$|.*\.atom$|.*[\-._][\-._].*)[a-z0-9][a-z0-9\-_.]*$' example: "a-slug-example" + SlugResponse: + description: A command-line/url friendly name for a namespace + type: string + minLength: 1 + example: "a-slug-example" CreationDate: description: The date and time the resource was created (in UTC and ISO-8601 format) type: string @@ -500,6 +529,7 @@ components: enum: - group - user + - project NamespaceResponseList: description: A list of Renku namespaces type: array @@ -514,17 +544,20 @@ components: name: $ref: "#/components/schemas/NamespaceName" slug: - $ref: "#/components/schemas/Slug" + $ref: "#/components/schemas/SlugResponse" creation_date: $ref: "#/components/schemas/CreationDate" created_by: $ref: "#/components/schemas/KeycloakId" namespace_kind: $ref: "#/components/schemas/NamespaceKind" + path: + $ref: "#/components/schemas/SlugResponse" required: - "id" - "namespace_kind" - "slug" + - "path" example: id: "01AN4Z79ZS5XN0F25N3DB94T4R" name: "R-Project Group" @@ -532,6 +565,7 @@ components: created_by: "owner-keycloak-id" creation_date: "2024-03-04T13:04:45Z" namespace_kind: "group" + path: "r-project" UserId: type: string description: Keycloak user ID @@ -543,23 +577,15 @@ components: example: John minLength: 1 maxLength: 256 - NamespaceGetQuery: - description: Query params for namespace get request - allOf: - - $ref: "#/components/schemas/PaginationRequest" - - properties: - minimum_role: - description: A minimum role to filter results by. - $ref: "#/components/schemas/GroupRole" - GroupsGetQuery: - description: Query params for namespace get request - allOf: - - $ref: "#/components/schemas/PaginationRequest" - - properties: - direct_member: - description: A flag to filter groups where the user is a direct member. 
-              type: boolean
-              default: false
+    NamespaceGetQueryKind:
+      type: array
+      description: Which namespace kinds to include in the response
+      items:
+        $ref: "#/components/schemas/NamespaceKind"
+      default:
+        - user
+        - group
+      minItems: 1
     GroupPermissions:
       description: The set of permissions on a group
       type: object
       properties:
         write:
           description: The user can create and modify entities on the group
           type: boolean
         delete:
           description: The user can delete the group
           type: boolean
         change_membership:
           description: The user can manage group members
           type: boolean
-    PaginationRequest:
-      type: object
-      additionalProperties: false
-      properties:
-        page:
-          description: Result's page number starting from 1
-          type: integer
-          minimum: 1
-          default: 1
-        per_page:
-          description: The number of results per page
-          type: integer
-          minimum: 1
-          maximum: 100
-          default: 20
+    PaginationRequestPage:
+      description: Result's page number starting from 1
+      type: integer
+      minimum: 1
+      default: 1
+    PaginationRequestPerPage:
+      description: The number of results per page
+      type: integer
+      minimum: 1
+      maximum: 100
+      default: 20
     ErrorResponse:
       type: object
       properties:
         error:
           type: object
           properties:
             code:
               type: integer
               example: 1404
               exclusiveMinimum: 0
             detail:
               type: string
               example: A more detailed optional message showing what the problem was
             message:
               type: string
               example: Something went wrong - please try again later
           required:
             - "code"
             - "message"
       required:
         - "error"
+  parameters:
+    PaginationRequestPage:
+      in: query
+      description: the current page in paginated response
+      name: page
+      style: form
+      explode: true
+      schema:
+        $ref: "#/components/schemas/PaginationRequestPage"
+    PaginationRequestPerPage:
+      in: query
+      description: the number of results per page in a paginated response
+      name: per_page
+      style: form
+      explode: true
+      schema:
+        $ref: "#/components/schemas/PaginationRequestPerPage"
+    MinimumRole:
+      in: query
+      description: The minimum role the user should have in the resources returned
+      name: minimum_role
+      style: form
+      explode: true
+      schema:
+        $ref: "#/components/schemas/GroupRole"
+    NamespaceKind:
+      in: query
+      description: namespace kinds query parameter
+      name: kinds
+      style: form
+      explode: true
+      schema:
+        $ref: "#/components/schemas/NamespaceGetQueryKind"
+    OnlyDirectMember:
+      in: query
+      description: A flag to filter for results where the user is a direct member.
+ name: direct_member + style: form + explode: true + schema: + type: boolean + default: false responses: Error: description: The schema for all 4xx and 5xx responses diff --git a/components/renku_data_services/namespace/apispec.py b/components/renku_data_services/namespace/apispec.py index 174809e30..656a5dbec 100644 --- a/components/renku_data_services/namespace/apispec.py +++ b/components/renku_data_services/namespace/apispec.py @@ -1,6 +1,6 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2024-10-22T07:46:53+00:00 +# timestamp: 2025-04-04T08:11:56+00:00 from __future__ import annotations @@ -21,6 +21,7 @@ class GroupRole(Enum): class NamespaceKind(Enum): group = "group" user = "user" + project = "project" class NamespaceResponse(BaseAPISpec): @@ -34,31 +35,35 @@ class NamespaceResponse(BaseAPISpec): name: Optional[str] = Field( None, description="Renku group or namespace name", - example="My Renku Group :)", + examples=["My Renku Group :)"], max_length=99, min_length=1, ) slug: str = Field( ..., description="A command-line/url friendly name for a namespace", - example="a-slug-example", - max_length=99, + examples=["a-slug-example"], min_length=1, - pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-z0-9][a-z0-9\\-_.]*$", ) creation_date: Optional[datetime] = Field( None, description="The date and time the resource was created (in UTC and ISO-8601 format)", - example="2023-11-01T17:32:28Z", + examples=["2023-11-01T17:32:28Z"], ) created_by: Optional[str] = Field( None, description="Member's KeyCloak ID", - example="123-keycloak-user-id-456", + examples=["123-keycloak-user-id-456"], min_length=1, pattern="^[A-Za-z0-9-]+$", ) namespace_kind: NamespaceKind + path: str = Field( + ..., + description="A command-line/url friendly name for a namespace", + examples=["a-slug-example"], + min_length=1, + ) class GroupPermissions(BaseAPISpec): @@ -69,28 +74,45 @@ class GroupPermissions(BaseAPISpec): ) -class PaginationRequest(BaseAPISpec): - model_config = ConfigDict( - extra="forbid", - ) - page: int = Field(1, description="Result's page number starting from 1", ge=1) - per_page: int = Field( - 20, description="The number of results per page", ge=1, le=100 - ) - - class Error(BaseAPISpec): - code: int = Field(..., example=1404, gt=0) + code: int = Field(..., examples=[1404], gt=0) detail: Optional[str] = Field( - None, example="A more detailed optional message showing what the problem was" + None, examples=["A more detailed optional message showing what the problem was"] + ) + message: str = Field( + ..., examples=["Something went wrong - please try again later"] ) - message: str = Field(..., example="Something went wrong - please try again later") class ErrorResponse(BaseAPISpec): error: Error +class GroupsGetParametersQuery(BaseAPISpec): + page: Optional[int] = Field( + None, description="Result's page number starting from 1", ge=1 + ) + per_page: Optional[int] = Field( + None, description="The number of results per page", ge=1, le=100 + ) + direct_member: bool = False + + +class NamespacesGetParametersQuery(BaseAPISpec): + page: Optional[int] = Field( + None, description="Result's page number starting from 1", ge=1 + ) + per_page: Optional[int] = Field( + None, description="The number of results per page", ge=1, le=100 + ) + minimum_role: Optional[GroupRole] = None + kinds: Optional[List[NamespaceKind]] = Field( + None, + description="Which namespace kinds to include in the response", + min_length=1, + ) + + class GroupResponse(BaseAPISpec): id: str = Field( 
..., @@ -102,27 +124,25 @@ class GroupResponse(BaseAPISpec): name: str = Field( ..., description="Renku group or namespace name", - example="My Renku Group :)", + examples=["My Renku Group :)"], max_length=99, min_length=1, ) slug: str = Field( ..., description="A command-line/url friendly name for a namespace", - example="a-slug-example", - max_length=99, + examples=["a-slug-example"], min_length=1, - pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-z0-9][a-z0-9\\-_.]*$", ) creation_date: datetime = Field( ..., description="The date and time the resource was created (in UTC and ISO-8601 format)", - example="2023-11-01T17:32:28Z", + examples=["2023-11-01T17:32:28Z"], ) created_by: str = Field( ..., description="Member's KeyCloak ID", - example="123-keycloak-user-id-456", + examples=["123-keycloak-user-id-456"], min_length=1, pattern="^[A-Za-z0-9-]+$", ) @@ -138,14 +158,14 @@ class GroupPostRequest(BaseAPISpec): name: str = Field( ..., description="Renku group or namespace name", - example="My Renku Group :)", + examples=["My Renku Group :)"], max_length=99, min_length=1, ) slug: str = Field( ..., description="A command-line/url friendly name for a namespace", - example="a-slug-example", + examples=["a-slug-example"], max_length=99, min_length=1, pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-z0-9][a-z0-9\\-_.]*$", @@ -162,14 +182,14 @@ class GroupPatchRequest(BaseAPISpec): name: Optional[str] = Field( None, description="Renku group or namespace name", - example="My Renku Group :)", + examples=["My Renku Group :)"], max_length=99, min_length=1, ) slug: Optional[str] = Field( None, description="A command-line/url friendly name for a namespace", - example="a-slug-example", + examples=["a-slug-example"], max_length=99, min_length=1, pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-z0-9][a-z0-9\\-_.]*$", @@ -186,13 +206,13 @@ class GroupMemberResponse(BaseAPISpec): id: str = Field( ..., description="Keycloak user ID", - example="f74a228b-1790-4276-af5f-25c2424e9b0c", + examples=["f74a228b-1790-4276-af5f-25c2424e9b0c"], pattern="^[A-Za-z0-9]{1}[A-Za-z0-9-]+$", ) namespace: Optional[str] = Field( None, description="A command-line/url friendly name for a namespace", - example="a-slug-example", + examples=["a-slug-example"], max_length=99, min_length=1, pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-z0-9][a-z0-9\\-_.]*$", @@ -200,14 +220,14 @@ class GroupMemberResponse(BaseAPISpec): first_name: Optional[str] = Field( None, description="First or last name of the user", - example="John", + examples=["John"], max_length=256, min_length=1, ) last_name: Optional[str] = Field( None, description="First or last name of the user", - example="John", + examples=["John"], max_length=256, min_length=1, ) @@ -221,7 +241,7 @@ class GroupMemberPatchRequest(BaseAPISpec): id: str = Field( ..., description="Keycloak user ID", - example="f74a228b-1790-4276-af5f-25c2424e9b0c", + examples=["f74a228b-1790-4276-af5f-25c2424e9b0c"], pattern="^[A-Za-z0-9]{1}[A-Za-z0-9-]+$", ) role: GroupRole @@ -231,15 +251,17 @@ class GroupMemberResponseList(RootModel[List[GroupMemberResponse]]): root: List[GroupMemberResponse] = Field( ..., description="List of members and their access level to the group", - example=[ - {"id": "some-keycloak-user-id", "role": "owner"}, - { - "id": "another-keycloak-user-id", - "role": "member", - "namespace": "user-slug", - "first_name": "John", - "last_name": "Doe", - }, + examples=[ + [ + {"id": "some-keycloak-user-id", "role": "owner"}, + { + "id": 
"another-keycloak-user-id", + "role": "member", + "namespace": "user-slug", + "first_name": "John", + "last_name": "Doe", + }, + ] ], min_length=0, ) @@ -249,9 +271,11 @@ class GroupMemberPatchRequestList(RootModel[List[GroupMemberPatchRequest]]): root: List[GroupMemberPatchRequest] = Field( ..., description="List of members and their access level to the group", - example=[ - {"id": "some-keycloak-user-id", "role": "owner"}, - {"id": "another-keycloak-user-id", "role": "member"}, + examples=[ + [ + {"id": "some-keycloak-user-id", "role": "owner"}, + {"id": "another-keycloak-user-id", "role": "member"}, + ] ], min_length=0, ) @@ -261,25 +285,5 @@ class NamespaceResponseList(RootModel[List[NamespaceResponse]]): root: List[NamespaceResponse] = Field(..., description="A list of Renku namespaces") -class NamespaceGetQuery(PaginationRequest): - minimum_role: Optional[GroupRole] = Field( - None, description="A minimum role to filter results by." - ) - - -class GroupsGetQuery(PaginationRequest): - direct_member: bool = Field( - False, description="A flag to filter groups where the user is a direct member." - ) - - -class GroupsGetParametersQuery(BaseAPISpec): - params: Optional[GroupsGetQuery] = None - - -class NamespacesGetParametersQuery(BaseAPISpec): - params: Optional[NamespaceGetQuery] = None - - class GroupResponseList(RootModel[List[GroupResponse]]): root: List[GroupResponse] = Field(..., description="A list of Renku groups") diff --git a/components/renku_data_services/namespace/apispec_base.py b/components/renku_data_services/namespace/apispec_base.py index 476c07927..07de2bd2b 100644 --- a/components/renku_data_services/namespace/apispec_base.py +++ b/components/renku_data_services/namespace/apispec_base.py @@ -1,8 +1,16 @@ """Base models for API specifications.""" +import pydantic from pydantic import BaseModel, field_validator from ulid import ULID +# NOTE: We are monkeypatching the regex engine for the root model because +# the datamodel code generator that makes classes from the API spec does not +# support setting this for the root model and by default the root model is using +# the rust regex create which does not support lookahead/behind regexs and we need +# that functionality to parse slugs and prevent certain suffixes in slug names. 
+pydantic.RootModel.model_config = {"regex_engine": "python-re"} + class BaseAPISpec(BaseModel): """Base API specification.""" diff --git a/components/renku_data_services/namespace/apispec_enhanced.py b/components/renku_data_services/namespace/apispec_enhanced.py new file mode 100644 index 000000000..6c419461c --- /dev/null +++ b/components/renku_data_services/namespace/apispec_enhanced.py @@ -0,0 +1,18 @@ +"""Modified data classes for the apispec.""" + +from typing import Any + +from pydantic import field_validator + +from renku_data_services.namespace.apispec import NamespacesGetParametersQuery as _NamespacesGetParametersQuery + + +class NamespacesGetParametersQuery(_NamespacesGetParametersQuery): + """The query parameters for listing namespaces.""" + + @field_validator("kinds", mode="before") + @classmethod + def _convert_to_kinds_to_list(cls, value: Any) -> list[str]: + if isinstance(value, list): + return value + return [str(value)] diff --git a/components/renku_data_services/namespace/blueprints.py b/components/renku_data_services/namespace/blueprints.py index ed6010f6c..63b2167f3 100644 --- a/components/renku_data_services/namespace/blueprints.py +++ b/components/renku_data_services/namespace/blueprints.py @@ -7,14 +7,16 @@ from sanic_ext import validate import renku_data_services.base_models as base_models -from renku_data_services.authz.models import Role, UnsavedMember -from renku_data_services.base_api.auth import authenticate, only_authenticated +from renku_data_services.authz.models import Change, Role, UnsavedMember +from renku_data_services.base_api.auth import authenticate, only_authenticated, validate_path_user_id from renku_data_services.base_api.blueprint import BlueprintFactoryResponse, CustomBlueprint from renku_data_services.base_api.misc import validate_body_root_model, validate_query from renku_data_services.base_api.pagination import PaginationRequest, paginate +from renku_data_services.base_models.core import NamespaceSlug, ProjectPath, Slug +from renku_data_services.base_models.metrics import MetricsService from renku_data_services.base_models.validation import validate_and_dump, validated_json from renku_data_services.errors import errors -from renku_data_services.namespace import apispec, models +from renku_data_services.namespace import apispec, apispec_enhanced, models from renku_data_services.namespace.core import validate_group_patch from renku_data_services.namespace.db import GroupRepository @@ -25,15 +27,19 @@ class GroupsBP(CustomBlueprint): group_repo: GroupRepository authenticator: base_models.Authenticator + metrics: MetricsService def get_all(self) -> BlueprintFactoryResponse: """List all groups.""" @authenticate(self.authenticator) - @validate_query(query=apispec.GroupsGetQuery) + @validate_query(query=apispec.GroupsGetParametersQuery) @paginate async def _get_all( - _: Request, user: base_models.APIUser, pagination: PaginationRequest, query: apispec.GroupsGetQuery + _: Request, + user: base_models.APIUser, + pagination: PaginationRequest, + query: apispec.GroupsGetParametersQuery, ) -> tuple[list[dict], int]: groups, rec_count = await self.group_repo.get_groups( user=user, pagination=pagination, direct_member=query.direct_member @@ -54,6 +60,7 @@ def post(self) -> BlueprintFactoryResponse: async def _post(_: Request, user: base_models.APIUser, body: apispec.GroupPostRequest) -> JSONResponse: new_group = models.UnsavedGroup(**body.model_dump()) result = await self.group_repo.insert_group(user=user, payload=new_group) + await 
self.metrics.group_created(user) return validated_json(apispec.GroupResponse, result, 201) return "/groups", ["POST"], _post @@ -62,7 +69,7 @@ def get_one(self) -> BlueprintFactoryResponse: """Get a specific group.""" @authenticate(self.authenticator) - async def _get_one(_: Request, user: base_models.APIUser, slug: str) -> JSONResponse: + async def _get_one(_: Request, user: base_models.APIUser, slug: Slug) -> JSONResponse: result = await self.group_repo.get_group(user=user, slug=slug) return validated_json(apispec.GroupResponse, result) @@ -73,7 +80,7 @@ def delete(self) -> BlueprintFactoryResponse: @authenticate(self.authenticator) @only_authenticated - async def _delete(_: Request, user: base_models.APIUser, slug: str) -> HTTPResponse: + async def _delete(_: Request, user: base_models.APIUser, slug: Slug) -> HTTPResponse: await self.group_repo.delete_group(user=user, slug=slug) return HTTPResponse(status=204) @@ -86,7 +93,7 @@ def patch(self) -> BlueprintFactoryResponse: @only_authenticated @validate(json=apispec.GroupPatchRequest) async def _patch( - _: Request, user: base_models.APIUser, slug: str, body: apispec.GroupPatchRequest + _: Request, user: base_models.APIUser, slug: Slug, body: apispec.GroupPatchRequest ) -> JSONResponse: group_patch = validate_group_patch(body) res = await self.group_repo.update_group(user=user, slug=slug, patch=group_patch) @@ -98,7 +105,7 @@ def get_all_members(self) -> BlueprintFactoryResponse: """List all group members.""" @authenticate(self.authenticator) - async def _get_all_members(_: Request, user: base_models.APIUser, slug: str) -> JSONResponse: + async def _get_all_members(_: Request, user: base_models.APIUser, slug: Slug) -> JSONResponse: members = await self.group_repo.get_group_members(user, slug) return validated_json( apispec.GroupMemberResponseList, @@ -123,7 +130,7 @@ def update_members(self) -> BlueprintFactoryResponse: @only_authenticated @validate_body_root_model(json=apispec.GroupMemberPatchRequestList) async def _update_members( - _: Request, user: base_models.APIUser, slug: str, body: apispec.GroupMemberPatchRequestList + _: Request, user: base_models.APIUser, slug: Slug, body: apispec.GroupMemberPatchRequestList ) -> JSONResponse: members = [UnsavedMember(Role.from_group_role(member.role), member.id) for member in body.root] res = await self.group_repo.update_group_members( @@ -131,6 +138,10 @@ async def _update_members( slug=slug, members=members, ) + + if any(m.change == Change.ADD for m in res): + await self.metrics.group_member_added(user) + return validated_json( apispec.GroupMemberPatchRequestList, [ @@ -148,8 +159,9 @@ def delete_member(self) -> BlueprintFactoryResponse: """Remove a specific user from the list of members of a group.""" @authenticate(self.authenticator) + @validate_path_user_id @only_authenticated - async def _delete_member(_: Request, user: base_models.APIUser, slug: str, user_id: str) -> HTTPResponse: + async def _delete_member(_: Request, user: base_models.APIUser, slug: Slug, user_id: str) -> HTTPResponse: await self.group_repo.delete_group_member(user=user, slug=slug, user_id_to_delete=user_id) return HTTPResponse(status=204) @@ -159,7 +171,7 @@ def get_permissions(self) -> BlueprintFactoryResponse: """Get the permissions of the current user on the group.""" @authenticate(self.authenticator) - async def _get_permissions(_: Request, user: base_models.APIUser, slug: str) -> JSONResponse: + async def _get_permissions(_: Request, user: base_models.APIUser, slug: Slug) -> JSONResponse: permissions = await 
self.group_repo.get_group_permissions(user=user, slug=slug)
             return validated_json(apispec.GroupPermissions, permissions)
 
@@ -170,15 +182,24 @@ def get_namespaces(self) -> BlueprintFactoryResponse:
 
         @authenticate(self.authenticator)
         @only_authenticated
-        @validate_query(query=apispec.NamespaceGetQuery)
+        @validate_query(query=apispec_enhanced.NamespacesGetParametersQuery)
         @paginate
         async def _get_namespaces(
-            request: Request, user: base_models.APIUser, pagination: PaginationRequest, query: apispec.NamespaceGetQuery
+            request: Request,
+            user: base_models.APIUser,
+            pagination: PaginationRequest,
+            query: apispec.NamespacesGetParametersQuery,
         ) -> tuple[list[dict], int]:
             minimum_role = Role.from_group_role(query.minimum_role) if query.minimum_role is not None else None
+            if query.kinds:
+                kinds = [models.NamespaceKind(kind.value) for kind in query.kinds]
+            else:
+                # NOTE: This is for API backwards compatibility reasons; removing or modifying
+                # this default will result in a breaking API change.
+                kinds = [models.NamespaceKind.group, models.NamespaceKind.user]
             nss, total_count = await self.group_repo.get_namespaces(
-                user=user, pagination=pagination, minimum_role=minimum_role
+                user=user, pagination=pagination, minimum_role=minimum_role, kinds=kinds
             )
             return validate_and_dump(
                 apispec.NamespaceResponseList,
                 [
                     dict(
                         id=ns.id,
                         name=ns.name,
-                        slug=ns.latest_slug if ns.latest_slug else ns.slug,
+                        slug=ns.latest_slug
+                        if ns.latest_slug
+                        else (ns.path.second.value if isinstance(ns, models.ProjectNamespace) else ns.path.first.value),
                         created_by=ns.created_by,
                         creation_date=ns.creation_date,
                         namespace_kind=apispec.NamespaceKind(ns.kind.value),
+                        path=ns.path.serialize(),
                     )
                     for ns in nss
                 ],
             )
 
         return "/namespaces", ["GET"], _get_namespaces
 
     def get_namespace(self) -> BlueprintFactoryResponse:
-        """Get namespace by slug."""
+        """Get user or group namespace by slug."""
 
         @authenticate(self.authenticator)
-        async def _get_namespace(_: Request, user: base_models.APIUser, slug: str) -> JSONResponse:
-            ns = await self.group_repo.get_namespace_by_slug(user=user, slug=slug)
+        async def _get_namespace(_: Request, user: base_models.APIUser, slug: Slug) -> JSONResponse:
+            ns = await self.group_repo.get_namespace_by_slug(user=user, slug=NamespaceSlug(slug.value))
             if not ns:
                 raise errors.MissingResourceError(message=f"The namespace with slug {slug} does not exist")
             return validated_json(
                 apispec.NamespaceResponse,
                 dict(
                     id=ns.id,
                     name=ns.name,
-                    slug=ns.latest_slug if ns.latest_slug else ns.slug,
+                    slug=ns.latest_slug or ns.path.last().value,
                     created_by=ns.created_by,
                     creation_date=None,  # NOTE: we do not save creation date in the DB
                     namespace_kind=apispec.NamespaceKind(ns.kind.value),
+                    path=ns.path.serialize(),
                 ),
             )
 
         return "/namespaces/<slug>", ["GET"], _get_namespace
+
+    def get_namespace_second_level(self) -> BlueprintFactoryResponse:
+        """Get project namespaces by slug (e.g.
user1/project2)."""
+
+        @authenticate(self.authenticator)
+        async def _get_namespace_second_level(
+            _: Request, user: base_models.APIUser, first_slug: Slug, second_slug: Slug
+        ) -> JSONResponse:
+            path = ProjectPath.from_strings(first_slug.value, second_slug.value)
+            ns = await self.group_repo.get_namespace_by_path(user=user, path=path)
+            if not ns:
+                raise errors.MissingResourceError(message=f"The namespace with slug {path} does not exist")
+            return validated_json(
+                apispec.NamespaceResponse,
+                dict(
+                    id=ns.id,
+                    name=ns.name,
+                    slug=ns.latest_slug or ns.path.last().value,
+                    created_by=ns.created_by,
+                    creation_date=None,  # NOTE: we do not save creation date in the DB
+                    namespace_kind=apispec.NamespaceKind(ns.kind.value),
+                    path=ns.path.serialize(),
+                ),
+            )
+
+        return "/namespaces/<first_slug>/<second_slug>", ["GET"], _get_namespace_second_level
diff --git a/components/renku_data_services/namespace/db.py b/components/renku_data_services/namespace/db.py
index 154adea31..e84e1334b 100644
--- a/components/renku_data_services/namespace/db.py
+++ b/components/renku_data_services/namespace/db.py
@@ -7,29 +7,41 @@
 from collections.abc import AsyncGenerator, Callable
 from contextlib import nullcontext
 from datetime import UTC, datetime
+from typing import Any, overload
 
-from sanic.log import logger
-from sqlalchemy import delete, func, select, text
+from sqlalchemy import Select, delete, func, select, text
 from sqlalchemy.exc import IntegrityError
-from sqlalchemy.ext.asyncio import AsyncSession
-from sqlalchemy.orm import joinedload
+from sqlalchemy.ext.asyncio import AsyncSession, AsyncSessionTransaction
+from sqlalchemy.orm import joinedload, selectinload
+from sqlalchemy.sql.functions import count as sa_count
+from ulid import ULID
 
 import renku_data_services.base_models as base_models
 from renku_data_services import errors
+from renku_data_services.app_config import logging
 from renku_data_services.authz.authz import Authz, AuthzOperation, ResourceType
 from renku_data_services.authz.models import CheckPermissionItem, Member, MembershipChange, Role, Scope, UnsavedMember
-from renku_data_services.base_api.pagination import PaginationRequest
-from renku_data_services.message_queue import events
-from renku_data_services.message_queue.avro_models.io.renku.events.v2 import GroupAdded, GroupRemoved, GroupUpdated
-from renku_data_services.message_queue.db import EventRepository
-from renku_data_services.message_queue.interface import IMessageQueue
-from renku_data_services.message_queue.redis_queue import dispatch_message
+from renku_data_services.base_api.pagination import PaginationRequest, paginate_queries
+from renku_data_services.base_models.core import (
+    DataConnectorInProjectPath,
+    DataConnectorPath,
+    NamespacePath,
+    NamespaceSlug,
+    ProjectPath,
+    Slug,
+)
+from renku_data_services.data_connectors.models import DataConnector
 from renku_data_services.namespace import models
 from renku_data_services.namespace import orm as schemas
+from renku_data_services.project.orm import ProjectORM
+from renku_data_services.search.db import SearchUpdatesRepo
+from renku_data_services.search.decorators import update_search_document
 from renku_data_services.users import models as user_models
 from renku_data_services.users import orm as user_schemas
 from renku_data_services.utils.core import with_db_transaction
 
+logger = logging.getLogger(__name__)
+
 
 class GroupRepository:
     """Repository for groups."""
 
@@ -37,18 +49,16 @@ class GroupRepository:
     def __init__(
         self,
         session_maker: Callable[..., AsyncSession],
-        event_repo:
EventRepository, group_authz: Authz, - message_queue: IMessageQueue, + search_updates_repo: SearchUpdatesRepo, ) -> None: self.session_maker = session_maker self.authz: Authz = group_authz - self.event_repo: EventRepository = event_repo - self.message_queue: IMessageQueue = message_queue + self.search_updates_repo = search_updates_repo @with_db_transaction @Authz.authz_change(AuthzOperation.insert_many, ResourceType.user_namespace) - @dispatch_message(events.InsertUserNamespace) + @update_search_document async def generate_user_namespaces(self, *, session: AsyncSession | None = None) -> list[user_models.UserInfo]: """Generate user namespaces if the user table has data and the namespaces table is empty.""" if not session: @@ -90,6 +100,8 @@ async def get_groups( groups_orm = result.scalars().all() stmt_count = select(func.count()).select_from(schemas.GroupORM) + if direct_member: + stmt_count = stmt_count.where(schemas.GroupORM.id.in_(group_ids)) result = await session.execute(stmt_count) n_total_elements = result.scalar() or 0 return [g.dump() for g in groups_orm], n_total_elements @@ -105,18 +117,18 @@ async def get_all_groups(self, requested_by: base_models.APIUser) -> AsyncGenera yield group.dump() async def _get_group( - self, session: AsyncSession, user: base_models.APIUser, slug: str, load_members: bool = False + self, session: AsyncSession, user: base_models.APIUser, slug: Slug, load_members: bool = False ) -> tuple[schemas.GroupORM, list[Member]]: transaction = nullcontext() if session.in_transaction() else session.begin() async with transaction: stmt = select(schemas.GroupORM).where( - schemas.GroupORM.namespace.has(schemas.NamespaceORM.slug == slug.lower()) + schemas.GroupORM.namespace.has(schemas.NamespaceORM.slug == slug.value.lower()) ) group = await session.scalar(stmt) if not group: stmt_old_ns = ( select(schemas.NamespaceOldORM) - .where(schemas.NamespaceOldORM.slug == slug.lower()) + .where(schemas.NamespaceOldORM.slug == slug.value.lower()) .order_by(schemas.NamespaceOldORM.created_at.desc()) .limit(1) .options( @@ -135,7 +147,7 @@ async def _get_group( members = await self.authz.members(user, ResourceType.group, group.id) return group, members - async def get_group(self, user: base_models.APIUser, slug: str) -> models.Group: + async def get_group(self, user: base_models.APIUser, slug: Slug) -> models.Group: """Get a group from the DB.""" async with self.session_maker() as session: group_orm, _ = await self._get_group(session, user, slug) @@ -143,7 +155,7 @@ async def get_group(self, user: base_models.APIUser, slug: str) -> models.Group: @with_db_transaction async def get_group_members( - self, user: base_models.APIUser, slug: str, *, session: AsyncSession | None = None + self, user: base_models.APIUser, slug: Slug, *, session: AsyncSession | None = None ) -> list[models.GroupMemberDetails]: """Get all the users that are direct members of a group.""" if not session: @@ -169,24 +181,24 @@ async def get_group_members( ] @with_db_transaction - @dispatch_message(GroupUpdated) + @update_search_document async def update_group( - self, user: base_models.APIUser, slug: str, patch: models.GroupPatch, *, session: AsyncSession | None = None + self, user: base_models.APIUser, slug: Slug, patch: models.GroupPatch, *, session: AsyncSession | None = None ) -> models.Group: """Update a group in the DB.""" if not session: raise errors.ProgrammingError(message="A database session is required") group, _ = await self._get_group(session, user, slug) - if group.namespace.slug != slug.lower(): 
+ if group.namespace.slug != slug.value.lower(): raise errors.UpdatingWithStaleContentError( - message=f"You cannot update a group by using its old slug {slug}.", + message=f"You cannot update a group by using its old slug {slug.value}.", detail=f"The latest slug is {group.namespace.slug}, please use this for updates.", ) authorized = await self.authz.has_permission(user, ResourceType.group, group.id, Scope.DELETE) if not authorized: raise errors.MissingResourceError( - message=f"Group with slug '{slug}' does not exist or you do not have access to it." + message=f"Group with slug '{slug.value}' does not exist or you do not have access to it." ) new_slug_str = patch.slug.lower() if patch.slug is not None else None @@ -209,11 +221,10 @@ async def update_group( return group.dump() @with_db_transaction - @dispatch_message(events.GroupMembershipChanged) async def update_group_members( self, user: base_models.APIUser, - slug: str, + slug: Slug, members: list[UnsavedMember], *, session: AsyncSession | None = None, @@ -222,9 +233,9 @@ async def update_group_members( if not session: raise errors.ProgrammingError(message="A database session is required") group, _ = await self._get_group(session, user, slug, load_members=True) - if group.namespace.slug != slug.lower(): + if group.namespace.slug != slug.value.lower(): raise errors.UpdatingWithStaleContentError( - message=f"You cannot update group members by using an old group slug {slug}.", + message=f"You cannot update group members by using an old group slug {slug.value}.", detail=f"The latest slug is {group.namespace.slug}, please use this for updates.", ) @@ -235,9 +246,9 @@ async def update_group_members( @with_db_transaction @Authz.authz_change(AuthzOperation.delete, ResourceType.group) - @dispatch_message(GroupRemoved) + @update_search_document async def delete_group( - self, user: base_models.APIUser, slug: str, *, session: AsyncSession | None = None + self, user: base_models.APIUser, slug: Slug, *, session: AsyncSession | None = None ) -> models.DeletedGroup | None: """Delete a specific group.""" if not session: @@ -247,9 +258,9 @@ async def delete_group( group, _ = await self._get_group(session, user, slug) except errors.MissingResourceError: return None - if group.namespace.slug != slug.lower(): + if group.namespace.slug != slug.value.lower(): raise errors.UpdatingWithStaleContentError( - message=f"You cannot delete a group by using an old group slug {slug}.", + message=f"You cannot delete a group by using an old group slug {slug.value}.", detail=f"The latest slug is {group.namespace.slug}, please use this for deletions.", ) # NOTE: We have a stored procedure that gets triggered when a project slug is removed to remove the project. 
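
# --- Reviewer note (sketch, not part of the patch): the hunks above make every
# mutating GroupRepository method resolve its target through the slug history
# table, so reads keep working after a rename while writes addressed at a stale
# slug fail fast. A minimal sketch of the intended behavior inside an async
# context, assuming a repository wired as in this diff, a group renamed from
# "old-team" to "new-team", and that a Slug can be built directly from a string
# (as NamespaceSlug(slug.value) is above); `p` stands for a GroupPatch:
#
#     group = await group_repo.get_group(user=user, slug=Slug("old-team"))      # old slug still resolves for reads
#     await group_repo.update_group(user=user, slug=Slug("old-team"), patch=p)  # raises UpdatingWithStaleContentError
#     await group_repo.update_group(user=user, slug=Slug("new-team"), patch=p)  # latest slug is accepted
#
# The same guard protects update_group_members and delete_group, so a rename
# cannot race a mutation that still uses the old name. ---
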
@@ -261,9 +272,8 @@ async def delete_group( return models.DeletedGroup(id=group.id) @with_db_transaction - @dispatch_message(events.GroupMembershipChanged) async def delete_group_member( - self, user: base_models.APIUser, slug: str, user_id_to_delete: str, *, session: AsyncSession | None = None + self, user: base_models.APIUser, slug: Slug, user_id_to_delete: str, *, session: AsyncSession | None = None ) -> list[MembershipChange]: """Delete a specific group member.""" if not session: @@ -276,7 +286,7 @@ async def delete_group_member( @with_db_transaction @Authz.authz_change(AuthzOperation.create, ResourceType.group) - @dispatch_message(GroupAdded) + @update_search_document async def insert_group( self, user: base_models.APIUser, @@ -306,13 +316,13 @@ async def insert_group( raise errors.ValidationError( message="The slug for the group should be unique but it already exists in the database", detail="Please modify the slug field and then retry", - ) + ) from err raise err # NOTE: This is needed to populate the relationship fields in the group after inserting the ID above await session.refresh(group) return group.dump() - async def get_group_permissions(self, user: base_models.APIUser, slug: str) -> models.GroupPermissions: + async def get_group_permissions(self, user: base_models.APIUser, slug: Slug) -> models.GroupPermissions: """Get the permissions of the user on a given group.""" group = await self.get_group(user=user, slug=slug) @@ -335,8 +345,12 @@ async def get_group_permissions(self, user: base_models.APIUser, slug: str) -> m return permissions async def get_namespaces( - self, user: base_models.APIUser, pagination: PaginationRequest, minimum_role: Role | None = None - ) -> tuple[list[models.Namespace], int]: + self, + user: base_models.APIUser, + pagination: PaginationRequest, + minimum_role: Role | None = None, + kinds: list[models.NamespaceKind] | None = None, + ) -> tuple[list[models.UserNamespace | models.GroupNamespace | models.ProjectNamespace], int]: """Get all namespaces.""" scope = Scope.READ if minimum_role == Role.VIEWER: @@ -347,27 +361,45 @@ async def get_namespaces( scope = Scope.DELETE async with self.session_maker() as session, session.begin(): - group_ids = await self.authz.resources_with_permission(user, user.id, ResourceType.group, scope) - group_ns_stmt = select(schemas.NamespaceORM).where(schemas.NamespaceORM.group_id.in_(group_ids)) - output = [] - personal_ns_stmt = select(schemas.NamespaceORM).where(schemas.NamespaceORM.user_id == user.id) - personal_ns = await session.scalar(personal_ns_stmt) - if personal_ns and pagination.page == 1: - output.append(personal_ns.dump()) - # NOTE: in the first page the personal namespace is added, so the offset and per page params are modified - group_per_page = pagination.per_page - 1 if personal_ns and pagination.page == 1 else pagination.per_page - group_offset = pagination.offset - 1 if personal_ns and pagination.page > 1 else pagination.offset - group_ns = await session.scalars( - group_ns_stmt.limit(group_per_page).offset(group_offset).order_by(schemas.NamespaceORM.id) - ) - group_count = ( - await session.scalar(group_ns_stmt.with_only_columns(func.count(schemas.NamespaceORM.id))) or 0 - ) - if personal_ns is not None: - group_count += 1 - for ns_orm in group_ns: - output.append(ns_orm.dump()) - return output, group_count + group_ids: list[str] = [] + project_ids: list[str] = [] + user_ids: list[str] = [] + if not kinds or models.NamespaceKind.group in kinds: + group_ids = await 
self.authz.resources_with_permission(user, user.id, ResourceType.group, scope) + if (not kinds or models.NamespaceKind.user in kinds) and user.id: + user_ids.append(user.id) + if not kinds or models.NamespaceKind.project in kinds: + project_ids = await self.authz.resources_with_permission(user, user.id, ResourceType.project, scope) + + queries: list[tuple[Select[tuple[Any]], int]] = [ + ( + select(schemas.NamespaceORM) + .where(schemas.NamespaceORM.user_id.in_(user_ids)) + .order_by(schemas.NamespaceORM.id), + len(user_ids), + ), + ( + select(schemas.NamespaceORM) + .where(schemas.NamespaceORM.group_id.in_(group_ids)) + .order_by(schemas.NamespaceORM.id), + len(group_ids), + ), + (select(ProjectORM).where(ProjectORM.id.in_(project_ids)).order_by(ProjectORM.id), len(project_ids)), + ] + + results = await paginate_queries(pagination, session, queries) + output: list[models.UserNamespace | models.GroupNamespace | models.ProjectNamespace] = [] + for res in results: + match res: + case schemas.NamespaceORM(): + output.append(res.dump()) + case ProjectORM(): + output.append(res.dump_as_namespace()) + case x: + raise errors.ProgrammingError( + message=f"Got an unexpected type of object when listing namespaces {type(x)}" + ) + return output, len(group_ids) + len(user_ids) + len(project_ids) async def _get_user_namespaces(self) -> AsyncGenerator[user_models.UserInfo, None]: """Lists all user namespaces without regard for authorization or permissions, used for migrations.""" @@ -378,15 +410,26 @@ async def _get_user_namespaces(self) -> AsyncGenerator[user_models.UserInfo, Non async for namespace in namespaces: yield namespace.dump_user() - async def get_namespace_by_slug(self, user: base_models.APIUser, slug: str) -> models.Namespace | None: + async def get_namespace_by_slug( + self, user: base_models.APIUser, slug: NamespaceSlug, session: AsyncSession | None = None + ) -> models.UserNamespace | models.GroupNamespace | None: """Get the namespace identified by a given slug.""" - async with self.session_maker() as session, session.begin(): - ns = await session.scalar(select(schemas.NamespaceORM).where(schemas.NamespaceORM.slug == slug.lower())) + session_ctx: AsyncSession | nullcontext = nullcontext() + transaction: AsyncSessionTransaction | nullcontext = nullcontext() + if session is None: + session = self.session_maker() + session_ctx = session + transaction = session.begin() + + async with session_ctx, transaction: + ns = await session.scalar( + select(schemas.NamespaceORM).where(schemas.NamespaceORM.slug == slug.value.lower()) + ) old_ns = None if not ns: old_ns = await session.scalar( select(schemas.NamespaceOldORM) - .where(schemas.NamespaceOldORM.slug == slug.lower()) + .where(schemas.NamespaceOldORM.slug == slug.value.lower()) .order_by(schemas.NamespaceOldORM.created_at.desc()) .limit(1) ) @@ -409,6 +452,236 @@ async def get_namespace_by_slug(self, user: base_models.APIUser, slug: str) -> m ) return ns.dump() + @overload + async def get_namespace_by_path( + self, + user: base_models.APIUser, + path: NamespacePath, + session: AsyncSession | None = None, + ) -> models.UserNamespace | models.GroupNamespace | None: ... + @overload + async def get_namespace_by_path( + self, + user: base_models.APIUser, + path: ProjectPath, + session: AsyncSession | None = None, + ) -> models.ProjectNamespace | None: ... 
+
+    async def get_namespace_by_path(
+        self,
+        user: base_models.APIUser,
+        path: NamespacePath | ProjectPath,
+        session: AsyncSession | None = None,
+    ) -> models.UserNamespace | models.GroupNamespace | models.ProjectNamespace | None:
+        """Get the namespace identified by a given path.
+
+        If a DB session is not passed in, one will be started and closed in this function.
+        """
+        not_found_message = f"The namespace with path {path} cannot be found or you do not have permissions to view it"
+
+        session_ctx: AsyncSession | nullcontext = nullcontext()
+        transaction: AsyncSessionTransaction | nullcontext = nullcontext()
+        if session is None:
+            session = self.session_maker()
+            session_ctx = session
+            transaction = session.begin()
+
+        match path:
+            case NamespacePath(usr_or_grp_slug):
+                ns = await self.get_namespace_by_slug(user, usr_or_grp_slug, session)
+                return ns
+            case ProjectPath(usr_or_grp_slug, prj_slug):
+                async with session_ctx, transaction:
+                    stmt = (
+                        select(schemas.EntitySlugORM)
+                        .where(schemas.EntitySlugORM.namespace.has(schemas.NamespaceORM.slug == usr_or_grp_slug.value))
+                        .where(schemas.EntitySlugORM.project_id.is_not(None))
+                        .where(schemas.EntitySlugORM.slug == prj_slug.value)
+                        .where(schemas.EntitySlugORM.data_connector_id.is_(None))
+                        .options(selectinload(schemas.EntitySlugORM.project).joinedload(ProjectORM.slug))
+                    )
+                    prj = await session.scalar(stmt)
+                    if not prj or not prj.project_id:
+                        raise errors.MissingResourceError(message=not_found_message)
+                    allowed = await self.authz.has_permission(user, ResourceType.project, prj.project_id, Scope.READ)
+                    if not allowed:
+                        raise errors.MissingResourceError(message=not_found_message)
+                    return prj.dump_project_namespace()
+            case _:
+                raise errors.ValidationError(
+                    message=f"Received an unsupported number of slugs when looking up namespace by slugs {path}"
+                )
+
+    async def move_data_connector(
+        self,
+        user: base_models.APIUser,
+        dc: DataConnector,
+        new: DataConnectorPath | DataConnectorInProjectPath,
+        session: AsyncSession | None = None,
+    ) -> None:
+        """Rename or move a data connector."""
+
+        async def _check_ns_permissions(
+            user: base_models.APIUser, authz: Authz, ns: models.Namespace, scope: Scope
+        ) -> None:
+            """Helper function to check for namespace permissions."""
+            is_user_namespace = ns.kind == models.NamespaceKind.user
+            ns_id = ns.id if is_user_namespace else ns.underlying_resource_id
+            allowed = await authz.has_permission(user, ns.kind.to_resource_type(), ns_id, scope)
+            if not allowed:
+                raise errors.missing_or_unauthorized(ns.kind, ns.underlying_resource_id)
+
+        async def _get_dc_slug(session: AsyncSession, dc_id: ULID) -> schemas.EntitySlugORM:
+            """Helper function to get the data connector slug or raise an exception."""
+            dc_slug = await session.scalar(
+                select(schemas.EntitySlugORM).where(schemas.EntitySlugORM.data_connector_id == dc_id)
+            )
+            if not dc_slug:
+                raise errors.missing_or_unauthorized(ResourceType.data_connector, dc_id)
+            return dc_slug
+
+        async def _check_dc_slug_not_taken(
+            session: AsyncSession, new_namespace: models.Namespace, new_slug: Slug
+        ) -> None:
+            """Helper function to make sure a new data connector slug is available."""
+            stmt = (
+                select(sa_count("*"))
+                .select_from(schemas.EntitySlugORM)
+                .where(schemas.EntitySlugORM.namespace_id == new_namespace.id)
+                .where(schemas.EntitySlugORM.data_connector_id.is_not(None))
+                .where(schemas.EntitySlugORM.slug == new_slug.value)
+            )
+            if new_namespace.kind == models.NamespaceKind.project:
+                stmt = stmt.where(schemas.EntitySlugORM.project_id == new_namespace.underlying_resource_id)
+            else:
+                stmt = stmt.where(schemas.EntitySlugORM.project_id.is_(None))
+
+            cnt = await session.scalar(stmt)
+            if cnt is not None and cnt > 0:
+                raise errors.ValidationError(
+                    message=f"The owner already has a data connector with slug {new_slug}, please try a different one"
+                )
+
+        async def _upsert_old_dc_slug(session: AsyncSession, old_dc_slug: schemas.EntitySlugORM) -> None:
+            """This function checks if an old entity slug exists and, if so, updates it.
+
+            If the old entity slug does not exist then it inserts one.
+            This is needed so that when a slug is renamed the old slug still points to the new
+            and current entity.
+            """
+            stmt = select(schemas.EntitySlugOldORM).where(schemas.EntitySlugOldORM.slug == old_dc_slug.slug)
+            if old_dc_slug.project_id is not None:
+                stmt = stmt.where(schemas.EntitySlugOldORM.project_id == old_dc_slug.project_id)
+            else:
+                stmt = stmt.where(schemas.EntitySlugOldORM.project_id.is_(None))
+            if old_dc_slug.data_connector_id is not None:
+                stmt = stmt.where(schemas.EntitySlugOldORM.data_connector_id == old_dc_slug.data_connector_id)
+            else:
+                stmt = stmt.where(schemas.EntitySlugOldORM.data_connector_id.is_(None))
+            existing_old_slug = await session.scalar(stmt)
+
+            if not existing_old_slug:
+                session.add(
+                    schemas.EntitySlugOldORM(
+                        slug=old_dc_slug.slug,
+                        latest_slug_id=old_dc_slug.id,
+                        project_id=old_dc_slug.project_id,
+                        data_connector_id=old_dc_slug.data_connector_id,
+                    )
+                )
+                return
+
+            existing_old_slug.slug = old_dc_slug.slug
+            existing_old_slug.latest_slug_id = old_dc_slug.id
+            existing_old_slug.project_id = old_dc_slug.project_id
+            existing_old_slug.data_connector_id = old_dc_slug.data_connector_id
+
+        session_ctx: AsyncSession | nullcontext = nullcontext()
+        transaction: AsyncSessionTransaction | nullcontext = nullcontext()
+        if session is None:
+            session = self.session_maker()
+            session_ctx = session
+            transaction = session.begin()
+
+        async with session_ctx, transaction:
+            required_scope = Scope.WRITE if dc.namespace.path == new.parent() else Scope.DELETE
+            allowed_dc = await self.authz.has_permission(user, ResourceType.data_connector, dc.id, required_scope)
+            if not allowed_dc:
+                raise errors.missing_or_unauthorized(ResourceType.data_connector, dc.id)
+            dc_slug = await _get_dc_slug(session, dc.id)
+
+            match dc.path.parent(), new.parent(), dc.path.last(), new.last():
+                case (old_path, new_path, old_slug, new_slug) if old_path == new_path and old_slug == new_slug:
+                    pass
+                case (old_path, new_path, old_slug, new_slug) if old_path == new_path and old_slug != new_slug:
+                    await _check_dc_slug_not_taken(session, dc.namespace, new_slug)
+                    await _upsert_old_dc_slug(session, dc_slug)
+                    dc_slug.slug = new_slug.value
+                case (NamespacePath(), NamespacePath() as new_path, old_slug, new_slug):
+                    new_usr_grp_ns = await self.get_namespace_by_path(user, new_path, session)
+                    if new_usr_grp_ns is None:
+                        raise errors.MissingResourceError(
+                            message=f"The data connector namespace {new.parent()} cannot be found or "
+                            "you do not have sufficient permissions to access it."
+ ) + await _check_ns_permissions(user, self.authz, dc.namespace, Scope.WRITE) + await _check_ns_permissions(user, self.authz, new_usr_grp_ns, Scope.WRITE) + await _check_dc_slug_not_taken(session, new_usr_grp_ns, new_slug) + await _upsert_old_dc_slug(session, dc_slug) + dc_slug.namespace_id = new_usr_grp_ns.id + if old_slug != new_slug: + dc_slug.slug = new_slug.value + case (ProjectPath(), ProjectPath() as new_path, old_slug, new_slug): + new_proj_ns = await self.get_namespace_by_path(user, new_path, session) + if new_proj_ns is None: + raise errors.MissingResourceError( + message=f"The data connector namespace {new_path} cannot be found or " + "you do not have sufficient permissions to access it." + ) + await _check_ns_permissions(user, self.authz, dc.namespace, Scope.WRITE) + await _check_ns_permissions(user, self.authz, new_proj_ns, Scope.WRITE) + await _check_dc_slug_not_taken(session, new_proj_ns, new_slug) + await _upsert_old_dc_slug(session, dc_slug) + dc_slug.project_id = new_proj_ns.underlying_resource_id + dc_slug.namespace_id = new_proj_ns.id + if old_slug != new_slug: + dc_slug.slug = new_slug.value + case (ProjectPath(), NamespacePath() as new_path, old_slug, new_slug): + new_usr_grp_ns = await self.get_namespace_by_path(user, new_path, session) + if new_usr_grp_ns is None: + raise errors.MissingResourceError( + message=f"The data connector namespace {new_path} cannot be found or " + "you do not have sufficient permissions to access it." + ) + await _check_ns_permissions(user, self.authz, dc.namespace, Scope.WRITE) + await _check_ns_permissions(user, self.authz, new_usr_grp_ns, Scope.WRITE) + await _check_dc_slug_not_taken(session, new_usr_grp_ns, new_slug) + await _upsert_old_dc_slug(session, dc_slug) + dc_slug.project_id = None + dc_slug.namespace_id = new_usr_grp_ns.id + if old_slug != new_slug: + dc_slug.slug = new_slug.value + case (NamespacePath(), ProjectPath() as new_path, old_slug, new_slug): + new_proj_ns = await self.get_namespace_by_path(user, new_path, session) + if new_proj_ns is None: + raise errors.MissingResourceError( + message=f"The data connector namespace {new_path} cannot be found or " + "you do not have sufficient permissions to access it." + ) + await _check_ns_permissions(user, self.authz, dc.namespace, Scope.WRITE) + await _check_ns_permissions(user, self.authz, new_proj_ns, Scope.WRITE) + await _check_dc_slug_not_taken(session, new_proj_ns, new_slug) + await _upsert_old_dc_slug(session, dc_slug) + dc_slug.project_id = new_proj_ns.underlying_resource_id + dc_slug.namespace_id = new_proj_ns.id + if old_slug != new_slug: + dc_slug.slug = new_slug.value + case _: + raise errors.ProgrammingError( + message=f"Received unexpected old ({dc.path}) and new ({new}) " + "path combination when changing data connector ownership." 
+                    )

    async def get_user_namespace(self, user_id: str) -> models.Namespace | None:
        """Get the namespace corresponding to a given user."""
        async with self.session_maker() as session, session.begin():
@@ -417,7 +690,7 @@ async def get_user_namespace(self, user_id: str) -> models.Namespace | None:
            return None
        if not ns.user or not ns.user_id:
            raise errors.ProgrammingError(message="Found a namespace that has no user associated with it.")
-        return ns.dump()
+        return ns.dump_user_namespace()

    async def _create_user_namespace_slug(
        self, session: AsyncSession, user_slug: str, retry_enumerate: int = 0, retry_random: bool = False
diff --git a/components/renku_data_services/namespace/models.py b/components/renku_data_services/namespace/models.py
index ddafe7206..6e9914bc3 100644
--- a/components/renku_data_services/namespace/models.py
+++ b/components/renku_data_services/namespace/models.py
@@ -1,12 +1,15 @@
"""Group models."""

-from dataclasses import dataclass
+from dataclasses import dataclass, field
from datetime import datetime
-from enum import Enum
+from enum import StrEnum
+from typing import Final

from ulid import ULID

from renku_data_services.authz.models import Role
+from renku_data_services.base_models.core import NamespacePath, ProjectPath, ResourceType
+from renku_data_services.errors import errors


@dataclass(kw_only=True)
@@ -55,27 +58,65 @@ class GroupMemberDetails:
    last_name: str | None = None


-class NamespaceKind(str, Enum):
+class NamespaceKind(StrEnum):
    """Allowed kinds of namespaces."""

-    group: str = "group"
-    user: str = "user"
+    group = "group"
+    user = "user"
+    project = "project"  # For now only applicable to data connectors

+    def to_resource_type(self) -> ResourceType:
+        """Convert the namespace kind to the corresponding resource type."""
+        if self == NamespaceKind.group:
+            return ResourceType.group
+        elif self == NamespaceKind.user:
+            return ResourceType.user_namespace
+        elif self == NamespaceKind.project:
+            return ResourceType.project
+        raise errors.ProgrammingError(message=f"Unhandled namespace kind {self}")

-@dataclass
+
+@dataclass(frozen=True, kw_only=True)
class Namespace:
    """A renku namespace."""

    id: ULID
-    slug: str
    kind: NamespaceKind
    created_by: str
-    underlying_resource_id: ULID | str  # The user or group ID depending on the Namespace kind
+    path: NamespacePath | ProjectPath
+    underlying_resource_id: ULID | str  # The user, group or project ID depending on the Namespace kind
    latest_slug: str | None = None
    name: str | None = None
    creation_date: datetime | None = None


+@dataclass(frozen=True, kw_only=True)
+class UserNamespace(Namespace):
+    """A renku user namespace."""
+
+    path: NamespacePath
+    underlying_resource_id: str  # This corresponds to the keycloak user ID - which is not a ULID
+    kind: Final[NamespaceKind] = field(default=NamespaceKind.user, init=False)
+
+
+@dataclass(frozen=True, kw_only=True)
+class GroupNamespace(Namespace):
+    """A renku group namespace."""
+
+    path: NamespacePath
+    underlying_resource_id: ULID
+    kind: Final[NamespaceKind] = field(default=NamespaceKind.group, init=False)
+
+
+@dataclass(frozen=True, kw_only=True)
+class ProjectNamespace(Namespace):
+    """A renku project namespace."""
+
+    path: ProjectPath
+    underlying_resource_id: ULID
+    kind: Final[NamespaceKind] = field(default=NamespaceKind.project, init=False)
+
+
@dataclass(frozen=True, eq=True, kw_only=True)
class GroupPatch:
    """Model for changes requested on a group."""
@@ -93,14 +134,6 @@ class GroupUpdate:
    new: Group


-@dataclass
-class NamespaceUpdate:
-    """Information about the
update of a namespace.""" - - old: Namespace - new: Namespace - - @dataclass class GroupPermissions: """The permissions of a user on a given group.""" diff --git a/components/renku_data_services/namespace/orm.py b/components/renku_data_services/namespace/orm.py index 625f30774..8830f527e 100644 --- a/components/renku_data_services/namespace/orm.py +++ b/components/renku_data_services/namespace/orm.py @@ -1,13 +1,14 @@ """SQLAlchemy's schemas for the group database.""" from datetime import datetime -from typing import Optional, Self, cast +from typing import Optional, Self from sqlalchemy import CheckConstraint, DateTime, Identity, Index, Integer, MetaData, String, func from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column, relationship from sqlalchemy.schema import ForeignKey from ulid import ULID +from renku_data_services.base_models.core import NamespacePath from renku_data_services.base_orm.registry import COMMON_ORM_REGISTRY from renku_data_services.data_connectors.orm import DataConnectorORM from renku_data_services.errors import errors @@ -75,6 +76,13 @@ class NamespaceORM(BaseORM): user: Mapped[UserORM | None] = relationship( lazy="joined", back_populates="namespace", init=False, repr=False, viewonly=True ) + old_namespaces: Mapped[list["NamespaceOldORM"]] = relationship( + back_populates="latest_slug", + default_factory=list, + repr=False, + init=False, + viewonly=True, + ) @property def created_by(self) -> str: @@ -92,18 +100,6 @@ def creation_date(self) -> datetime | None: """When this namespace was created.""" return self.group.creation_date if self.group else None - @property - def underlying_resource_id(self) -> str | ULID: - """Return the id of the underlying resource.""" - if self.group_id is not None: - return self.group_id - elif self.user_id is not None: - return self.user_id - - raise errors.ProgrammingError( - message=f"Found a namespace {self.slug} that has no group or user associated with it." - ) - @property def name(self) -> str | None: """Return the name of the underlying resource.""" @@ -119,20 +115,45 @@ def name(self) -> str | None: message=f"Found a namespace {self.slug} that has no group or user associated with it." ) - def dump(self) -> models.Namespace: + def dump_group_namespace(self) -> models.GroupNamespace: """Create a namespace model from the ORM.""" - kind = models.NamespaceKind.group if self.group else models.NamespaceKind.user - return models.Namespace( + if not self.group_id: + raise errors.ProgrammingError( + message="Expected a valid group_id when dumping NamespaceORM as group namespace." + ) + return models.GroupNamespace( + id=self.id, + created_by=self.created_by, + creation_date=self.creation_date, + underlying_resource_id=self.group_id, + latest_slug=self.slug, + name=self.name, + path=NamespacePath.from_strings(self.slug), + ) + + def dump_user_namespace(self) -> models.UserNamespace: + """Create a namespace model from the ORM.""" + if self.user_id is None: + raise errors.ProgrammingError( + message="Expected a user_id in the NamespaceORM when dumping the object, but got None." 
+            )
+        return models.UserNamespace(
            id=self.id,
-            slug=self.slug,
-            kind=kind,
            created_by=self.created_by,
            creation_date=self.creation_date,
-            underlying_resource_id=self.underlying_resource_id,
+            underlying_resource_id=self.user_id,
            latest_slug=self.slug,
            name=self.name,
+            path=NamespacePath.from_strings(self.slug),
        )

+    def dump(self) -> models.UserNamespace | models.GroupNamespace:
+        """Create a namespace model from the ORM."""
+        if self.group_id:
+            return self.dump_group_namespace()
+        else:
+            return self.dump_user_namespace()
+
    def dump_user(self) -> UserInfo:
        """Create a user with namespace from the ORM."""
        if self.user is None:
@@ -142,7 +163,7 @@ def dump_user(self) -> UserInfo:
        )
        # NOTE: calling `self.user.dump()` can cause sqlalchemy greenlet errors, as it tries to fetch the namespace
        # again from the db, even though the back_populates should take care of this and not require loading.
-        ns = self.dump()
+        ns = self.dump_user_namespace()
        user_info = UserInfo(
            id=self.user.keycloak_id,
            first_name=self.user.first_name,
@@ -153,15 +174,14 @@ def dump_user(self) -> UserInfo:
        return user_info

    @classmethod
-    def load(cls, ns: models.Namespace) -> Self:
-        """Create an ORM object from the user object."""
-        match ns.kind:
-            case models.NamespaceKind.group:
-                return cls(slug=ns.slug, group_id=cast(ULID, ns.underlying_resource_id))
-            case models.NamespaceKind.user:
-                return cls(slug=ns.slug, user_id=cast(str, ns.underlying_resource_id))
+    def load_user(cls, ns: models.UserNamespace) -> Self:
+        """Create an ORM object from the user namespace object."""
+        return cls(slug=ns.path.first.value, user_id=ns.underlying_resource_id)

-        raise errors.ValidationError(message=f"Unknown namespace kind {ns.kind}")
+    @classmethod
+    def load_group(cls, ns: models.GroupNamespace) -> Self:
+        """Create an ORM object from the group namespace object."""
+        return cls(slug=ns.path.first.value, group_id=ns.underlying_resource_id)


class NamespaceOldORM(BaseORM):
@@ -177,18 +197,17 @@ class NamespaceOldORM(BaseORM):
    )
    latest_slug: Mapped[NamespaceORM] = relationship(lazy="joined", init=False, viewonly=True, repr=False)

-    def dump(self) -> models.Namespace:
+    def dump(self) -> models.UserNamespace | models.GroupNamespace:
        """Create a namespace model from the ORM."""
        if self.latest_slug.group_id and self.latest_slug.group:
-            return models.Namespace(
+            return models.GroupNamespace(
                id=self.id,
-                slug=self.slug,
                latest_slug=self.slug,
                created_by=self.latest_slug.created_by,
                creation_date=self.created_at,
-                kind=models.NamespaceKind.group,
                underlying_resource_id=self.latest_slug.group_id,
                name=self.latest_slug.group.name,
+                path=NamespacePath.from_strings(self.slug),
            )

        if not self.latest_slug.user or not self.latest_slug.user_id:
@@ -201,27 +220,44 @@ def dump(self) -> models.Namespace:
            if self.latest_slug.user.first_name and self.latest_slug.user.last_name
            else self.latest_slug.user.first_name or self.latest_slug.user.last_name
        )
-        return models.Namespace(
+        return models.UserNamespace(
            id=self.id,
-            slug=self.slug,
            latest_slug=self.latest_slug.slug,
            created_by=self.latest_slug.user_id,
            creation_date=self.created_at,
-            kind=models.NamespaceKind.user,
            underlying_resource_id=self.latest_slug.user_id,
            name=name,
+            path=NamespacePath.from_strings(self.slug),
        )

+    def dump_as_namespace_path(self) -> models.NamespacePath:
+        """Create a namespace path."""
+        return self.dump().path
+

class EntitySlugORM(BaseORM):
-    """Entity slugs."""
+    """Entity slugs.
+
+    Note that valid combinations here are:
+    - namespace_id + project_id
+    - namespace_id + project_id + data_connector_id
+    - namespace_id + data_connector_id
+    """

    __tablename__ = "entity_slugs"
    __table_args__ = (
-        Index("entity_slugs_unique_slugs", "namespace_id", "slug", unique=True),
+        Index(
+            "entity_slugs_unique_slugs",
+            "namespace_id",
+            "project_id",
+            "data_connector_id",
+            "slug",
+            unique=True,
+            postgresql_nulls_not_distinct=True,
+        ),
        CheckConstraint(
-            "CAST (project_id IS NOT NULL AS int) + CAST (data_connector_id IS NOT NULL AS int) BETWEEN 0 AND 1",
-            name="either_project_id_or_data_connector_id_is_set",
+            "(project_id IS NOT NULL) OR (data_connector_id IS NOT NULL)",
+            name="one_or_both_project_id_or_data_connector_id_are_set",
        ),
    )

@@ -230,11 +266,12 @@ class EntitySlugORM(BaseORM):
    project_id: Mapped[ULID | None] = mapped_column(
        ForeignKey(ProjectORM.id, ondelete="CASCADE", name="entity_slugs_project_id_fk"), index=True, nullable=True
    )
-    project: Mapped[ProjectORM | None] = relationship(init=False, repr=False, back_populates="slug")
+    project: Mapped[ProjectORM | None] = relationship(init=False, repr=False, back_populates="slug", lazy="selectin")
    data_connector_id: Mapped[ULID | None] = mapped_column(
        ForeignKey(DataConnectorORM.id, ondelete="CASCADE", name="entity_slugs_data_connector_id_fk"),
        index=True,
        nullable=True,
+        unique=True,
    )
    data_connector: Mapped[DataConnectorORM | None] = relationship(init=False, repr=False, back_populates="slug")
    namespace_id: Mapped[ULID] = mapped_column(
@@ -253,15 +290,35 @@ def create_project_slug(cls, slug: str, project_id: ULID, namespace_id: ULID) ->
        )

    @classmethod
-    def create_data_connector_slug(cls, slug: str, data_connector_id: ULID, namespace_id: ULID) -> "EntitySlugORM":
+    def create_data_connector_slug(
+        cls,
+        slug: str,
+        data_connector_id: ULID,
+        namespace_id: ULID,
+        project_id: ULID | None = None,
+    ) -> "EntitySlugORM":
        """Create an entity slug for a data connector."""
        return cls(
            slug=slug,
-            project_id=None,
+            project_id=project_id,
            data_connector_id=data_connector_id,
            namespace_id=namespace_id,
        )

+    def dump_namespace(self) -> models.UserNamespace | models.GroupNamespace | models.ProjectNamespace:
+        """Dump the entity slug as a namespace."""
+        if self.project:
+            return self.dump_project_namespace()
+        return self.namespace.dump()
+
+    def dump_project_namespace(self) -> models.ProjectNamespace:
+        """Dump the entity slug as a project namespace."""
+        if not self.project:
+            raise errors.ProgrammingError(
+                message="Attempting to dump a namespace without a project as a project namespace"
+            )
+        return self.project.dump_as_namespace()


class EntitySlugOldORM(BaseORM):
    """Entity slugs history."""
@@ -276,7 +333,16 @@ class EntitySlugOldORM(BaseORM):
    latest_slug_id: Mapped[int] = mapped_column(
        ForeignKey(EntitySlugORM.id, ondelete="CASCADE"),
        nullable=False,
-        init=False,
        index=True,
    )
-    latest_slug: Mapped[EntitySlugORM] = relationship(lazy="joined", repr=False, viewonly=True)
+    latest_slug: Mapped[EntitySlugORM] = relationship(lazy="joined", init=False, repr=False, viewonly=True)
+    project_id: Mapped[ULID | None] = mapped_column(
+        ForeignKey(ProjectORM.id, ondelete="CASCADE", name="entity_slugs_project_id_fk"), index=True, nullable=True
+    )
+    project: Mapped[ProjectORM | None] = relationship(init=False, repr=False, viewonly=True, default=None)
+    data_connector_id: Mapped[ULID | None] = mapped_column(
+        ForeignKey(DataConnectorORM.id, ondelete="CASCADE", name="entity_slugs_data_connector_id_fk"),
+        index=True,
+        nullable=True,
+    )
+ data_connector: Mapped[DataConnectorORM | None] = relationship(init=False, repr=False, viewonly=True, default=None) diff --git a/components/renku_data_services/notebooks/api.spec.yaml b/components/renku_data_services/notebooks/api.spec.yaml index 475124754..cf11d5cc5 100644 --- a/components/renku_data_services/notebooks/api.spec.yaml +++ b/components/renku_data_services/notebooks/api.spec.yaml @@ -71,26 +71,6 @@ paths: description: Server options such as CPU, memory, storage, etc. tags: - notebooks - "/notebooks/old/servers": - post: - summary: Launch a new session - requestBody: - required: true - content: - application/json: - schema: - $ref: "#/components/schemas/LaunchNotebookRequestOld" - responses: - "201": - description: The project was created - content: - application/json: - schema: - $ref: "#/components/schemas/NotebookResponse" - default: - $ref: "#/components/responses/Error" - tags: - - notebooks "/notebooks/servers": post: summary: Launch a new session @@ -99,7 +79,7 @@ paths: content: application/json: schema: - $ref: "#/components/schemas/LaunchNotebookRequest" + $ref: "#/components/schemas/LaunchNotebookRequestOld" responses: "201": description: The project was created @@ -439,6 +419,27 @@ components: - anonymous - registered type: object + EnvVariableOverrides: + description: Environment variable overrides for the session pod + type: array + items: + $ref: "#/components/schemas/EnvVarOverride" + EnvVarOverride: + description: Override an env variable defined in the session launcher + type: object + properties: + name: + type: string + maxLength: 256 + # based on https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_235 + pattern: "^[a-zA-Z_][a-zA-Z0-9_]*$" + example: MY_VAR + value: + type: string + maxLength: 500 + required: + - name + - value ErrorResponse: type: object properties: @@ -885,6 +886,8 @@ components: type: integer cloudstorage: $ref: "#/components/schemas/SessionCloudStoragePostList" + env_variable_overrides: + $ref: "#/components/schemas/EnvVariableOverrides" required: - launcher_id type: object diff --git a/components/renku_data_services/notebooks/api/amalthea_patches/cloudstorage.py b/components/renku_data_services/notebooks/api/amalthea_patches/cloudstorage.py index 861f05ec6..010f89709 100644 --- a/components/renku_data_services/notebooks/api/amalthea_patches/cloudstorage.py +++ b/components/renku_data_services/notebooks/api/amalthea_patches/cloudstorage.py @@ -1,5 +1,7 @@ """Cloud storage patches.""" +from __future__ import annotations + from typing import TYPE_CHECKING, Any if TYPE_CHECKING: @@ -8,7 +10,7 @@ from renku_data_services.notebooks.api.classes.server import UserServer -async def main(server: "UserServer") -> list[dict[str, Any]]: +async def main(server: UserServer) -> list[dict[str, Any]]: """Cloud storage patches.""" cloud_storage_patches: list[dict[str, Any]] = [] cloud_storage_request: ICloudStorageRequest @@ -17,9 +19,7 @@ async def main(server: "UserServer") -> list[dict[str, Any]]: repositories = await server.repositories() for i, cloud_storage_request in enumerate(server.cloudstorage): cloud_storage_patches.extend( - cloud_storage_request.get_manifest_patch( - f"{server.server_name}-ds-{i}", server.k8s_client.preferred_namespace - ) + cloud_storage_request.get_manifest_patch(f"{server.server_name}-ds-{i}", server.k8s_namespace()) ) if repositories: cloud_storage_patches.append( diff --git a/components/renku_data_services/notebooks/api/amalthea_patches/general.py 
b/components/renku_data_services/notebooks/api/amalthea_patches/general.py index 12dcc0434..1b8d856d4 100644 --- a/components/renku_data_services/notebooks/api/amalthea_patches/general.py +++ b/components/renku_data_services/notebooks/api/amalthea_patches/general.py @@ -1,5 +1,8 @@ """General patches for the jupyter server session.""" +from __future__ import annotations + +from numbers import Number from typing import TYPE_CHECKING, Any if TYPE_CHECKING: @@ -7,7 +10,7 @@ from renku_data_services.notebooks.api.classes.server import UserServer -def session_tolerations(server: "UserServer") -> list[dict[str, Any]]: +def session_tolerations(server: UserServer) -> list[dict[str, Any]]: """Patch for node taint tolerations. The static tolerations from the configuration are ignored @@ -37,7 +40,7 @@ def session_tolerations(server: "UserServer") -> list[dict[str, Any]]: ] -def session_affinity(server: "UserServer") -> list[dict[str, Any]]: +def session_affinity(server: UserServer) -> list[dict[str, Any]]: """Patch for session affinities. The static affinities from the configuration are ignored @@ -105,7 +108,7 @@ def session_affinity(server: "UserServer") -> list[dict[str, Any]]: ] -def session_node_selector(server: "UserServer") -> list[dict[str, Any]]: +def session_node_selector(server: UserServer) -> list[dict[str, Any]]: """Patch for a node selector. If node affinities are specified in the server options @@ -127,7 +130,7 @@ def session_node_selector(server: "UserServer") -> list[dict[str, Any]]: return [] -def priority_class(server: "UserServer") -> list[dict[str, Any]]: +def priority_class(server: UserServer) -> list[dict[str, Any]]: """Set the priority class for the session, used to enforce resource quotas.""" if server.server_options.priority_class is None: return [] @@ -145,7 +148,7 @@ def priority_class(server: "UserServer") -> list[dict[str, Any]]: ] -def test(server: "UserServer") -> list[dict[str, Any]]: +def test(server: UserServer) -> list[dict[str, Any]]: """Test the server patches. 
RFC 6902 patches support test statements that will cause the whole patch
@@ -177,7 +180,7 @@ def test(server: "UserServer") -> list[dict[str, Any]]:
    return patches


-def oidc_unverified_email(server: "UserServer") -> list[dict[str, Any]]:
+def oidc_unverified_email(server: UserServer) -> list[dict[str, Any]]:
    """Allow users whose email is unverified in Keycloak to still be able to access their sessions."""
    patches = []
    if server.user.is_authenticated:
@@ -201,35 +204,34 @@ def oidc_unverified_email(server: "UserServer") -> list[dict[str, Any]]:
    return patches


-def dev_shm(server: "UserServer") -> list[dict[str, Any]]:
+def dev_shm(server: UserServer) -> list[dict[str, Any]]:
    """Patches the /dev/shm folder used by some ML libraries for passing data between different processes."""
-    patches = []
-    if server.server_options.storage:
-        patches.append(
-            {
-                "type": "application/json-patch+json",
-                "patch": [
-                    {
-                        "op": "add",
-                        "path": "/statefulset/spec/template/spec/volumes/-",
-                        "value": {
-                            "name": "shm",
-                            "emptyDir": {
-                                "medium": "Memory",
-                                # NOTE: We are giving /dev/shm up to half of the memory request
-                                "sizeLimit": int(server.server_options.storage / 2),
-                            },
+    return [
+        {
+            "type": "application/json-patch+json",
+            "patch": [
+                {
+                    "op": "add",
+                    "path": "/statefulset/spec/template/spec/volumes/-",
+                    "value": {
+                        "name": "shm",
+                        "emptyDir": {
+                            "medium": "Memory",
+                            # NOTE: We are giving /dev/shm up to half of the memory request
+                            "sizeLimit": int(server.server_options.memory / 2)
+                            if isinstance(server.server_options.memory, Number)
+                            else "1Gi",
                        },
                    },
-                    {
-                        "op": "add",
-                        "path": "/statefulset/spec/template/spec/containers/1/volumeMounts/-",
-                        "value": {
-                            "mountPath": "/dev/shm",  # nosec B108
-                            "name": "shm",
-                        },
+                },
+                {
+                    "op": "add",
+                    "path": "/statefulset/spec/template/spec/containers/0/volumeMounts/-",
+                    "value": {
+                        "mountPath": "/dev/shm",  # nosec B108
+                        "name": "shm",
                    },
-                ],
-            }
-        )
-    return patches
+                },
+            ],
+        }
+    ]
diff --git a/components/renku_data_services/notebooks/api/amalthea_patches/git_proxy.py b/components/renku_data_services/notebooks/api/amalthea_patches/git_proxy.py
index 38f738321..f4fcb5a26 100644
--- a/components/renku_data_services/notebooks/api/amalthea_patches/git_proxy.py
+++ b/components/renku_data_services/notebooks/api/amalthea_patches/git_proxy.py
@@ -1,5 +1,7 @@
"""Patches for the git proxy container."""

+from __future__ import annotations
+
import json
from dataclasses import asdict
from typing import TYPE_CHECKING, Any
@@ -67,11 +69,11 @@ async def main_container(
    container = client.V1Container(
        image=config.sessions.git_proxy.image,
        security_context={
-            "fsGroup": 100,
            "runAsGroup": 1000,
            "runAsUser": 1000,
            "allowPrivilegeEscalation": False,
            "runAsNonRoot": True,
+            "capabilities": {"drop": ["ALL"]},
        },
        name="git-proxy",
        env=env,
@@ -97,7 +99,7 @@ async def main_container(
    return container


-async def main(server: "UserServer") -> list[dict[str, Any]]:
+async def main(server: UserServer) -> list[dict[str, Any]]:
    """The patch that adds the git proxy container to a session statefulset."""
    repositories = await server.repositories()
    if not server.user.is_authenticated or not repositories:
diff --git a/components/renku_data_services/notebooks/api/amalthea_patches/git_sidecar.py b/components/renku_data_services/notebooks/api/amalthea_patches/git_sidecar.py
index ceac8e248..495bf0481 100644
--- a/components/renku_data_services/notebooks/api/amalthea_patches/git_sidecar.py
+++
b/components/renku_data_services/notebooks/api/amalthea_patches/git_sidecar.py @@ -1,5 +1,7 @@ """Patches for the git sidecar container.""" +from __future__ import annotations + import os from typing import TYPE_CHECKING, Any @@ -8,7 +10,7 @@ from renku_data_services.notebooks.api.classes.server import UserServer -async def main(server: "UserServer") -> list[dict[str, Any]]: +async def main(server: UserServer) -> list[dict[str, Any]]: """Adds the git sidecar container to the session statefulset.""" # NOTE: Sessions can be persisted only for registered users if not server.user.is_authenticated: @@ -28,6 +30,7 @@ async def main(server: "UserServer") -> list[dict[str, Any]]: if gl_project_path: volume_mount["subPath"] = f"{gl_project_path}" + # noinspection PyListCreation patches = [ { "type": "application/json-patch+json", @@ -100,10 +103,10 @@ async def main(server: "UserServer") -> list[dict[str, Any]]: ], "securityContext": { "allowPrivilegeEscalation": False, - "fsGroup": 100, "runAsGroup": 1000, "runAsUser": 1000, "runAsNonRoot": True, + "capabilities": {"drop": ["ALL"]}, }, "volumeMounts": [volume_mount], "livenessProbe": { @@ -156,12 +159,12 @@ async def main(server: "UserServer") -> list[dict[str, Any]]: { "op": "add", "path": "/statefulset/spec/template/spec/containers/1/args/-", - "value": (f"--skip-auth-route=^/sessions/{server.server_name}/sidecar/health$"), + "value": f"--skip-auth-route=^/sessions/{server.server_name}/sidecar/health$", }, { "op": "add", "path": "/statefulset/spec/template/spec/containers/1/args/-", - "value": (f"--skip-auth-route=^/sessions/{server.server_name}/sidecar/health/$"), + "value": f"--skip-auth-route=^/sessions/{server.server_name}/sidecar/health/$", }, { "op": "add", @@ -171,7 +174,7 @@ async def main(server: "UserServer") -> list[dict[str, Any]]: { "op": "add", "path": "/statefulset/spec/template/spec/containers/1/args/-", - "value": (f"--skip-auth-route=^/sessions/{server.server_name}/sidecar/jsonrpc/map$"), + "value": f"--skip-auth-route=^/sessions/{server.server_name}/sidecar/jsonrpc/map$", }, { "op": "add", @@ -194,7 +197,7 @@ async def main(server: "UserServer") -> list[dict[str, Any]]: "kind": "Service", "metadata": { "name": f"{server.server_name}-rpc-server", - "namespace": server.k8s_client.preferred_namespace, + "namespace": server.k8s_namespace(), }, "spec": { "ports": [ diff --git a/components/renku_data_services/notebooks/api/amalthea_patches/init_containers.py b/components/renku_data_services/notebooks/api/amalthea_patches/init_containers.py index 638dd6171..78c97f092 100644 --- a/components/renku_data_services/notebooks/api/amalthea_patches/init_containers.py +++ b/components/renku_data_services/notebooks/api/amalthea_patches/init_containers.py @@ -1,5 +1,7 @@ """Patches for init containers.""" +from __future__ import annotations + import json import os from dataclasses import asdict @@ -12,6 +14,9 @@ from renku_data_services.notebooks.api.amalthea_patches.utils import get_certificates_volume_mounts from renku_data_services.notebooks.api.classes.repository import GitProvider, Repository from renku_data_services.notebooks.config import NotebooksConfig +from renku_data_services.notebooks.crs import EmptyDir, ExtraVolume, ExtraVolumeMount, InitContainer, SecretAsVolume +from renku_data_services.project import constants as project_constants +from renku_data_services.project.models import SessionSecret if TYPE_CHECKING: # NOTE: If these are directly imported then you get circular imports. 
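
# --- Reviewer note (sketch, not part of the patch): every helper in these
# amalthea_patches modules returns a list of JSON-patch (RFC 6902) documents
# that amalthea applies to the session statefulset; container index 0 is the
# session container itself (see the dev_shm patch above). A minimal
# illustrative sketch of that common shape; example_env_patch is a
# hypothetical name, not something added by this changeset:
#
#     from typing import Any
#
#     def example_env_patch(name: str, value: str) -> list[dict[str, Any]]:
#         """Add one env var to the session container."""
#         return [
#             {
#                 "type": "application/json-patch+json",
#                 "patch": [
#                     {
#                         "op": "add",
#                         "path": "/statefulset/spec/template/spec/containers/0/env/-",
#                         "value": {"name": name, "value": value},
#                     }
#                 ],
#             }
#         ]
# ---
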
@@ -26,6 +31,8 @@ async def git_clone_container_v2( workspace_mount_path: PurePosixPath, work_dir: PurePosixPath, lfs_auto_fetch: bool = False, + uid: int = 1000, + gid: int = 1000, ) -> dict[str, Any] | None: """Returns the specification for the container that clones the user's repositories for new operator.""" amalthea_session_work_volume: str = "amalthea-volume" @@ -41,7 +48,6 @@ async def git_clone_container_v2( prefix = "GIT_CLONE_" env = [ - {"name": f"{prefix}WORKSPACE_MOUNT_PATH", "value": workspace_mount_path.as_posix()}, { "name": f"{prefix}MOUNT_PATH", "value": work_dir.as_posix(), @@ -133,10 +139,10 @@ async def git_clone_container_v2( }, "securityContext": { "allowPrivilegeEscalation": False, - "fsGroup": 100, - "runAsGroup": 100, - "runAsUser": 1000, + "runAsGroup": gid, + "runAsUser": uid, "runAsNonRoot": True, + "capabilities": {"drop": ["ALL"]}, }, "volumeMounts": [ { @@ -149,7 +155,7 @@ async def git_clone_container_v2( } -async def git_clone_container(server: "UserServer") -> dict[str, Any] | None: +async def git_clone_container(server: UserServer) -> dict[str, Any] | None: """Returns the specification for the container that clones the user's repositories.""" repositories = await server.repositories() if not repositories: @@ -164,13 +170,9 @@ async def git_clone_container(server: "UserServer") -> dict[str, Any] | None: prefix = "GIT_CLONE_" env = [ - { - "name": f"{prefix}WORKSPACE_MOUNT_PATH", - "value": server.workspace_mount_path.as_posix(), - }, { "name": f"{prefix}MOUNT_PATH", - "value": server.work_dir.as_posix(), + "value": server.workspace_mount_path.as_posix(), }, { "name": f"{prefix}LFS_AUTO_FETCH", @@ -258,7 +260,6 @@ async def git_clone_container(server: "UserServer") -> dict[str, Any] | None: }, "securityContext": { "allowPrivilegeEscalation": False, - "fsGroup": 100, "runAsGroup": 100, "runAsUser": 1000, "runAsNonRoot": True, @@ -274,7 +275,7 @@ async def git_clone_container(server: "UserServer") -> dict[str, Any] | None: } -async def git_clone(server: "UserServer") -> list[dict[str, Any]]: +async def git_clone(server: UserServer) -> list[dict[str, Any]]: """The patch for the init container that clones the git repository.""" container = await git_clone_container(server) if not container: @@ -304,6 +305,13 @@ def certificates_container(config: NotebooksConfig) -> tuple[client.V1Container, custom_certs=True, read_only_etc_certs=False, ), + security_context=client.V1SecurityContext( + allow_privilege_escalation=False, + run_as_group=1000, + run_as_user=1000, + run_as_non_root=True, + capabilities=client.V1Capabilities(drop=["ALL"]), + ), resources={ "requests": { "cpu": "50m", @@ -358,7 +366,7 @@ def certificates(config: NotebooksConfig) -> list[dict[str, Any]]: return patches -def download_image_container(server: "UserServer") -> client.V1Container: +def download_image_container(server: UserServer) -> client.V1Container: """Adds a container that does not do anything but simply downloads the session image at startup.""" container = client.V1Container( name="download-image", @@ -375,7 +383,7 @@ def download_image_container(server: "UserServer") -> client.V1Container: return container -def download_image(server: "UserServer") -> list[dict[str, Any]]: +def download_image(server: UserServer) -> list[dict[str, Any]]: """Adds a container that does not do anything but simply downloads the session image at startup.""" container = download_image_container(server) api_client = client.ApiClient() @@ -391,3 +399,66 @@ def download_image(server: "UserServer") -> 
list[dict[str, Any]]: ], }, ] + + +def user_secrets_container( + user: AuthenticatedAPIUser | AnonymousAPIUser, + config: NotebooksConfig, + secrets_mount_directory: str, + k8s_secret_name: str, + session_secrets: list[SessionSecret], +) -> tuple[InitContainer, list[ExtraVolume], list[ExtraVolumeMount]] | None: + """The init container which decrypts user secrets to be mounted in the session.""" + if not session_secrets or user.is_anonymous: + return None + + volume_k8s_secrets = ExtraVolume( + name=f"{k8s_secret_name}-volume", + secret=SecretAsVolume( + secretName=k8s_secret_name, + ), + ) + volume_decrypted_secrets = ExtraVolume(name="user-secrets-volume", emptyDir=EmptyDir(medium="Memory")) + + decrypted_volume_mount = ExtraVolumeMount( + name="user-secrets-volume", + mountPath=secrets_mount_directory or project_constants.DEFAULT_SESSION_SECRETS_MOUNT_DIR.as_posix(), + readOnly=True, + ) + + init_container = InitContainer.model_validate( + dict( + name="init-user-secrets", + image=config.user_secrets.image, + env=[ + dict(name="DATA_SERVICE_URL", value=config.data_service_url), + dict(name="RENKU_ACCESS_TOKEN", value=user.access_token or ""), + dict(name="ENCRYPTED_SECRETS_MOUNT_PATH", value="/encrypted"), + dict(name="DECRYPTED_SECRETS_MOUNT_PATH", value="/decrypted"), + ], + volumeMounts=[ + dict( + name=f"{k8s_secret_name}-volume", + mountPath="/encrypted", + readOnly=True, + ), + dict( + name="user-secrets-volume", + mountPath="/decrypted", + readOnly=False, + ), + ], + resources={ + "requests": { + "cpu": "50m", + "memory": "50Mi", + } + }, + ) + ) + + return ( + init_container, + [volume_k8s_secrets, volume_decrypted_secrets], + [decrypted_volume_mount], + ) diff --git a/components/renku_data_services/notebooks/api/amalthea_patches/inject_certificates.py b/components/renku_data_services/notebooks/api/amalthea_patches/inject_certificates.py index e46707dc8..a7ae77c55 100644 --- a/components/renku_data_services/notebooks/api/amalthea_patches/inject_certificates.py +++ b/components/renku_data_services/notebooks/api/amalthea_patches/inject_certificates.py @@ -1,5 +1,7 @@ """Patches for injecting custom certificates in session containers.""" +from __future__ import annotations + from pathlib import Path from typing import TYPE_CHECKING, Any @@ -10,7 +12,7 @@ from renku_data_services.notebooks.api.classes.server import UserServer -def proxy(server: "UserServer") -> list[dict[str, Any]]: +def proxy(server: UserServer) -> list[dict[str, Any]]: """Injects custom certificates volumes in the oauth2 proxy container.""" etc_cert_volume_mounts = get_certificates_volume_mounts( server.config, diff --git a/components/renku_data_services/notebooks/api/amalthea_patches/jupyter_server.py b/components/renku_data_services/notebooks/api/amalthea_patches/jupyter_server.py index 6f7affc09..7c6099bd1 100644 --- a/components/renku_data_services/notebooks/api/amalthea_patches/jupyter_server.py +++ b/components/renku_data_services/notebooks/api/amalthea_patches/jupyter_server.py @@ -1,4 +1,6 @@ -"""Patches the modify the jupyter container in the session.""" +"""Patches to modify the jupyter container in the session.""" + +from __future__ import annotations import base64 import json @@ -15,7 +17,7 @@ from renku_data_services.notebooks.api.classes.server import UserServer -def env(server: "UserServer") -> list[dict[str, Any]]: +def env(server: UserServer) -> list[dict[str, Any]]: """Injects environment variables in the jupyter container in the session. 
Amalthea always makes the jupyter server the first container in the statefulset
@@ -93,8 +95,7 @@ def env(server: "UserServer") -> list[dict[str, Any]]:

def args() -> list[dict[str, Any]]:
    """Sets the arguments for running the jupyter container."""
-    patches = []
-    patches.append(
+    patches = [
        {
            "type": "application/json-patch+json",
            "patch": [
@@ -105,11 +106,11 @@ def args() -> list[dict[str, Any]]:
            }
        ],
    }
-    )
+    ]
    return patches


-def image_pull_secret(server: "UserServer", access_token: str | None) -> list[dict[str, Any]]:
+def image_pull_secret(server: UserServer, access_token: str | None) -> list[dict[str, Any]]:
    """Adds an image pull secret to the session if the session image is not public."""
    patches = []
    if isinstance(server.user, AuthenticatedAPIUser) and server.is_image_private and access_token:
@@ -138,7 +139,7 @@
                    "kind": "Secret",
                    "metadata": {
                        "name": image_pull_secret_name,
-                        "namespace": server._k8s_client.preferred_namespace,
+                        "namespace": server.k8s_namespace(),
                    },
                    "type": "kubernetes.io/dockerconfigjson",
                },
@@ -177,7 +178,7 @@ def disable_service_links() -> list[dict[str, Any]]:
    ]


-def rstudio_env_variables(server: "UserServer") -> list[dict[str, Any]]:
+def rstudio_env_variables(server: UserServer) -> list[dict[str, Any]]:
    """Makes sure environment variables propagate for R and Rstudio.

    Since we cannot be certain that R/Rstudio is or isn't used we inject this every time
@@ -233,7 +234,7 @@ def rstudio_env_variables(server: "UserServer") -> list[dict[str, Any]]:
    ]


-def user_secrets(server: "UserServer") -> list[dict[str, Any]]:
+def user_secrets(server: UserServer) -> list[dict[str, Any]]:
    """Patches to add volumes and corresponding mount volumes to the main container for user-requested secrets."""

    if server.user_secrets is None:
diff --git a/components/renku_data_services/notebooks/api/classes/data_service.py b/components/renku_data_services/notebooks/api/classes/data_service.py
index 88bd5b744..31e1a2d2b 100644
--- a/components/renku_data_services/notebooks/api/classes/data_service.py
+++ b/components/renku_data_services/notebooks/api/classes/data_service.py
@@ -1,11 +1,10 @@
"""Helpers for interacting with the data service."""

from dataclasses import dataclass, field
-from typing import Any, NamedTuple, Optional, cast
+from typing import Optional
from urllib.parse import urljoin, urlparse

import httpx
-from sanic.log import logger

from renku_data_services.base_models import APIUser
from renku_data_services.crc.db import ResourcePoolRepository
@@ -18,108 +17,7 @@ )
from renku_data_services.notebooks.api.schemas.server_options import ServerOptions
from renku_data_services.notebooks.errors.intermittent import IntermittentError
-from renku_data_services.notebooks.errors.user import (
-    AuthenticationError,
-    InvalidCloudStorageConfiguration,
-    InvalidComputeResourceError,
-    MissingResourceError,
-)
-
-
-class CloudStorageConfig(NamedTuple):
-    """Cloud storage configuration."""
-
-    config: dict[str, Any]
-    source_path: str
-    target_path: str
-    readonly: bool
-    name: str
-
-
-@dataclass
-class StorageValidator:
-    """Cloud storage validator."""
-
-    storage_url: str
-
-    def __post_init__(self) -> None:
-        self.storage_url = self.storage_url.rstrip("/")
-
-    async def get_storage_by_id(
-        self, user: APIUser, internal_gitlab_user: APIUser, project_id: int, storage_id: str
-    ) -> CloudStorageConfig:
-        """Get a specific cloud storage configuration by ID."""
-        headers = None
-        if user is not None and
user.access_token is not None and internal_gitlab_user.access_token is not None: - headers = { - "Authorization": f"bearer {user.access_token}", - "Gitlab-Access-Token": user.access_token, - } - # TODO: remove project_id once authz on the data service works properly - request_url = self.storage_url + f"/storage/{storage_id}?project_id={project_id}" - logger.info(f"getting storage info by id: {request_url}") - async with httpx.AsyncClient(timeout=10) as client: - res = await client.get(request_url, headers=headers) - if res.status_code == 404: - raise MissingResourceError(message=f"Couldn't find cloud storage with id {storage_id}") - if res.status_code == 401: - raise AuthenticationError("User is not authorized to access this storage on this project.") - if res.status_code != 200: - raise IntermittentError( - message="The data service sent an unexpected response, please try again later", - ) - storage = res.json()["storage"] - return CloudStorageConfig( - config=storage["configuration"], - source_path=storage["source_path"], - target_path=storage["target_path"], - readonly=storage.get("readonly", True), - name=storage["name"], - ) - - async def validate_storage_configuration(self, configuration: dict[str, Any], source_path: str) -> None: - """Validate the cloud storage configuration.""" - async with httpx.AsyncClient(timeout=10) as client: - res = await client.post(self.storage_url + "/storage_schema/validate", json=configuration) - if res.status_code == 422: - raise InvalidCloudStorageConfiguration( - message=f"The provided cloud storage configuration isn't valid: {res.json()}", - ) - if res.status_code != 204: - raise IntermittentError( - message="The data service sent an unexpected response, please try again later", - ) - - async def obscure_password_fields_for_storage(self, configuration: dict[str, Any]) -> dict[str, Any]: - """Obscures password fields for use with rclone.""" - async with httpx.AsyncClient(timeout=10) as client: - res = await client.post(self.storage_url + "/storage_schema/obscure", json=configuration) - - if res.status_code != 200: - raise InvalidCloudStorageConfiguration( - message=f"Couldn't obscure password fields for configuration: {res.json()}" - ) - - return cast(dict[str, Any], res.json()) - - -@dataclass -class DummyStorageValidator: - """Dummy cloud storage validator used for testing.""" - - async def get_storage_by_id( - self, user: APIUser, internal_gitlab_user: APIUser, project_id: int, storage_id: str - ) -> CloudStorageConfig: - """Get storage by ID.""" - raise NotImplementedError() - - async def validate_storage_configuration(self, configuration: dict[str, Any], source_path: str) -> None: - """Validate the cloud storage configuration.""" - raise NotImplementedError() - - async def obscure_password_fields_for_storage(self, configuration: dict[str, Any]) -> dict[str, Any]: - """Obscure the password fields in a cloud storage configuration.""" - raise NotImplementedError() +from renku_data_services.notebooks.errors.user import InvalidComputeResourceError @dataclass diff --git a/components/renku_data_services/notebooks/api/classes/image.py b/components/renku_data_services/notebooks/api/classes/image.py index 9a46aeb51..d9317c6b6 100644 --- a/components/renku_data_services/notebooks/api/classes/image.py +++ b/components/renku_data_services/notebooks/api/classes/image.py @@ -16,10 +16,10 @@ class ManifestTypes(Enum): """The mime types for docker image manifests.""" - docker_v2: str = "application/vnd.docker.distribution.manifest.v2+json" - docker_v2_list: str = 
"application/vnd.docker.distribution.manifest.list.v2+json" - oci_v1_manifest: str = "application/vnd.oci.image.manifest.v1+json" - oci_v1_index: str = "application/vnd.oci.image.index.v1+json" + docker_v2 = "application/vnd.docker.distribution.manifest.v2+json" + docker_v2_list = "application/vnd.docker.distribution.manifest.list.v2+json" + oci_v1_manifest = "application/vnd.oci.image.manifest.v1+json" + oci_v1_index = "application/vnd.oci.image.index.v1+json" DEFAULT_PLATFORM_ARCHITECTURE = "amd64" diff --git a/components/renku_data_services/notebooks/api/classes/k8s_client.py b/components/renku_data_services/notebooks/api/classes/k8s_client.py index ddfb9f252..9ce34fff6 100644 --- a/components/renku_data_services/notebooks/api/classes/k8s_client.py +++ b/components/renku_data_services/notebooks/api/classes/k8s_client.py @@ -2,54 +2,39 @@ import base64 import json -import logging -from contextlib import suppress -from typing import Any, Generic, Optional, TypeVar, cast -from urllib.parse import urljoin +from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar, cast import httpx +import kubernetes +from box import Box from kr8s import NotFoundError, ServerError from kr8s.asyncio.objects import APIObject, Pod, Secret, StatefulSet -from kubernetes.client import ApiClient, V1Container, V1Secret +from kubernetes.client import V1Secret +from renku_data_services.base_models import APIUser +from renku_data_services.crc.db import ResourcePoolRepository from renku_data_services.errors import errors +from renku_data_services.k8s.constants import DEFAULT_K8S_CLUSTER, ClusterId +from renku_data_services.k8s.models import GVK, Cluster, K8sObject, K8sObjectFilter, K8sObjectMeta from renku_data_services.notebooks.api.classes.auth import GitlabToken, RenkuTokens +from renku_data_services.notebooks.constants import JUPYTER_SESSION_GVK from renku_data_services.notebooks.crs import AmaltheaSessionV1Alpha1, JupyterServerV1Alpha1 -from renku_data_services.notebooks.errors.intermittent import ( - CannotStartServerError, - DeleteServerError, - IntermittentError, - JSCacheError, - PatchServerError, -) from renku_data_services.notebooks.errors.programming import ProgrammingError from renku_data_services.notebooks.util.kubernetes_ import find_env_var -from renku_data_services.notebooks.util.retries import ( - retry_with_exponential_backoff_async, -) +from renku_data_services.notebooks.util.retries import retry_with_exponential_backoff_async -sanitize_for_serialization = ApiClient().sanitize_for_serialization +if TYPE_CHECKING: + from renku_data_services.k8s.clients import K8sClusterClientsPool - -# NOTE The type ignore below is because the kr8s library has no type stubs, they claim pyright better handles type hints -class AmaltheaSessionV1Alpha1Kr8s(APIObject): - """Spec for amalthea sessions used by the k8s client.""" - - kind: str = "AmaltheaSession" - version: str = "amalthea.dev/v1alpha1" - namespaced: bool = True - plural: str = "amaltheasessions" - singular: str = "amaltheasession" - scalable: bool = False - endpoint: str = "amaltheasessions" +sanitizer = kubernetes.client.ApiClient().sanitize_for_serialization # NOTE The type ignore below is because the kr8s library has no type stubs, they claim pyright better handles type hints class JupyterServerV1Alpha1Kr8s(APIObject): """Spec for jupyter servers used by the k8s client.""" - kind: str = "JupyterServer" - version: str = "amalthea.dev/v1alpha1" + kind: str = JUPYTER_SESSION_GVK.kind + version: str = JUPYTER_SESSION_GVK.group_version namespaced: 
bool = True plural: str = "jupyterservers" singular: str = "jupyterserver" @@ -58,205 +43,37 @@ class JupyterServerV1Alpha1Kr8s(APIObject): _SessionType = TypeVar("_SessionType", JupyterServerV1Alpha1, AmaltheaSessionV1Alpha1) -_Kr8sType = TypeVar("_Kr8sType", JupyterServerV1Alpha1Kr8s, AmaltheaSessionV1Alpha1Kr8s) - - -class NamespacedK8sClient(Generic[_SessionType, _Kr8sType]): - """A kubernetes client that operates in a specific namespace.""" - - def __init__(self, namespace: str, server_type: type[_SessionType], kr8s_type: type[_Kr8sType]): - self.namespace = namespace - self.server_type: type[_SessionType] = server_type - self._kr8s_type: type[_Kr8sType] = kr8s_type - if (self.server_type == AmaltheaSessionV1Alpha1 and self._kr8s_type == JupyterServerV1Alpha1Kr8s) or ( - self.server_type == JupyterServerV1Alpha1 and self._kr8s_type == AmaltheaSessionV1Alpha1Kr8s - ): - raise errors.ProgrammingError(message="Incompatible manifest and client types in k8s client") - self.sanitize = ApiClient().sanitize_for_serialization - - async def get_pod_logs(self, name: str, max_log_lines: Optional[int] = None) -> dict[str, str]: - """Get the logs of all containers in the session.""" - pod = await Pod.get(name=name, namespace=self.namespace) - logs: dict[str, str] = {} - containers = [container.name for container in pod.spec.containers + pod.spec.get("initContainers", [])] - for container in containers: - try: - # NOTE: calling pod.logs without a container name set crashes the library - clogs: list[str] = [clog async for clog in pod.logs(container=container, tail_lines=max_log_lines)] - except httpx.ResponseNotRead: - # NOTE: This occurs when the container is still starting but we try to read its logs - continue - except NotFoundError: - raise errors.MissingResourceError(message=f"The session pod {name} does not exist.") - except ServerError as err: - if err.status == 404: - raise errors.MissingResourceError(message=f"The session pod {name} does not exist.") - raise - else: - logs[container] = "\n".join(clogs) - return logs - - async def get_secret(self, name: str) -> Secret | None: - """Read a specific secret from the cluster.""" - try: - secret = await Secret.get(name, self.namespace) - except NotFoundError: - return None - return secret - - async def create_server(self, manifest: _SessionType) -> _SessionType: - """Create a jupyter server in the cluster.""" - # NOTE: You have to exclude none when using model dump below because otherwise we get - # namespace=null which seems to break the kr8s client or simply k8s does not translate - # namespace = null to the default namespace. - manifest.metadata.namespace = self.namespace - js = await self._kr8s_type(manifest.model_dump(exclude_none=True, mode="json")) - server_name = manifest.metadata.name - try: - await js.create() - except ServerError as e: - logging.exception(f"Cannot start server {server_name} because of {e}") - raise CannotStartServerError( - message=f"Cannot start the session {server_name}", - ) - # NOTE: If refresh is not called then upon creating the object the status is blank - await js.refresh() - # NOTE: We wait for the cache to sync with the newly created server - # If not then the user will get a non-null response from the POST request but - # then immediately after a null response because the newly created server has - # not made it into the cache. With this we wait for the cache to catch up - # before we send the response from the POST request out. Exponential backoff - # is used to avoid overwhelming the cache. 
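The helper invoked on the next line is curried: retry_with_exponential_backoff_async(should_retry) wraps an async callable and re-invokes it, sleeping exponentially longer between attempts, for as long as the predicate returns True for the result. A minimal, runnable sketch of that calling convention, assuming only the curried signature visible in this diff; the toy fetch_from_cache coroutine and its behavior are invented for illustration:

import asyncio

from renku_data_services.notebooks.util.retries import retry_with_exponential_backoff_async

_calls = 0


async def fetch_from_cache() -> str | None:
    """Invented stand-in for get_server: the cache is empty on the first two reads."""
    global _calls
    _calls += 1
    return None if _calls < 3 else "my-session"


async def main() -> None:
    # Predicate first, then the async callable, then its arguments; the wrapped
    # call is retried while the predicate returns True for the returned value.
    wait_for_cache = retry_with_exponential_backoff_async(lambda x: x is None)(fetch_from_cache)
    assert await wait_for_cache() == "my-session"


asyncio.run(main())

The same shape reappears later in this series with a richer _check_ready predicate when sessions are created through the new NotebookK8sClient.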
- server = await retry_with_exponential_backoff_async(lambda x: x is None)(self.get_server)(server_name) - if server is None: - raise CannotStartServerError(message=f"Cannot start the session {server_name}") - return server - - async def patch_server(self, server_name: str, patch: dict[str, Any] | list[dict[str, Any]]) -> _SessionType: - """Patch the server.""" - server = await self._kr8s_type(dict(metadata=dict(name=server_name, namespace=self.namespace))) - patch_type: str | None = None # rfc7386 patch - if isinstance(patch, list): - patch_type = "json" # rfc6902 patch - try: - await server.patch(patch, type=patch_type) - except ServerError as e: - logging.exception(f"Cannot patch server {server_name} because of {e}") - raise PatchServerError() - - return self.server_type.model_validate(server.to_dict()) - async def patch_statefulset( - self, server_name: str, patch: dict[str, Any] | list[dict[str, Any]] - ) -> StatefulSet | None: - """Patch a statefulset.""" - sts = await StatefulSet(dict(metadata=dict(name=server_name, namespace=self.namespace))) - patch_type: str | None = None # rfc7386 patch - if isinstance(patch, list): - patch_type = "json" # rfc6902 patch - try: - await sts.patch(patch, type=patch_type) - except ServerError as err: - if err.status == 404: - # NOTE: It can happen potentially that another request or something else - # deleted the session as this request was going on, in this case we ignore - # the missing statefulset - return None - raise - return cast(StatefulSet, sts) - - async def delete_server(self, server_name: str) -> None: - """Delete the server.""" - server = await self._kr8s_type(dict(metadata=dict(name=server_name, namespace=self.namespace))) - try: - await server.delete(propagation_policy="Foreground") - except ServerError as e: - logging.exception(f"Cannot delete server {server_name} because of {e}") - raise DeleteServerError() - return None - - async def get_server(self, name: str) -> _SessionType | None: - """Get a specific JupyterServer object.""" - try: - server = await self._kr8s_type.get(name=name, namespace=self.namespace) - except NotFoundError: - return None - except ServerError as err: - if err.status not in [400, 404]: - logging.exception(f"Cannot get server {name} because of {err}") - raise IntermittentError(f"Cannot get server {name} from the k8s API.") - return None - return self.server_type.model_validate(server.to_dict()) - async def list_servers(self, label_selector: Optional[str] = None) -> list[_SessionType]: - """Get a list of k8s jupyterserver objects for a specific user.""" - try: - servers = await self._kr8s_type.list(namespace=self.namespace, label_selector=label_selector) - except ServerError as err: - if err.status not in [400, 404]: - logging.exception(f"Cannot list servers because of {err}") - raise IntermittentError(f"Cannot list servers from the k8s API with selector {label_selector}.") - return [] - output: list[_SessionType] - if isinstance(servers, APIObject): - output = [self.server_type.model_validate(servers.to_dict())] - else: - output = [self.server_type.model_validate(server.to_dict()) for server in servers] - - return output - - async def patch_image_pull_secret(self, server_name: str, gitlab_token: GitlabToken) -> None: - """Patch the image pull secret used in a Renku session.""" - secret_name = f"{server_name}-image-secret" - try: - secret = await Secret.get(name=secret_name, namespace=self.namespace) - except NotFoundError: - return None - secret_data = secret.data.to_dict() - old_docker_config = 
json.loads(base64.b64decode(secret_data[".dockerconfigjson"]).decode()) - hostname = next(iter(old_docker_config["auths"].keys()), None) - if not hostname: - raise ProgrammingError( - "Failed to refresh the access credentials in the image pull secret.", - detail="Please contact a Renku administrator.", - ) - new_docker_config = { - "auths": { - hostname: { - "Username": "oauth2", - "Password": gitlab_token.access_token, - "Email": old_docker_config["auths"][hostname]["Email"], - } - } - } - patch_path = "/data/.dockerconfigjson" - patch = [ - { - "op": "replace", - "path": patch_path, - "value": base64.b64encode(json.dumps(new_docker_config).encode()).decode(), - } - ] - await secret.patch(patch, type="json") +class NotebookK8sClient(Generic[_SessionType]): + """A K8s Client for Notebooks.""" - async def patch_statefulset_tokens(self, name: str, renku_tokens: RenkuTokens) -> None: + def __init__( + self, + client: "K8sClusterClientsPool", + rp_repo: ResourcePoolRepository, + session_type: type[_SessionType], + username_label: str, + gvk: GVK, + ) -> None: + self.__client = client + self.__rp_repo = rp_repo + self.__session_type: type[_SessionType] = session_type + self.__session_gvk = gvk + self.__username_label = username_label + + @staticmethod + def _get_statefulset_token_patches(sts: StatefulSet, renku_tokens: RenkuTokens) -> list[dict[str, str]]: """Patch the Renku and Gitlab access tokens that are used in the session statefulset.""" - try: - sts = await StatefulSet.get(name=name, namespace=self.namespace) - except NotFoundError: - return None - - containers: list[V1Container] = [V1Container(**container) for container in sts.spec.template.spec.containers] - init_containers: list[V1Container] = [ - V1Container(**container) for container in sts.spec.template.spec.init_containers - ] + containers = cast(list[Box], sts.spec.template.spec.containers) + init_containers = cast(list[Box], sts.spec.template.spec.initContainers) git_proxy_container_index, git_proxy_container = next( ((i, c) for i, c in enumerate(containers) if c.name == "git-proxy"), (None, None), ) git_clone_container_index, git_clone_container = next( - ((i, c) for i, c in enumerate(init_containers) if c.name == "git-proxy"), + ((i, c) for i, c in enumerate(init_containers) if c.name == "git-clone"), (None, None), ) secrets_container_index, secrets_container = next( @@ -264,23 +81,26 @@ async def patch_statefulset_tokens(self, name: str, renku_tokens: RenkuTokens) - (None, None), ) + def _get_env(container: Box) -> list[Box]: + return cast(list[Box], container.env) + git_proxy_renku_access_token_env = ( - find_env_var(git_proxy_container, "GIT_PROXY_RENKU_ACCESS_TOKEN") + find_env_var(_get_env(git_proxy_container), "GIT_PROXY_RENKU_ACCESS_TOKEN") if git_proxy_container is not None else None ) git_proxy_renku_refresh_token_env = ( - find_env_var(git_proxy_container, "GIT_PROXY_RENKU_REFRESH_TOKEN") + find_env_var(_get_env(git_proxy_container), "GIT_PROXY_RENKU_REFRESH_TOKEN") if git_proxy_container is not None else None ) git_clone_renku_access_token_env = ( - find_env_var(git_clone_container, "GIT_CLONE_USER__RENKU_TOKEN") + find_env_var(_get_env(git_clone_container), "GIT_CLONE_USER__RENKU_TOKEN") if git_clone_container is not None else None ) secrets_renku_access_token_env = ( - find_env_var(secrets_container, "RENKU_ACCESS_TOKEN") if secrets_container is not None else None + find_env_var(_get_env(secrets_container), "RENKU_ACCESS_TOKEN") if secrets_container is not None else None ) patches = list() @@ -311,7 +131,7 @@ 
async def patch_statefulset_tokens(self, name: str, renku_tokens: RenkuTokens) - { "op": "replace", "path": ( - f"/spec/template/spec/containers/{git_clone_container_index}" + f"/spec/template/spec/initContainers/{git_clone_container_index}" f"/env/{git_clone_renku_access_token_env[0]}/value" ), "value": renku_tokens.access_token, @@ -322,197 +142,329 @@ async def patch_statefulset_tokens(self, name: str, renku_tokens: RenkuTokens) - { "op": "replace", "path": ( - f"/spec/template/spec/containers/{secrets_container_index}" + f"/spec/template/spec/initContainers/{secrets_container_index}" f"/env/{secrets_renku_access_token_env[0]}/value" ), "value": renku_tokens.access_token, }, ) - if not patches: - return None - - await sts.patch(patches, type="json") - - async def create_secret(self, secret: V1Secret) -> V1Secret: - """Create a new secret.""" - - new_secret = await Secret(self.sanitize(secret), self.namespace) - await new_secret.create() - return V1Secret(metadata=new_secret.metadata, data=new_secret.data, type=new_secret.raw.get("type")) + return patches + + async def _get(self, name: str, gvk: GVK, safe_username: str | None) -> K8sObject | None: + """Get a specific object, None is returned if it does not exist.""" + objects = [ + o + async for o in self.__client.list( + K8sObjectFilter( + gvk=gvk, + user_id=safe_username, + name=name, + ) + ) + ] + if len(objects) == 1: + return objects[0] - async def delete_secret(self, name: str) -> None: - """Delete a secret.""" - secret = await Secret(dict(metadata=dict(name=name, namespace=self.namespace))) - with suppress(NotFoundError): - await secret.delete() return None + def namespace(self) -> str: + """Current namespace of the main cluster.""" + return self.__client.cluster_by_id(self.cluster_id()).namespace -class ServerCache(Generic[_SessionType]): - """Utility class for calling the jupyter server cache.""" + @staticmethod + def cluster_id() -> ClusterId: + """Cluster id of the main cluster.""" + return DEFAULT_K8S_CLUSTER - def __init__(self, url: str, server_type: type[_SessionType]): - self.url = url - self.client = httpx.AsyncClient(timeout=10) - self.server_type: type[_SessionType] = server_type - self.url_path_name = "servers" - if server_type == AmaltheaSessionV1Alpha1: - self.url_path_name = "sessions" + async def cluster_by_class_id(self, class_id: int | None, api_user: APIUser) -> Cluster: + """Return the cluster associated with the given resource class id.""" + cluster_id = self.cluster_id() - async def list_servers(self, safe_username: str) -> list[_SessionType]: - """List the jupyter servers.""" - url = urljoin(self.url, f"/users/{safe_username}/{self.url_path_name}") - try: - res = await self.client.get(url, timeout=10) - except httpx.RequestError as err: - logging.warning(f"Jupyter server cache at {url} cannot be reached: {err}") - raise JSCacheError("The jupyter server cache is not available") - if res.status_code != 200: - logging.warning( - f"Listing servers at {url} from " - f"jupyter server cache failed with status code: {res.status_code} " - f"and body: {res.text}" + if class_id is not None: + try: + rp = await self.__rp_repo.get_resource_pool_from_class(api_user, class_id) + if rp.cluster is not None: + cluster_id = ClusterId(str(rp.cluster.id)) + except errors.MissingResourceError: + pass + + return self.__client.cluster_by_id(cluster_id) + + async def list_sessions(self, safe_username: str) -> list[_SessionType]: + """Get a list of sessions that belong to a user.""" + sessions = [ + 
self.__session_type.model_validate(s.manifest)
+            async for s in self.__client.list(
+                K8sObjectFilter(
+                    gvk=self.__session_gvk,
+                    user_id=safe_username,
+                    label_selector={self.__username_label: safe_username},
+                )
             )
-            raise JSCacheError(f"The JSCache produced an unexpected status code: {res.status_code}")
+        ]
+        return sorted(sessions, key=lambda sess: sess.metadata.name)

-        return [self.server_type.model_validate(server) for server in res.json()]
+    async def get_session(self, name: str, safe_username: str) -> _SessionType | None:
+        """Get a specific session, None is returned if the session does not exist."""
+        session = await self._get(name, self.__session_gvk, safe_username)

-    async def get_server(self, name: str) -> _SessionType | None:
-        """Get a specific jupyter server."""
-        url = urljoin(self.url, f"/{self.url_path_name}/{name}")
-        try:
-            res = await self.client.get(url, timeout=10)
-        except httpx.RequestError as err:
-            logging.warning(f"Jupyter server cache at {url} cannot be reached: {err}")
-            raise JSCacheError("The jupyter server cache is not available")
-        if res.status_code != 200:
-            logging.warning(
-                f"Reading server at {url} from "
-                f"jupyter server cache failed with status code: {res.status_code} "
-                f"and body: {res.text}"
-            )
-            raise JSCacheError(f"The JSCache produced an unexpected status code: {res.status_code}")
-        output = res.json()
-        if len(output) == 0:
+        if session is None:
             return None
-        if len(output) > 1:
-            raise ProgrammingError(f"Expected to find 1 server when getting server {name}, " f"found {len(output)}.")
-        return self.server_type.model_validate(output[0])
+        return self.__session_type.model_validate(session.manifest)
+
+    async def create_session(self, manifest: _SessionType, api_user: APIUser) -> _SessionType:
+        """Launch a user session."""
+        if api_user.id is None:
+            raise ProgrammingError(message=f"API user id unset for {api_user}.")
+
+        session_name = manifest.metadata.name
+
+        session = await self.get_session(session_name, api_user.id)
+        if session is not None:
+            # NOTE: session already exists
+            return session
+
+        cluster = await self.cluster_by_class_id(manifest.resource_class_id(), api_user)
+
+        manifest.metadata.labels[self.__username_label] = api_user.id
+        session = await self.__client.create(
+            K8sObject(
+                name=session_name,
+                namespace=cluster.namespace,
+                cluster=cluster.id,
+                gvk=self.__session_gvk,
+                user_id=api_user.id,
+                manifest=Box(manifest.model_dump(exclude_none=True, mode="json")),
+            )
+        )
+        # NOTE: We wait for the cache to sync with the newly created server
+        # With this we wait for the cache to catch up before we return a result.
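+        # The predicate below is evaluated against each result, and the lookup is
+        # retried, with exponentially growing pauses, for as long as it returns
+        # True, i.e. until the object appears in the cache with a
+        # creationTimestamp assigned by the API server.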
+ def _check_ready(obj: K8sObject | None) -> bool: + return obj is None or obj.manifest.metadata.get("creationTimestamp") is None -class K8sClient(Generic[_SessionType, _Kr8sType]): - """The K8s client that combines a namespaced client and a jupyter server cache.""" + refreshed_session = await retry_with_exponential_backoff_async(_check_ready)(self.__client.get)(session) + if refreshed_session is not None: + session = refreshed_session - def __init__( - self, - cache: ServerCache[_SessionType], - renku_ns_client: NamespacedK8sClient[_SessionType, _Kr8sType], - username_label: str, - ): - self.cache: ServerCache[_SessionType] = cache - self.renku_ns_client: NamespacedK8sClient[_SessionType, _Kr8sType] = renku_ns_client - self.username_label = username_label - if not self.username_label: - raise ProgrammingError("username_label has to be provided to K8sClient") - self.sanitize = self.renku_ns_client.sanitize - - async def list_servers(self, safe_username: str) -> list[_SessionType]: - """Get a list of servers that belong to a user. - - Attempt to use the cache first but if the cache fails then use the k8s API. - """ - try: - return await self.cache.list_servers(safe_username) - except JSCacheError: - logging.warning(f"Skipping the cache to list servers for user: {safe_username}") - label_selector = f"{self.username_label}={safe_username}" - return await self.renku_ns_client.list_servers(label_selector) - - async def get_server(self, name: str, safe_username: str) -> _SessionType | None: - """Attempt to get a specific server by name from the cache. - - If the request to the cache fails, fallback to the k8s API. - """ - server = None - try: - server = await self.cache.get_server(name) - except JSCacheError: - server = await self.renku_ns_client.get_server(name) + return self.__session_type.model_validate(session.manifest) - if server and server.metadata and server.metadata.labels.get(self.username_label) != safe_username: - return None - return server - - async def get_server_logs( - self, server_name: str, safe_username: str, max_log_lines: Optional[int] = None - ) -> dict[str, str]: - """Get the logs from the server.""" - # NOTE: this get_server ensures the user has access to the server without it you could read someone elses logs - server = await self.get_server(server_name, safe_username) - if not server: - raise errors.MissingResourceError( - message=f"Cannot find server {server_name} for user {safe_username} to retrieve logs." 
- ) - pod_name = f"{server_name}-0" - return await self.renku_ns_client.get_pod_logs(pod_name, max_log_lines) - - async def _get_secret(self, name: str) -> Secret | None: - """Get a specific secret.""" - return await self.renku_ns_client.get_secret(name) - - async def create_server(self, manifest: _SessionType, safe_username: str) -> _SessionType: - """Create a server.""" - server_name = manifest.metadata.name - server = await self.get_server(server_name, safe_username) - if server: - # NOTE: server already exists - return server - manifest.metadata.labels[self.username_label] = safe_username - return await self.renku_ns_client.create_server(manifest) - - async def patch_server( - self, server_name: str, safe_username: str, patch: dict[str, Any] | list[dict[str, Any]] + async def patch_session( + self, session_name: str, safe_username: str, patch: dict[str, Any] | list[dict[str, Any]] ) -> _SessionType: - """Patch a server.""" - server = await self.get_server(server_name, safe_username) - if not server: + """Patch a session.""" + session = await self._get(session_name, self.__session_gvk, safe_username) + if session is None: raise errors.MissingResourceError( - message=f"Cannot find server {server_name} for user {safe_username} in order to patch it." + message=f"Cannot find session {session_name} for user {safe_username} in order to patch it." ) - return await self.renku_ns_client.patch_server(server_name=server_name, patch=patch) + + result = await self.__client.patch(session, patch) + return self.__session_type.model_validate(result.manifest) + + async def delete_session(self, session_name: str, safe_username: str) -> None: + """Delete the session.""" + session = await self._get(session_name, self.__session_gvk, safe_username) + if session is not None: + await self.__client.delete(session) + + async def get_statefulset(self, session_name: str, safe_username: str) -> StatefulSet | None: + """Return the statefulset for the given user session.""" + statefulset = await self._get(session_name, GVK.from_kr8s_object(StatefulSet), safe_username) + if statefulset is None: + return None + + cluster = self.__client.cluster_by_id(statefulset.cluster) + if cluster is None: + return None + + return StatefulSet( + resource=statefulset.to_api_object(cluster.api), namespace=statefulset.namespace, api=cluster.api + ) async def patch_statefulset( - self, server_name: str, patch: dict[str, Any] | list[dict[str, Any]] + self, session_name: str, safe_username: str, patch: dict[str, Any] | list[dict[str, Any]] ) -> StatefulSet | None: """Patch a statefulset.""" - client = self.renku_ns_client - return await client.patch_statefulset(server_name=server_name, patch=patch) + sts = await self.get_statefulset(session_name, safe_username) + if sts is None: + return None + + patch_type: str | None = None # rfc7386 patch + if isinstance(patch, list): + patch_type = "json" # rfc6902 patch + + try: + await sts.patch(patch=patch, type=patch_type) + except ServerError as err: + if err.response is not None and err.response.status_code == 404: + # NOTE: It can happen potentially that another request or something else + # deleted the session as this request was going on, in this case we ignore + # the missing statefulset + return None + raise - async def delete_server(self, server_name: str, safe_username: str) -> None: - """Delete the server.""" - server = await self.get_server(server_name, safe_username) - if not server: + return sts + + async def patch_statefulset_tokens(self, session_name: str, renku_tokens: RenkuTokens, 
safe_username: str) -> None: + """Patch the Renku and Gitlab access tokens used in a session.""" + sts = await self.get_statefulset(session_name, safe_username) + if sts is None: + return + patches = self._get_statefulset_token_patches(sts, renku_tokens) + await sts.patch(patch=patches, type="json") + + async def patch_session_tokens( + self, session_name: str, safe_username: str, renku_tokens: RenkuTokens, gitlab_token: GitlabToken + ) -> None: + """Patch the Renku and Gitlab access tokens used in a session.""" + await self.patch_statefulset_tokens(session_name, renku_tokens, safe_username) + await self.patch_image_pull_secret(session_name, gitlab_token, safe_username) + + async def get_session_logs( + self, session_name: str, safe_username: str, max_log_lines: Optional[int] = None + ) -> dict[str, str]: + """Get the logs from the session.""" + # NOTE: this get_session ensures the user has access to the session, without this you could read someone else's + # logs + session = await self.get_session(session_name, safe_username) + if session is None: raise errors.MissingResourceError( - message=f"Cannot find server {server_name} for user {safe_username} in order to delete it." + message=f"Cannot find session {session_name} for user {safe_username} to retrieve logs." ) - return await self.renku_ns_client.delete_server(server_name) + pod_name = f"{session_name}-0" + result = await self._get(pod_name, GVK.from_kr8s_object(Pod), None) - async def patch_tokens(self, server_name: str, renku_tokens: RenkuTokens, gitlab_token: GitlabToken) -> None: - """Patch the Renku and Gitlab access tokens used in a session.""" - client = self.renku_ns_client - await client.patch_statefulset_tokens(server_name, renku_tokens) - await client.patch_image_pull_secret(server_name, gitlab_token) + logs: dict[str, str] = {} + if result is None: + return logs + + cluster = self.__client.cluster_by_id(result.cluster) + if cluster is None: + return logs + + pod = Pod(resource=result.to_api_object(cluster.api), namespace=result.namespace, api=cluster.api) + + containers = [container.name for container in pod.spec.containers + pod.spec.get("initContainers", [])] + for container in containers: + try: + # NOTE: calling pod.logs without a container name set crashes the library + clogs: list[str] = [clog async for clog in pod.logs(container=container, tail_lines=max_log_lines)] + except (httpx.ResponseNotRead, httpx.HTTPStatusError): + # NOTE: This occurs when the container is still starting, but we try to read its logs + continue + except NotFoundError as err: + raise errors.MissingResourceError(message=f"The session pod {pod_name} does not exist.") from err + except ServerError as err: + if err.response is not None and err.response.status_code == 400: + # NOTE: This occurs when the target container is not yet running, but we try to read its logs + continue + if err.response is not None and err.response.status_code == 404: + raise errors.MissingResourceError(message=f"The session pod {pod_name} does not exist.") from err + raise + else: + logs[container] = "\n".join(clogs) + return logs + + async def patch_image_pull_secret(self, session_name: str, gitlab_token: GitlabToken, safe_username: str) -> None: + """Patch the image pull secret used in a Renku session.""" + secret_name = f"{session_name}-image-secret" + result = await self._get(secret_name, GVK.from_kr8s_object(Secret), safe_username) + if result is None: + return + + cluster = self.__client.cluster_by_id(result.cluster) + if cluster is None: + return + + secret = 
Secret(resource=result.to_api_object(cluster.api), namespace=result.namespace, api=cluster.api) - @property - def preferred_namespace(self) -> str: - """Get the preferred namespace for creating jupyter servers.""" - return self.renku_ns_client.namespace + secret_data = secret.data.to_dict() + old_docker_config = json.loads(base64.b64decode(secret_data[".dockerconfigjson"]).decode()) + hostname = next(iter(old_docker_config["auths"].keys()), None) + if not hostname: + raise ProgrammingError( + "Failed to refresh the access credentials in the image pull secret.", + detail="Please contact a Renku administrator.", + ) + new_docker_config = { + "auths": { + hostname: { + "Username": "oauth2", + "Password": gitlab_token.access_token, + "Email": old_docker_config["auths"][hostname]["Email"], + } + } + } + patch_path = "/data/.dockerconfigjson" + patch = [ + { + "op": "replace", + "path": patch_path, + "value": base64.b64encode(json.dumps(new_docker_config).encode()).decode(), + } + ] + await secret.patch(patch, type="json") - async def create_secret(self, secret: V1Secret) -> V1Secret: + async def create_secret(self, secret: V1Secret, cluster: Cluster) -> V1Secret: """Create a secret.""" - return await self.renku_ns_client.create_secret(secret) - async def delete_secret(self, name: str) -> None: + assert secret.metadata is not None + + secret_obj = K8sObject( + name=secret.metadata.name, + namespace=cluster.namespace, + cluster=cluster.id, + gvk=GVK(kind=Secret.kind, version=Secret.version), + manifest=Box(sanitizer(secret)), + ) + try: + result = await self.__client.create(secret_obj) + except ServerError as err: + if err.response and err.response.status_code == 409: + annotations: Box | None = secret_obj.manifest.metadata.get("annotations") + labels: Box | None = secret_obj.manifest.metadata.get("labels") + patches = [ + { + "op": "replace", + "path": "/data", + "value": secret.data or {}, + }, + { + "op": "replace", + "path": "/stringData", + "value": secret.string_data or {}, + }, + { + "op": "replace", + "path": "/metadata/annotations", + "value": annotations.to_dict() if annotations is not None else {}, + }, + { + "op": "replace", + "path": "/metadata/labels", + "value": labels.to_dict() if labels is not None else {}, + }, + ] + result = await self.__client.patch(secret_obj, patches) + else: + raise + return V1Secret( + metadata=result.manifest.metadata, + data=result.manifest.get("data", {}), + string_data=result.manifest.get("stringData", {}), + type=result.manifest.get("type"), + ) + + async def delete_secret(self, name: str, cluster: Cluster) -> None: """Delete a secret.""" - return await self.renku_ns_client.delete_secret(name) + + await self.__client.delete( + K8sObjectMeta( + name=name, + namespace=cluster.namespace, + cluster=cluster.id, + gvk=GVK(kind=Secret.kind, version=Secret.version), + ) + ) diff --git a/components/renku_data_services/notebooks/api/classes/server.py b/components/renku_data_services/notebooks/api/classes/server.py index 3add6cec6..6451eddbf 100644 --- a/components/renku_data_services/notebooks/api/classes/server.py +++ b/components/renku_data_services/notebooks/api/classes/server.py @@ -1,15 +1,14 @@ """Jupyter server models.""" -from abc import ABC from collections.abc import Sequence from itertools import chain from pathlib import PurePosixPath from typing import Any -from urllib.parse import urljoin, urlparse +from urllib.parse import urlparse from gitlab.v4.objects.projects import Project -from sanic.log import logger +from renku_data_services.app_config 
import logging from renku_data_services.base_models import AnonymousAPIUser, AuthenticatedAPIUser from renku_data_services.base_models.core import APIUser from renku_data_services.notebooks.api.amalthea_patches import cloudstorage as cloudstorage_patches @@ -21,17 +20,20 @@ from renku_data_services.notebooks.api.amalthea_patches import jupyter_server as jupyter_server_patches from renku_data_services.notebooks.api.amalthea_patches import ssh as ssh_patches from renku_data_services.notebooks.api.classes.cloud_storage import ICloudStorageRequest -from renku_data_services.notebooks.api.classes.k8s_client import JupyterServerV1Alpha1Kr8s, K8sClient +from renku_data_services.notebooks.api.classes.k8s_client import NotebookK8sClient from renku_data_services.notebooks.api.classes.repository import GitProvider, Repository from renku_data_services.notebooks.api.schemas.secrets import K8sUserSecrets from renku_data_services.notebooks.api.schemas.server_options import ServerOptions from renku_data_services.notebooks.config import NotebooksConfig +from renku_data_services.notebooks.constants import JUPYTER_SESSION_GVK from renku_data_services.notebooks.crs import JupyterServerV1Alpha1 from renku_data_services.notebooks.errors.programming import DuplicateEnvironmentVariableError from renku_data_services.notebooks.errors.user import MissingResourceError +logger = logging.getLogger(__name__) -class UserServer(ABC): + +class UserServer: """Represents a Renku server session.""" def __init__( @@ -43,18 +45,19 @@ def __init__( environment_variables: dict[str, str], user_secrets: K8sUserSecrets | None, cloudstorage: Sequence[ICloudStorageRequest], - k8s_client: K8sClient, + k8s_client: NotebookK8sClient[JupyterServerV1Alpha1], workspace_mount_path: PurePosixPath, work_dir: PurePosixPath, config: NotebooksConfig, internal_gitlab_user: APIUser, + host: str, using_default_image: bool = False, is_image_private: bool = False, repositories: list[Repository] | None = None, ): self._user = user self.server_name = server_name - self._k8s_client: K8sClient[JupyterServerV1Alpha1, JupyterServerV1Alpha1Kr8s] = k8s_client + self._k8s_client = k8s_client self.safe_username = self._user.id self.image = image self.server_options = server_options @@ -65,6 +68,7 @@ def __init__( self.work_dir = work_dir self.cloudstorage = cloudstorage self.is_image_private = is_image_private + self.host = host self.config = config self.internal_gitlab_user = internal_gitlab_user @@ -89,16 +93,19 @@ def __init__( self._git_providers: list[GitProvider] | None = None self._has_configured_git_providers = False + self.server_url = f"https://{self.host}/sessions/{self.server_name}" + if not self._user.is_authenticated: + self.server_url = f"{self.server_url}?token={self._user.id}" + + def k8s_namespace(self) -> str: + """Get the preferred namespace for a server.""" + return self._k8s_client.namespace() + @property def user(self) -> AnonymousAPIUser | AuthenticatedAPIUser: """Getter for server's user.""" return self._user - @property - def k8s_client(self) -> K8sClient: - """Return server's k8s client.""" - return self._k8s_client - async def repositories(self) -> list[Repository]: """Get the list of repositories in the project.""" # Configure git repository providers based on matching URLs. 
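The server_url assembled in __init__ above replaces the urljoin-based property deleted in the next hunk; only anonymous users carry their pseudonymous id as a token query parameter. A small, self-contained sketch of the resulting URL shapes, with an invented host and session name:

def session_url(host: str, server_name: str, user_id: str, is_authenticated: bool) -> str:
    # Mirrors the assignment in UserServer.__init__: authenticated users get a
    # plain session URL, anonymous users append their id as an access token.
    url = f"https://{host}/sessions/{server_name}"
    if not is_authenticated:
        url = f"{url}?token={user_id}"
    return url


assert session_url("renku.example.org", "renku-2-abc123", "anon-xyz", False) == (
    "https://renku.example.org/sessions/renku-2-abc123?token=anon-xyz"
)
assert session_url("renku.example.org", "renku-2-abc123", "user-1", True) == (
    "https://renku.example.org/sessions/renku-2-abc123"
)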
@@ -116,19 +123,6 @@ async def repositories(self) -> list[Repository]: return self._repositories - @property - def server_url(self) -> str: - """The URL where a user can access their session.""" - if self._user.is_authenticated: - return urljoin( - f"https://{self.config.sessions.ingress.host}", - f"sessions/{self.server_name}", - ) - return urljoin( - f"https://{self.config.sessions.ingress.host}", - f"sessions/{self.server_name}?token={self._user.id}", - ) - async def git_providers(self) -> list[GitProvider]: """The list of git providers.""" if self._git_providers is None: @@ -157,7 +151,7 @@ async def start(self) -> JupyterServerV1Alpha1 | None: ) session_manifest = await self._get_session_manifest() manifest = JupyterServerV1Alpha1.model_validate(session_manifest) - return await self._k8s_client.create_server(manifest, self.safe_username) + return await self._k8s_client.create_session(manifest, self.user) @staticmethod def _check_environment_variables_overrides(patches_list: list[dict[str, Any]]) -> None: @@ -187,8 +181,7 @@ def _check_environment_variables_overrides(patches_list: list[dict[str, Any]]) - def _get_start_errors(self) -> list[str]: """Check if there are any errors before starting the server.""" - errors: list[str] - errors = [] + errors: list[str] = [] if self.image is None: errors.append(f"image {self.image} does not exist or cannot be accessed") return errors @@ -204,6 +197,8 @@ async def _get_session_manifest(self) -> dict[str, Any]: "size": self.server_options.storage, "pvc": { "enabled": True, + # We should check against the cluster, but as this is only used by V1 sessions, we ignore this + # use-case. "storageClassName": self.config.sessions.storage.pvs_storage_class, "mountPath": self.workspace_mount_path.as_posix(), }, @@ -234,10 +229,23 @@ async def _get_session_manifest(self) -> dict[str, Any]: "token": self._user.id, "oidc": {"enabled": False}, } + + cluster = await self.config.k8s_client.cluster_by_class_id(self.server_options.resource_class_id, self._user) + ( + base_server_path, + base_server_url, + base_server_https_url, + host, + tls_secret, + ingress_annotations, + ) = await cluster.get_ingress_parameters( + self._user, self.config.cluster_rp, self.config.sessions.ingress, self.server_name + ) + # Combine everything into the manifest manifest = { - "apiVersion": f"{self.config.amalthea.group}/{self.config.amalthea.version}", - "kind": "JupyterServer", + "apiVersion": JUPYTER_SESSION_GVK.group_version, + "kind": JUPYTER_SESSION_GVK.kind, "metadata": { "name": self.server_name, "labels": self.get_labels(), @@ -263,12 +271,12 @@ async def _get_session_manifest(self) -> dict[str, Any]: ), }, "routing": { - "host": urlparse(self.server_url).netloc, - "path": urlparse(self.server_url).path, - "ingressAnnotations": self.config.sessions.ingress.annotations, + "host": host, + "path": base_server_path, + "ingressAnnotations": ingress_annotations, "tls": { - "enabled": self.config.sessions.ingress.tls_secret is not None, - "secretName": self.config.sessions.ingress.tls_secret, + "enabled": tls_secret is not None, + "secretName": tls_secret.name if tls_secret is not None else "", }, }, "storage": storage, @@ -294,7 +302,6 @@ async def _get_patches(self) -> list[dict[str, Any]]: jupyter_server_patches.image_pull_secret(self, self.internal_gitlab_user.access_token), jupyter_server_patches.disable_service_links(), jupyter_server_patches.rstudio_env_variables(self), - jupyter_server_patches.user_secrets(self), await git_proxy_patches.main(self), await 
git_sidecar_patches.main(self), general_patches.oidc_unverified_email(self), @@ -310,6 +317,9 @@ async def _get_patches(self) -> list[dict[str, Any]]: # WARN: this patch depends on the index of the sidecar and so needs to be updated # if sidercars are added or removed await cloudstorage_patches.main(self), + # NOTE: User secrets adds an init container, volume and mounts, so it may affect + # indices in other patches. + jupyter_server_patches.user_secrets(self), ) ) @@ -332,11 +342,14 @@ def get_labels(self) -> dict[str, str | None]: def get_annotations(self) -> dict[str, str | None]: """Get the annotations for the session.""" prefix = self._get_renku_annotation_prefix() + username = self._user.id + if isinstance(self.user, AuthenticatedAPIUser) and self._user.email: + username = self._user.email annotations = { f"{prefix}commit-sha": None, f"{prefix}gitlabProjectId": None, f"{prefix}safe-username": self._user.id, - f"{prefix}username": self._user.id, + f"{prefix}username": username, f"{prefix}userId": self._user.id, f"{prefix}servername": self.server_name, f"{prefix}branch": None, @@ -367,39 +380,36 @@ def __init__( self, user: AnonymousAPIUser | AuthenticatedAPIUser, server_name: str, - namespace: str, + gl_namespace: str, project: str, branch: str, commit_sha: str, - notebook: str | None, # TODO: Is this value actually needed? image: str | None, server_options: ServerOptions, environment_variables: dict[str, str], user_secrets: K8sUserSecrets | None, cloudstorage: Sequence[ICloudStorageRequest], - k8s_client: K8sClient, + k8s_client: NotebookK8sClient, workspace_mount_path: PurePosixPath, work_dir: PurePosixPath, config: NotebooksConfig, + host: str, gitlab_project: Project | None, internal_gitlab_user: APIUser, using_default_image: bool = False, is_image_private: bool = False, - **_: dict, + **_: dict, # Required to ignore unused arguments, among which repositories ): - self.gitlab_project = gitlab_project - self.internal_gitlab_user = internal_gitlab_user - self.gitlab_project_name = f"{namespace}/{project}" - single_repository = ( + repositories = [ Repository( - url=self.gitlab_project.http_url_to_repo, - dirname=self.gitlab_project.path, + url=p.http_url_to_repo, + dirname=p.path, branch=branch, commit_sha=commit_sha, ) - if self.gitlab_project is not None - else None - ) + for p in [gitlab_project] + if p is not None + ] super().__init__( user=user, @@ -414,18 +424,18 @@ def __init__( work_dir=work_dir, using_default_image=using_default_image, is_image_private=is_image_private, - repositories=[single_repository] if single_repository is not None else [], + repositories=repositories, + host=host, config=config, internal_gitlab_user=internal_gitlab_user, ) - self.namespace = namespace + self.gl_namespace = gl_namespace self.project = project self.branch = branch self.commit_sha = commit_sha - self.notebook = notebook self.git_host = urlparse(config.git.url).netloc - self.single_repository = single_repository + self.gitlab_project = gitlab_project def _get_start_errors(self) -> list[str]: """Check if there are any errors before starting the server.""" @@ -480,80 +490,9 @@ def get_annotations(self) -> dict[str, str | None]: annotations[f"{prefix}commit-sha"] = self.commit_sha annotations[f"{prefix}branch"] = self.branch annotations[f"{prefix}git-host"] = self.git_host - annotations[f"{prefix}namespace"] = self.namespace + annotations[f"{prefix}namespace"] = self.gl_namespace annotations[f"{prefix}projectName"] = self.project if self.gitlab_project is not None: 
annotations[f"{prefix}gitlabProjectId"] = str(self.gitlab_project.id) annotations[f"{prefix}repository"] = self.gitlab_project.web_url return annotations - - -class Renku2UserServer(UserServer): - """Represents a Renku 2.0 server session.""" - - def __init__( - self, - user: AnonymousAPIUser | AuthenticatedAPIUser, - image: str, - project_id: str, - launcher_id: str, - server_name: str, - server_options: ServerOptions, - environment_variables: dict[str, str], - user_secrets: K8sUserSecrets | None, - cloudstorage: Sequence[ICloudStorageRequest], - k8s_client: K8sClient, - workspace_mount_path: PurePosixPath, - work_dir: PurePosixPath, - repositories: list[Repository], - config: NotebooksConfig, - internal_gitlab_user: APIUser, - using_default_image: bool = False, - is_image_private: bool = False, - **_: dict, - ): - super().__init__( - user=user, - server_name=server_name, - image=image, - server_options=server_options, - environment_variables=environment_variables, - user_secrets=user_secrets, - cloudstorage=cloudstorage, - k8s_client=k8s_client, - workspace_mount_path=workspace_mount_path, - work_dir=work_dir, - using_default_image=using_default_image, - is_image_private=is_image_private, - repositories=repositories, - config=config, - internal_gitlab_user=internal_gitlab_user, - ) - - self.project_id = project_id - self.launcher_id = launcher_id - - def get_labels(self) -> dict[str, str | None]: - """Get the labels of the jupyter server.""" - prefix = self._get_renku_annotation_prefix() - labels = super().get_labels() - - # for validation purpose - for item in ["commit-sha", "gitlabProjectId"]: - labels[f"{prefix}{item}"] = "" - - return labels - - def get_annotations(self) -> dict[str, str | None]: - """Get the annotations of the session.""" - prefix = self._get_renku_annotation_prefix() - annotations = super().get_annotations() - annotations[f"{prefix}renkuVersion"] = "2.0" - annotations[f"{prefix}projectId"] = self.project_id - annotations[f"{prefix}launcherId"] = self.launcher_id - - # for validation purpose - for item in ["commit-sha", "branch", "git-host", "namespace", "projectName", "gitlabProjectId", "repository"]: - annotations[f"{prefix}{item}"] = "" - - return annotations diff --git a/components/renku_data_services/notebooks/api/classes/user.py b/components/renku_data_services/notebooks/api/classes/user.py index 0700759a1..34484e909 100644 --- a/components/renku_data_services/notebooks/api/classes/user.py +++ b/components/renku_data_services/notebooks/api/classes/user.py @@ -5,7 +5,15 @@ from gitlab import Gitlab from gitlab.v4.objects.projects import Project from gitlab.v4.objects.users import CurrentUser -from sanic.log import logger + +from renku_data_services.app_config import logging + +logger = logging.getLogger(__name__) + + +@lru_cache(maxsize=8) +def _get_project(client: Gitlab, namespace_project: str) -> Project: + return client.projects.get(f"{namespace_project}") class NotebooksGitlabClient: @@ -21,11 +29,10 @@ def gitlab_user(self) -> CurrentUser | None: self.gitlab_client.auth() return self.gitlab_client.user - @lru_cache(maxsize=8) def get_renku_project(self, namespace_project: str) -> Project | None: """Retrieve the GitLab project.""" try: - return self.gitlab_client.projects.get(f"{namespace_project}") + return _get_project(self.gitlab_client, namespace_project) except Exception as e: logger.warning(f"Cannot find the gitlab project: {namespace_project}, error: {e}") return None diff --git a/components/renku_data_services/notebooks/api/schemas/cloud_storage.py 
b/components/renku_data_services/notebooks/api/schemas/cloud_storage.py index 0991cb71b..25d5e7edd 100644 --- a/components/renku_data_services/notebooks/api/schemas/cloud_storage.py +++ b/components/renku_data_services/notebooks/api/schemas/cloud_storage.py @@ -1,5 +1,6 @@ """Schema for cloudstorage config.""" +import json from configparser import ConfigParser from io import StringIO from pathlib import PurePosixPath @@ -8,9 +9,8 @@ from kubernetes import client from marshmallow import EXCLUDE, Schema, ValidationError, fields, validates_schema -from renku_data_services.base_models import APIUser from renku_data_services.notebooks.api.classes.cloud_storage import ICloudStorageRequest -from renku_data_services.notebooks.config import NotebooksConfig +from renku_data_services.storage.models import CloudStorage _sanitize_for_serialization = client.ApiClient().sanitize_for_serialization @@ -57,54 +57,55 @@ def __init__( readonly: bool, mount_folder: str, name: Optional[str], - config: NotebooksConfig, + secrets: dict[str, str], # "Mapping between secret ID (key) and secret name (value) + storage_class: str, + user_secret_key: str | None = None, ) -> None: """Creates a cloud storage instance without validating the configuration.""" - self.config = config self.configuration = configuration self.source_path = source_path self.mount_folder = mount_folder self.readonly = readonly self.name = name + self.secrets = secrets + self.base_name: str | None = None + self.user_secret_key = user_secret_key + self.storage_class = storage_class @classmethod async def storage_from_schema( cls, data: dict[str, Any], - user: APIUser, - internal_gitlab_user: APIUser, - project_id: int, work_dir: PurePosixPath, - config: NotebooksConfig, + saved_storage: CloudStorage | None, + storage_class: str, + user_secret_key: str | None = None, ) -> Self: """Create storage object from request.""" name = None - if data.get("storage_id"): - # Load from storage service - if user.access_token is None: - raise ValidationError("Storage mounting is only supported for logged-in users.") - if project_id < 1: - raise ValidationError("Could not get gitlab project id") - ( - configuration, - source_path, - target_path, - readonly, - name, - ) = await config.storage_validator.get_storage_by_id( - user, internal_gitlab_user, project_id, data["storage_id"] - ) - configuration = {**configuration, **(configuration or {})} - readonly = readonly + if saved_storage: + configuration = {**saved_storage.configuration.model_dump(), **(data.get("configuration", {}))} + readonly = saved_storage.readonly + name = saved_storage.name else: source_path = data["source_path"] target_path = data["target_path"] configuration = data["configuration"] readonly = data.get("readonly", True) - mount_folder = str(work_dir / target_path) - await config.storage_validator.validate_storage_configuration(configuration, source_path) - return cls(source_path, configuration, readonly, mount_folder, name, config) + # NOTE: This is used only in Renku v1, there we do not save secrets for storage + secrets: dict[str, str] = {} + mount_folder = str(work_dir / target_path) + return cls( + source_path=source_path, + configuration=configuration, + readonly=readonly, + mount_folder=mount_folder, + name=name, + storage_class=storage_class, + secrets=secrets, + user_secret_key=user_secret_key, + ) def pvc( self, @@ -115,6 +116,8 @@ def pvc( ) -> client.V1PersistentVolumeClaim: """The PVC for mounting cloud storage.""" return client.V1PersistentVolumeClaim( + api_version="v1", + 
kind="PersistentVolumeClaim", metadata=client.V1ObjectMeta( name=base_name, namespace=namespace, @@ -124,7 +127,7 @@ def pvc( spec=client.V1PersistentVolumeClaimSpec( access_modes=["ReadOnlyMany" if self.readonly else "ReadWriteMany"], resources=client.V1VolumeResourceRequirements(requests={"storage": "10Gi"}), - storage_class_name=self.config.cloud_storage.storage_class, + storage_class_name=self.storage_class, ), ) @@ -159,9 +162,16 @@ def secret( "remotePath": self.source_path, "configData": self.config_string(self.name or base_name), } + string_data.update(self.mount_options()) + # NOTE: in Renku v1 this function is not directly called so the base name + # comes from the user_secret_key property on the class instance + if self.user_secret_key: + string_data["secretKey"] = self.user_secret_key if user_secret_key: string_data["secretKey"] = user_secret_key return client.V1Secret( + api_version="v1", + kind="Secret", metadata=client.V1ObjectMeta( name=base_name, namespace=namespace, @@ -179,6 +189,7 @@ def get_manifest_patch( annotations: dict[str, str] | None = None, ) -> list[dict[str, Any]]: """Get server manifest patch.""" + self.base_name = base_name patches = [] patches.append( { @@ -216,17 +227,43 @@ def config_string(self, name: str) -> str: """ if not self.configuration: raise ValidationError("Missing configuration for cloud storage") - # TODO Use RCloneValidator.get_real_configuration(...) instead. - real_config = dict(self.configuration) - if real_config["type"] == "s3" and real_config.get("provider") == "Switch": + + # Transform configuration for polybox or switchDrive + storage_type = self.configuration.get("type", "") + access = self.configuration.get("provider", "") + + if storage_type == "polybox" or storage_type == "switchDrive": + self.configuration["type"] = "webdav" + self.configuration["provider"] = "" + # NOTE: Without the vendor field mounting storage and editing files results in the modification + # time for touched files to be temporarily set to `1999-09-04` which causes the text + # editor to complain that the file has changed and whether it should overwrite new changes. + self.configuration["vendor"] = "owncloud" + + if access == "shared" and storage_type == "polybox": + self.configuration["url"] = "https://polybox.ethz.ch/public.php/webdav/" + elif access == "shared" and storage_type == "switchDrive": + self.configuration["url"] = "https://drive.switch.ch/public.php/webdav/" + elif access == "personal" and storage_type == "polybox": + self.configuration["url"] = "https://polybox.ethz.ch/remote.php/webdav/" + elif access == "personal" and storage_type == "switchDrive": + self.configuration["url"] = "https://drive.switch.ch/remote.php/webdav/" + + # Extract the user from the public link + if access == "shared" and storage_type in {"polybox", "switchDrive"}: + public_link = self.configuration.get("public_link", "") + user_identifier = public_link.split("/")[-1] + self.configuration["user"] = user_identifier + + if self.configuration["type"] == "s3" and self.configuration.get("provider", None) == "Switch": # Switch is a fake provider we add for users, we need to replace it since rclone itself # doesn't know it - real_config["provider"] = "Other" - elif real_config["type"] == "openbis": - real_config["type"] = "sftp" - real_config["port"] = "2222" - real_config["user"] = "?" 
@@ -250,7 +287,35 @@ def with_override(self, override: RCloneStorageRequestOverride) -> "RCloneStorag
             readonly=override.readonly if override.readonly is not None else self.readonly,
             configuration=override.configuration if override.configuration else self.configuration,
             name=self.name,
-            config=self.config,
+            secrets=self.secrets,
+            storage_class=self.storage_class,
+            user_secret_key=self.user_secret_key,
+        )
+
+    def mount_options(self) -> dict[str, str]:
+        """Returns extra mount options for this storage."""
+        if not self.configuration:
+            raise ValidationError("Missing configuration for cloud storage")
+
+        vfs_options: dict[str, Any] = dict()
+        mount_options: dict[str, Any] = dict()
+        storage_type = self.configuration.get("type", "")
+        if storage_type == "doi":
+            vfs_options["CacheMode"] = "full"
+            mount_options["AttrTimeout"] = "41s"
+
+        options: dict[str, str] = dict()
+        if vfs_options:
+            options["vfsOpt"] = json.dumps(vfs_options)
+        if mount_options:
+            options["mountOpt"] = json.dumps(mount_options)
+        return options
+
+    def __repr__(self) -> str:
+        """Override to make sure no secrets or sensitive configuration gets printed in logs."""
+        return (
+            f"{RCloneStorageRequest.__name__}(name={self.name}, source_path={self.source_path}, "
+            f"mount_folder={self.mount_folder}, readonly={self.readonly})"
         )
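
The new mount_options hook is why the secret shown earlier gains extra fields: the rclone CSI driver takes VFS and mount flags as JSON-encoded strings. A short sketch of the observable behaviour for a DOI storage (field names as in the diff, the assertions are illustrative):

    import json

    def mount_options(configuration: dict) -> dict[str, str]:
        # Same shape as the method above: JSON-encode VFS/mount flags for the secret.
        vfs_options: dict = {}
        mount_opts: dict = {}
        if configuration.get("type", "") == "doi":
            vfs_options["CacheMode"] = "full"   # DOI storages need full VFS caching
            mount_opts["AttrTimeout"] = "41s"
        options: dict[str, str] = {}
        if vfs_options:
            options["vfsOpt"] = json.dumps(vfs_options)
        if mount_opts:
            options["mountOpt"] = json.dumps(mount_opts)
        return options

    assert mount_options({"type": "doi"}) == {
        "vfsOpt": '{"CacheMode": "full"}',
        "mountOpt": '{"AttrTimeout": "41s"}',
    }
    assert mount_options({"type": "s3"}) == {}  # all other backends are untouched
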
diff --git a/components/renku_data_services/notebooks/api/schemas/servers_get.py b/components/renku_data_services/notebooks/api/schemas/servers_get.py
index 110356c92..76f9d4a61 100644
--- a/components/renku_data_services/notebooks/api/schemas/servers_get.py
+++ b/components/renku_data_services/notebooks/api/schemas/servers_get.py
@@ -33,10 +33,10 @@ def list(cls) -> list[str]:
 class StepStatusEnum(Enum):
     """Enum for status of a session start step."""

-    ready: str = "ready"  # An init job completely done or container fully running
-    waiting: str = "waiting"  # Waiting to start
-    executing: str = "executing"  # Running but not complete or fully ready
-    failed: str = "failed"
+    ready = "ready"  # An init job completely done or container fully running
+    waiting = "waiting"  # Waiting to start
+    executing = "executing"  # Running but not complete or fully ready
+    failed = "failed"

     @classmethod
     def list(cls) -> list[str]:
diff --git a/components/renku_data_services/notebooks/api/schemas/servers_post.py b/components/renku_data_services/notebooks/api/schemas/servers_post.py
index b890ffb18..2550a98d1 100644
--- a/components/renku_data_services/notebooks/api/schemas/servers_post.py
+++ b/components/renku_data_services/notebooks/api/schemas/servers_post.py
@@ -35,6 +35,7 @@ class LaunchNotebookRequestWithoutStorageBase(Schema):
     # User uploaded secrets
     # Contains secret id list and mount path
     user_secrets = fields.Nested(UserSecrets(), required=False, load_default=None)
+    cluster_name = fields.Str(required=False, load_default=None)


 class LaunchNotebookRequestWithoutStorage(LaunchNotebookRequestWithoutStorageBase):
diff --git a/components/renku_data_services/notebooks/apispec.py b/components/renku_data_services/notebooks/apispec.py
index 860256726..8f5673816 100644
--- a/components/renku_data_services/notebooks/apispec.py
+++ b/components/renku_data_services/notebooks/apispec.py
@@ -1,6 +1,6 @@
 # generated by datamodel-codegen:
 #   filename: api.spec.yaml
-#   timestamp: 2024-10-28T16:00:32+00:00
+#   timestamp: 2025-04-17T14:26:28+00:00

 from __future__ import annotations

@@ -34,12 +34,21 @@ class DefaultCullingThresholds(BaseAPISpec):
     registered: CullingThreshold


+class EnvVarOverride(BaseAPISpec):
+    name: str = Field(
+        ..., examples=["MY_VAR"], max_length=256, pattern="^[a-zA-Z_][a-zA-Z0-9_]*$"
+    )
+    value: str = Field(..., max_length=500)
+
+
 class Error(BaseAPISpec):
-    code: int = Field(..., example=1404, gt=0)
+    code: int = Field(..., examples=[1404], gt=0)
     detail: Optional[str] = Field(
-        None, example="A more detailed optional message showing what the problem was"
+        None, examples=["A more detailed optional message showing what the problem was"]
+    )
+    message: str = Field(
+        ..., examples=["Something went wrong - please try again later"]
     )
-    message: str = Field(..., example="Something went wrong - please try again later")


 class ErrorResponse(BaseAPISpec):
@@ -353,7 +362,7 @@ class NotebookResponse(BaseAPISpec):
     image: Optional[str] = None
     name: Optional[str] = Field(
         None,
-        example="d185e68d-d43-renku-2-b9ac279a4e8a85ac28d08",
+        examples=["d185e68d-d43-renku-2-b9ac279a4e8a85ac28d08"],
         max_length=50,
         min_length=5,
         pattern="^[a-z]([-a-z0-9]*[a-z0-9])?$",
@@ -382,13 +391,16 @@ class SessionPostRequest(BaseAPISpec):
     )
     resource_class_id: Optional[int] = None
     cloudstorage: Optional[List[SessionCloudStoragePost]] = None
+    env_variable_overrides: Optional[List[EnvVarOverride]] = Field(
+        None, description="Environment variable overrides for the session pod"
+    )


 class SessionResponse(BaseAPISpec):
     image: str
     name: str = Field(
         ...,
-        example="d185e68d-d43-renku-2-b9ac279a4e8a85ac28d08",
+        examples=["d185e68d-d43-renku-2-b9ac279a4e8a85ac28d08"],
         max_length=50,
         min_length=5,
         pattern="^[a-z]([-a-z0-9]*[a-z0-9])?$",
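
With EnvVarOverride and the new env_variable_overrides field on SessionPostRequest, a session launch request can now override launcher-defined environment variables. A hypothetical request body (the IDs and values are placeholders, other fields behave as before):

    session_post_body = {
        "launcher_id": "01AN4Z79ZS5XN0F25N3DF53U4E",  # placeholder ULID
        "resource_class_id": 3,
        "disk_storage": 4,  # gigabytes
        "env_variable_overrides": [
            # Names must match ^[a-zA-Z_][a-zA-Z0-9_]*$ (max 256 chars); values are capped at 500 chars.
            {"name": "MY_VAR", "value": "some-value"},
            {"name": "DEBUG", "value": "1"},
        ],
    }
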
diff --git a/components/renku_data_services/notebooks/blueprints.py b/components/renku_data_services/notebooks/blueprints.py
index 77bdc0669..b45a1c15a 100644
--- a/components/renku_data_services/notebooks/blueprints.py
+++ b/components/renku_data_services/notebooks/blueprints.py
@@ -1,86 +1,79 @@
 """Notebooks service API."""

-import base64
-import os
 from dataclasses import dataclass
 from pathlib import PurePosixPath
-from typing import Any, cast
-from urllib.parse import urljoin, urlparse

-import httpx
-from kubernetes.client import V1ObjectMeta, V1Secret
 from sanic import Request, empty, exceptions, json
 from sanic.response import HTTPResponse, JSONResponse
 from sanic_ext import validate
-from toml import dumps
 from ulid import ULID
-from yaml import safe_dump

 from renku_data_services import base_models
 from renku_data_services.base_api.auth import authenticate, authenticate_2
 from renku_data_services.base_api.blueprint import BlueprintFactoryResponse, CustomBlueprint
 from renku_data_services.base_models import AnonymousAPIUser, APIUser, AuthenticatedAPIUser, Authenticator
-from renku_data_services.crc.db import ResourcePoolRepository
-from renku_data_services.crc.models import GpuKind
+from renku_data_services.base_models.metrics import MetricsService
+from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository
 from renku_data_services.data_connectors.db import (
-    DataConnectorProjectLinkRepository,
     DataConnectorRepository,
     DataConnectorSecretRepository,
 )
-from renku_data_services.data_connectors.models import DataConnectorSecret
 from renku_data_services.errors import errors
 from renku_data_services.notebooks import apispec, core
-from renku_data_services.notebooks.api.amalthea_patches import git_proxy, init_containers
-from renku_data_services.notebooks.api.classes.repository import Repository
-from renku_data_services.notebooks.api.schemas.cloud_storage import RCloneStorage
+from renku_data_services.notebooks.api.amalthea_patches.init_containers import user_secrets_container
 from renku_data_services.notebooks.api.schemas.config_server_options import ServerOptionsEndpointResponse
 from renku_data_services.notebooks.api.schemas.logs import ServerLogs
-from renku_data_services.notebooks.api.schemas.servers_get import (
-    NotebookResponse,
-    ServersGetResponse,
-)
 from renku_data_services.notebooks.config import NotebooksConfig
+from renku_data_services.notebooks.core_sessions import (
+    get_auth_secret_anonymous,
+    get_auth_secret_authenticated,
+    get_culling,
+    get_data_sources,
+    get_extra_containers,
+    get_extra_init_containers,
+    get_gitlab_image_pull_secret,
+    get_launcher_env_variables,
+    patch_session,
+    repositories_from_project,
+    request_dc_secret_creation,
+    request_session_secret_creation,
+    requires_image_pull_secret,
+    resources_from_resource_class,
+    verify_launcher_env_variable_overrides,
+)
 from renku_data_services.notebooks.crs import (
-    Affinity,
     AmaltheaSessionSpec,
     AmaltheaSessionV1Alpha1,
     Authentication,
     AuthenticationType,
-    Culling,
-    DataSource,
-    ExtraContainer,
     ExtraVolume,
     ExtraVolumeMount,
+    ImagePullPolicy,
+    ImagePullSecret,
     Ingress,
     InitContainer,
     Metadata,
     ReconcileStrategy,
-    Resources,
-    SecretAsVolume,
-    SecretAsVolumeItem,
-    SecretRefKey,
-    SecretRefWhole,
     Session,
     SessionEnvItem,
-    State,
+    ShmSizeStr,
+    SizeStr,
     Storage,
-    TlsSecret,
-    Toleration,
 )
 from renku_data_services.notebooks.errors.intermittent import AnonymousUserPatchError
+from renku_data_services.notebooks.models import ExtraSecret
 from renku_data_services.notebooks.util.kubernetes_ import (
     renku_2_make_server_name,
 )
 from renku_data_services.notebooks.utils import (
-    get_user_secret,
-    merge_node_affinities,
     node_affinity_from_resource_class,
     tolerations_from_resource_class,
 )
-from renku_data_services.project.db import ProjectRepository
+from renku_data_services.project.db import ProjectRepository, ProjectSessionSecretRepository
 from renku_data_services.repositories.db import GitRepositoriesRepository
 from renku_data_services.session.db import SessionRepository
 from renku_data_services.storage.db import StorageRepository
+from renku_data_services.users.db import UserRepo


 @dataclass(kw_only=True)
@@ -92,6 +85,8 @@ class NotebooksBP(CustomBlueprint):
     git_repo: GitRepositoriesRepository
     internal_gitlab_authenticator: base_models.Authenticator
     rp_repo: ResourcePoolRepository
+    user_repo: UserRepo
+    storage_repo: StorageRepository

     def version(self) -> BlueprintFactoryResponse:
         """Return notebook services version."""
@@ -110,7 +105,7 @@ async def _user_servers(
         ) -> JSONResponse:
             filter_attrs = list(filter(lambda x: x[1] is not None, request.get_query_args()))
             filtered_servers = await core.user_servers(self.nb_config, user, filter_attrs)
-            return json(ServersGetResponse().dump({"servers": filtered_servers}))
+            return core.serialize_v1_servers(filtered_servers,
self.nb_config) return "/notebooks/servers", ["GET"], _user_servers @@ -122,46 +117,32 @@ async def _user_server( request: Request, user: AnonymousAPIUser | AuthenticatedAPIUser, server_name: str ) -> JSONResponse: server = await core.user_server(self.nb_config, user, server_name) - return json(NotebookResponse().dump(server)) + return core.serialize_v1_server(server, self.nb_config) return "/notebooks/servers/", ["GET"], _user_server def launch_notebook(self) -> BlueprintFactoryResponse: """Start a renku session.""" - @authenticate_2(self.authenticator, self.internal_gitlab_authenticator) - @validate(json=apispec.LaunchNotebookRequest) - async def _launch_notebook( - request: Request, - user: AnonymousAPIUser | AuthenticatedAPIUser, - internal_gitlab_user: APIUser, - body: apispec.LaunchNotebookRequest, - ) -> JSONResponse: - server, status_code = await core.launch_notebook(self.nb_config, user, internal_gitlab_user, body) - return json(NotebookResponse().dump(server), status_code) - - return "/notebooks/servers", ["POST"], _launch_notebook - - def launch_notebook_old(self) -> BlueprintFactoryResponse: - """Start a renku session using the old operator.""" - @authenticate_2(self.authenticator, self.internal_gitlab_authenticator) @validate(json=apispec.LaunchNotebookRequestOld) - async def _launch_notebook_old( + async def _launch_notebook( request: Request, user: AnonymousAPIUser | AuthenticatedAPIUser, internal_gitlab_user: APIUser, body: apispec.LaunchNotebookRequestOld, ) -> JSONResponse: - server, status_code = await core.launch_notebook_old( + server, status_code = await core.launch_notebook( self.nb_config, user, internal_gitlab_user, body, + user_repo=self.user_repo, + storage_repo=self.storage_repo, ) - return json(NotebookResponse().dump(server), status_code) + return core.serialize_v1_server(server, self.nb_config, status_code) - return "/notebooks/old/servers", ["POST"], _launch_notebook_old + return "/notebooks/servers", ["POST"], _launch_notebook def patch_server(self) -> BlueprintFactoryResponse: """Patch a user server by name based on the query param.""" @@ -179,11 +160,7 @@ async def _patch_server( raise AnonymousUserPatchError() manifest = await core.patch_server(self.nb_config, user, internal_gitlab_user, server_name, body) - notebook_response = apispec.NotebookResponse.parse_obj(manifest) - return json( - notebook_response.model_dump(), - 200, - ) + return core.serialize_v1_server(manifest, self.nb_config) return "/notebooks/servers/", ["PATCH"], _patch_server @@ -197,7 +174,7 @@ async def _stop_server( try: await core.stop_server(self.nb_config, user, server_name) except errors.MissingResourceError as err: - raise exceptions.NotFound(message=err.message) + raise exceptions.NotFound(message=err.message) from err return HTTPResponse(status=204) return "/notebooks/servers/", ["DELETE"], _stop_server @@ -222,7 +199,7 @@ async def _server_logs( try: logs = await core.server_logs(self.nb_config, user, server_name, max_lines) except errors.MissingResourceError as err: - raise exceptions.NotFound(message=err.message) + raise exceptions.NotFound(message=err.message) from err return json(ServerLogs().dump(logs)) return "/notebooks/logs/", ["GET"], _server_logs @@ -256,12 +233,15 @@ class NotebooksNewBP(CustomBlueprint): internal_gitlab_authenticator: base_models.Authenticator nb_config: NotebooksConfig project_repo: ProjectRepository + project_session_secret_repo: ProjectSessionSecretRepository session_repo: SessionRepository rp_repo: ResourcePoolRepository storage_repo: 
StorageRepository
+    user_repo: UserRepo
     data_connector_repo: DataConnectorRepository
-    data_connector_project_link_repo: DataConnectorProjectLinkRepository
     data_connector_secret_repo: DataConnectorSecretRepository
+    metrics: MetricsService
+    cluster_repo: ClusterRepository

     def start(self) -> BlueprintFactoryResponse:
         """Start a session with the new operator."""
@@ -274,13 +254,15 @@ async def _handler(
             internal_gitlab_user: APIUser,
             body: apispec.SessionPostRequest,
         ) -> JSONResponse:
-            # gitlab_client = NotebooksGitlabClient(self.nb_config.git.url, internal_gitlab_user.access_token)
             launcher = await self.session_repo.get_launcher(user, ULID.from_str(body.launcher_id))
             project = await self.project_repo.get_project(user=user, project_id=launcher.project_id)
+            # We have to use body.resource_class_id and not launcher.resource_class_id as it may have been
+            # overridden by the user when selecting a different resource class from a different resource pool.
+            cluster = await self.nb_config.k8s_v2_client.cluster_by_class_id(body.resource_class_id, user)
             server_name = renku_2_make_server_name(
-                user=user, project_id=str(launcher.project_id), launcher_id=body.launcher_id
+                user=user, project_id=str(launcher.project_id), launcher_id=body.launcher_id, cluster_id=cluster.id
             )
-            existing_session = await self.nb_config.k8s_v2_client.get_server(server_name, user.id)
+            existing_session = await self.nb_config.k8s_v2_client.get_session(server_name, user.id)
             if existing_session is not None and existing_session.spec is not None:
                 return json(existing_session.as_apispec().model_dump(exclude_none=True, mode="json"))
             environment = launcher.environment
@@ -291,322 +273,227 @@ async def _handler(
             default_resource_class = await self.rp_repo.get_default_resource_class()
             if default_resource_class.id is None:
                 raise errors.ProgrammingError(message="The default resource class has to have an ID", quiet=True)
-            resource_class_id: int
-            quota: str | None = None
             if body.resource_class_id is None:
-                resource_class = await self.rp_repo.get_default_resource_class()
-                # TODO: Add types for saved and unsaved resource class
-                resource_class_id = cast(int, resource_class.id)
+                resource_pool = await self.rp_repo.get_default_resource_pool()
+                resource_class = resource_pool.get_default_resource_class()
+                if not resource_class and len(resource_pool.classes) > 0:
+                    resource_class = resource_pool.classes[0]
+                if not resource_class or not resource_class.id:
+                    raise errors.ProgrammingError(message="Cannot find any resource classes in the default pool.")
             else:
-                resource_class = await self.rp_repo.get_resource_class(user, resource_class_id)
-                # TODO: Add types for saved and unsaved resource class
-                resource_class_id = cast(int, resource_class.id)
-                quota = resource_class.quota
-            await self.nb_config.crc_validator.validate_class_storage(user, resource_class_id, body.disk_storage)
+                resource_pool = await self.rp_repo.get_resource_pool_from_class(user, body.resource_class_id)
+                resource_class = resource_pool.get_resource_class(body.resource_class_id)
+                if not resource_class or not resource_class.id:
+                    raise errors.MissingResourceError(
+                        message=f"The resource class with ID {body.resource_class_id} does not exist."
+ ) + await self.nb_config.crc_validator.validate_class_storage(user, resource_class.id, body.disk_storage) work_dir_fallback = PurePosixPath("/home/jovyan") work_dir = environment.working_directory or image_workdir or work_dir_fallback storage_mount_fallback = work_dir / "work" - # TODO: Wait for pitch on users secrets to implement this - # user_secrets: K8sUserSecrets | None = None - # if body.user_secrets: - # user_secrets = K8sUserSecrets( - # name=server_name, - # user_secret_ids=body.user_secrets.user_secret_ids, - # mount_path=body.user_secrets.mount_path, - # ) + storage_mount = launcher.environment.mount_directory or storage_mount_fallback + secrets_mount_directory = storage_mount / project.secrets_mount_directory + session_secrets = await self.project_session_secret_repo.get_all_session_secrets_from_project( + user=user, project_id=project.id + ) data_connectors_stream = self.data_connector_secret_repo.get_data_connectors_with_secrets(user, project.id) - dcs: dict[str, RCloneStorage] = {} - dcs_secrets: dict[str, list[DataConnectorSecret]] = {} - async for dc in data_connectors_stream: - dcs[str(dc.data_connector.id)] = RCloneStorage( - source_path=dc.data_connector.storage.source_path, - mount_folder=dc.data_connector.storage.target_path - if PurePosixPath(dc.data_connector.storage.target_path).is_absolute() - else (work_dir / dc.data_connector.storage.target_path).as_posix(), - configuration=dc.data_connector.storage.configuration, - readonly=dc.data_connector.storage.readonly, - config=self.nb_config, - name=dc.data_connector.name, - ) - if len(dc.secrets) > 0: - dcs_secrets[str(dc.data_connector.id)] = dc.secrets - # NOTE: Check the cloud storage in the request body and if any match - # then overwrite the projects cloud storages - # NOTE: Cloud storages in the session launch request body that are not from the DB will cause a 404 error - # NOTE: Overriding the configuration when a saved secret is there will cause a 422 error - cloud_storage_overrides = body.cloudstorage or [] - for csr in cloud_storage_overrides: - csr_id = csr.storage_id - if csr_id not in dcs: - raise errors.MissingResourceError( - message=f"You have requested a cloud storage with ID {csr_id} which does not exist " - "or you dont have access to.", - quiet=True, - ) - if csr.target_path is not None and not PurePosixPath(csr.target_path).is_absolute(): - csr.target_path = (work_dir / csr.target_path).as_posix() - dcs[csr_id] = dcs[csr_id].with_override(csr) git_providers = await self.nb_config.git_provider_helper.get_providers(user=user) - repositories: list[Repository] = [] - for repo in project.repositories: - found_provider_id: str | None = None - for provider in git_providers: - if urlparse(provider.url).netloc == urlparse(repo).netloc: - found_provider_id = provider.id - break - repositories.append(Repository(url=repo, provider=found_provider_id)) - secrets_to_create: list[V1Secret] = [] - # Generate the cloud starge secrets - data_sources: list[DataSource] = [] - user_secret_key: str | None = None - if isinstance(user, AuthenticatedAPIUser) and len(dcs_secrets) > 0: - user_secret_key = await get_user_secret(self.nb_config.data_service_url, user) - for cs_id, cs in dcs.items(): - secret_name = f"{server_name}-ds-{cs_id.lower()}" - secret_key_needed = len(dcs_secrets.get(cs_id, [])) > 0 - if secret_key_needed and user_secret_key is None: - raise errors.ProgrammingError( - message=f"You have saved storage secrets for data connector {cs_id} " - f"associated with your user ID {user.id} but no key to decrypt 
them, " - "therefore we cannot mount the requested data connector. " - "Please report this to the renku administrators." - ) - secrets_to_create.append( - cs.secret( - secret_name, - self.nb_config.k8s_client.preferred_namespace, - user_secret_key=user_secret_key if secret_key_needed else None, - ) - ) - data_sources.append( - DataSource( - mountPath=cs.mount_folder, - secretRef=SecretRefWhole(name=secret_name, adopt=True), - accessMode="ReadOnlyMany" if cs.readonly else "ReadWriteOnce", - ) - ) - cert_init, cert_vols = init_containers.certificates_container(self.nb_config) - session_init_containers = [InitContainer.model_validate(self.nb_config.k8s_v2_client.sanitize(cert_init))] - extra_volumes = [ - ExtraVolume.model_validate(self.nb_config.k8s_v2_client.sanitize(volume)) for volume in cert_vols - ] - if isinstance(user, AuthenticatedAPIUser): - extra_volumes.append( - ExtraVolume( - name="renku-authorized-emails", - secret=SecretAsVolume( - secretName=server_name, - items=[SecretAsVolumeItem(key="authorized_emails", path="authorized_emails")], - ), - ) - ) - git_clone = await init_containers.git_clone_container_v2( + repositories = repositories_from_project(project, git_providers) + + # User secrets + extra_volume_mounts: list[ExtraVolumeMount] = [] + extra_volumes: list[ExtraVolume] = [] + extra_init_containers: list[InitContainer] = [] + user_secrets_container_patches = user_secrets_container( user=user, config=self.nb_config, - repositories=repositories, - git_providers=git_providers, - workspace_mount_path=launcher.environment.mount_directory or storage_mount_fallback, + secrets_mount_directory=secrets_mount_directory.as_posix(), + k8s_secret_name=f"{server_name}-secrets", + session_secrets=session_secrets, + ) + if user_secrets_container_patches is not None: + (init_container_session_secret, volumes_session_secret, volume_mounts_session_secret) = ( + user_secrets_container_patches + ) + extra_volumes.extend(volumes_session_secret) + extra_volume_mounts.extend(volume_mounts_session_secret) + extra_init_containers.append(init_container_session_secret) + + secrets_to_create: list[ExtraSecret] = [] + data_sources, data_secrets, enc_secrets = await get_data_sources( + nb_config=self.nb_config, + server_name=server_name, + user=user, + data_connectors_stream=data_connectors_stream, work_dir=work_dir, + cloud_storage_overrides=body.cloudstorage or [], + user_repo=self.user_repo, ) - if git_clone is not None: - session_init_containers.append(InitContainer.model_validate(git_clone)) - extra_containers: list[ExtraContainer] = [] - git_proxy_container = await git_proxy.main_container( - user=user, config=self.nb_config, repositories=repositories, git_providers=git_providers + secrets_to_create.extend(data_secrets) + extra_init_containers_dc, extra_init_volumes_dc = await get_extra_init_containers( + self.nb_config, + user, + repositories, + git_providers, + storage_mount, + work_dir, + uid=environment.uid, + gid=environment.gid, + ) + extra_containers = await get_extra_containers(self.nb_config, user, repositories, git_providers) + extra_volumes.extend(extra_init_volumes_dc) + extra_init_containers.extend(extra_init_containers_dc) + + ( + base_server_path, + base_server_url, + base_server_https_url, + host, + tls_secret, + ingress_annotations, + ) = await cluster.get_ingress_parameters( + user, self.cluster_repo, self.nb_config.sessions.ingress, server_name ) - if git_proxy_container is not None: - extra_containers.append( - 
ExtraContainer.model_validate(self.nb_config.k8s_v2_client.sanitize(git_proxy_container)) - ) - base_server_url = self.nb_config.sessions.ingress.base_url(server_name) - base_server_https_url = self.nb_config.sessions.ingress.base_url(server_name, force_https=True) - base_server_path = self.nb_config.sessions.ingress.base_path(server_name) - ui_path: str = ( - f"{base_server_path.rstrip("/")}/{environment.default_url.lstrip("/")}" - if len(environment.default_url) > 0 - else base_server_path + ui_path = f"{base_server_path}/{environment.default_url.lstrip('/')}" + + ingress = Ingress( + host=host, + ingressClassName=ingress_annotations.get("kubernetes.io/ingress.class"), + annotations=ingress_annotations, + tlsSecret=tls_secret, + pathPrefix=base_server_path, ) + annotations: dict[str, str] = { "renku.io/project_id": str(launcher.project_id), "renku.io/launcher_id": body.launcher_id, "renku.io/resource_class_id": str(body.resource_class_id or default_resource_class.id), } - requests: dict[str, str | int] = { - "cpu": str(round(resource_class.cpu * 1000)) + "m", - "memory": f"{resource_class.memory}Gi", - } - limits: dict[str, str | int] = {} - if resource_class.gpu > 0: - gpu_name = GpuKind.NVIDIA.value + "/gpu" - requests[gpu_name] = resource_class.gpu - limits[gpu_name] = resource_class.gpu - tolerations = [ - Toleration.model_validate(toleration) for toleration in self.nb_config.sessions.tolerations - ] + tolerations_from_resource_class(resource_class) - affinity = Affinity.model_validate(self.nb_config.sessions.affinity) - rc_node_affinity = node_affinity_from_resource_class(resource_class) - if affinity.nodeAffinity: - affinity.nodeAffinity = merge_node_affinities(affinity.nodeAffinity, rc_node_affinity) + if isinstance(user, AuthenticatedAPIUser): + auth_secret = await get_auth_secret_authenticated( + self.nb_config, user, server_name, base_server_url, base_server_https_url, base_server_path + ) else: - affinity.nodeAffinity = rc_node_affinity + auth_secret = await get_auth_secret_anonymous(self.nb_config, server_name, request) + if auth_secret.volume: + extra_volumes.append(auth_secret.volume) + + image_pull_secret_name = None + if isinstance(user, AuthenticatedAPIUser) and internal_gitlab_user.access_token is not None: + needs_pull_secret = await requires_image_pull_secret(self.nb_config, image, internal_gitlab_user) + + if needs_pull_secret: + image_pull_secret_name = f"{server_name}-image-secret" + + image_secret = get_gitlab_image_pull_secret( + self.nb_config, user, image_pull_secret_name, internal_gitlab_user.access_token + ) + secrets_to_create.append(image_secret) + + secrets_to_create.append(auth_secret) + + # Raise an error if there are invalid environment variables in the request body + verify_launcher_env_variable_overrides(launcher, body) + env = [ + SessionEnvItem(name="RENKU_BASE_URL_PATH", value=base_server_path), + SessionEnvItem(name="RENKU_BASE_URL", value=base_server_url), + SessionEnvItem(name="RENKU_MOUNT_DIR", value=storage_mount.as_posix()), + SessionEnvItem(name="RENKU_SESSION", value="1"), + SessionEnvItem(name="RENKU_SESSION_IP", value="0.0.0.0"), # nosec B104 + SessionEnvItem(name="RENKU_SESSION_PORT", value=f"{environment.port}"), + SessionEnvItem(name="RENKU_WORKING_DIR", value=work_dir.as_posix()), + ] + launcher_env_variables = get_launcher_env_variables(launcher, body) + if launcher_env_variables: + env.extend(launcher_env_variables) + + storage_class = await cluster.get_storage_class( + user, self.cluster_repo, 
self.nb_config.sessions.storage.pvs_storage_class + ) + service_account_name: str | None = None + if resource_pool.cluster: + service_account_name = resource_pool.cluster.service_account_name manifest = AmaltheaSessionV1Alpha1( metadata=Metadata(name=server_name, annotations=annotations), spec=AmaltheaSessionSpec( + imagePullSecrets=[ImagePullSecret(name=image_pull_secret_name, adopt=True)] + if image_pull_secret_name + else [], codeRepositories=[], hibernated=False, reconcileStrategy=ReconcileStrategy.whenFailedOrHibernated, - priorityClassName=quota, + priorityClassName=resource_class.quota, session=Session( image=image, + imagePullPolicy=ImagePullPolicy.Always, urlPath=ui_path, port=environment.port, storage=Storage( - className=self.nb_config.sessions.storage.pvs_storage_class, - size=str(body.disk_storage) + "G", - mountPath=environment.mount_directory.as_posix() - if environment.mount_directory - else storage_mount_fallback.as_posix(), + className=storage_class, + size=SizeStr(str(body.disk_storage) + "G"), + mountPath=storage_mount.as_posix(), ), workingDir=work_dir.as_posix(), runAsUser=environment.uid, runAsGroup=environment.gid, - resources=Resources(requests=requests, limits=limits if len(limits) > 0 else None), - extraVolumeMounts=[], + resources=resources_from_resource_class(resource_class), + extraVolumeMounts=extra_volume_mounts, command=environment.command, args=environment.args, - shmSize="1G", - env=[ - SessionEnvItem(name="RENKU_BASE_URL_PATH", value=base_server_path), - SessionEnvItem(name="RENKU_BASE_URL", value=base_server_url), - ], - ), - ingress=Ingress( - host=self.nb_config.sessions.ingress.host, - ingressClassName=self.nb_config.sessions.ingress.annotations.get("kubernetes.io/ingress.class"), - annotations=self.nb_config.sessions.ingress.annotations, - tlsSecret=TlsSecret(adopt=False, name=self.nb_config.sessions.ingress.tls_secret) - if self.nb_config.sessions.ingress.tls_secret is not None - else None, - pathPrefix=base_server_path, + shmSize=ShmSizeStr("1G"), + env=env, ), + ingress=ingress, extraContainers=extra_containers, - initContainers=session_init_containers, + initContainers=extra_init_containers, extraVolumes=extra_volumes, - culling=Culling( - maxAge=f"{self.nb_config.sessions.culling.registered.max_age_seconds}s", - maxFailedDuration=f"{self.nb_config.sessions.culling.registered.failed_seconds}s", - maxHibernatedDuration=f"{self.nb_config.sessions.culling.registered.hibernated_seconds}s", - maxIdleDuration=f"{self.nb_config.sessions.culling.registered.idle_seconds}s", - maxStartingDuration=f"{self.nb_config.sessions.culling.registered.pending_seconds}s", - ), + culling=get_culling(user, resource_pool, self.nb_config), authentication=Authentication( enabled=True, type=AuthenticationType.oauth2proxy if isinstance(user, AuthenticatedAPIUser) else AuthenticationType.token, - secretRef=SecretRefKey(name=server_name, key="auth", adopt=True), - extraVolumeMounts=[ - # NOTE: Without subpath k8s keeps updating the secret and this can lead to - # the oauth2proxy restarting intermittently even when the secret does not change - # because the oauth2proxy watches this file and restarts on changes - ExtraVolumeMount( - name="renku-authorized-emails", - mountPath="/authorized_emails", - subPath="authorized_emails", - ) - ] - if isinstance(user, AuthenticatedAPIUser) - else [], + secretRef=auth_secret.key_ref("auth"), + extraVolumeMounts=[auth_secret.volume_mount] if auth_secret.volume_mount else [], ), dataSources=data_sources, - tolerations=tolerations, - 
affinity=affinity, + tolerations=tolerations_from_resource_class( + resource_class, self.nb_config.sessions.tolerations_model + ), + affinity=node_affinity_from_resource_class(resource_class, self.nb_config.sessions.affinity_model), + serviceAccountName=service_account_name, ), ) - parsed_proxy_url = urlparse(urljoin(base_server_url + "/", "oauth2")) - secret_data = {} - if isinstance(user, AuthenticatedAPIUser): - secret_data["auth"] = dumps( - { - "provider": "oidc", - "client_id": self.nb_config.sessions.oidc.client_id, - "oidc_issuer_url": self.nb_config.sessions.oidc.issuer_url, - "session_cookie_minimal": True, - "skip_provider_button": True, - # NOTE: If the redirect url is not HTTPS then some or identity providers will fail. - "redirect_url": urljoin(base_server_https_url + "/", "oauth2/callback"), - "cookie_path": base_server_path, - "proxy_prefix": parsed_proxy_url.path, - "authenticated_emails_file": "/authorized_emails", - "client_secret": self.nb_config.sessions.oidc.client_secret, - "cookie_secret": base64.urlsafe_b64encode(os.urandom(32)).decode(), - "insecure_oidc_allow_unverified_email": self.nb_config.sessions.oidc.allow_unverified_email, - } - ) - secret_data["authorized_emails"] = user.email - else: - # NOTE: We extract the session cookie value here in order to avoid creating a cookie. - # The gateway encrypts and signs cookies so the user ID injected in the request headers does not - # match the value of the session cookie. - session_id = cast(str | None, request.cookies.get(self.nb_config.session_id_cookie_name)) - if not session_id: - raise errors.UnauthorizedError( - message=f"You have to have a renku session cookie at {self.nb_config.session_id_cookie_name} " - "in order to launch an anonymous session." - ) - # NOTE: Amalthea looks for the token value first in the cookie and then in the authorization header - secret_data["auth"] = safe_dump( - { - "authproxy": { - "token": session_id, - "cookie_key": self.nb_config.session_id_cookie_name, - "verbose": True, - } - } - ) - secrets_to_create.append(V1Secret(metadata=V1ObjectMeta(name=server_name), string_data=secret_data)) for s in secrets_to_create: - await self.nb_config.k8s_v2_client.create_secret(s) + await self.nb_config.k8s_v2_client.create_secret(s.secret, cluster) try: - manifest = await self.nb_config.k8s_v2_client.create_server(manifest, user.id) - except Exception: + manifest = await self.nb_config.k8s_v2_client.create_session(manifest, user) + except Exception as err: for s in secrets_to_create: - await self.nb_config.k8s_v2_client.delete_secret(s.metadata.name) - raise errors.ProgrammingError(message="Could not start the amalthea session") + await self.nb_config.k8s_v2_client.delete_secret(s.secret.metadata.name, cluster) + raise errors.ProgrammingError(message="Could not start the amalthea session") from err else: - owner_reference = { - "apiVersion": manifest.apiVersion, - "kind": manifest.kind, - "name": manifest.metadata.name, - "uid": manifest.metadata.uid, - } - secrets_url = self.nb_config.user_secrets.secrets_storage_service_url + "/api/secrets/kubernetes" - headers = {"Authorization": f"bearer {user.access_token}"} try: - for s_id, secrets in dcs_secrets.items(): - if len(secrets) == 0: - continue - request_data = { - "name": f"{server_name}-ds-{s_id.lower()}-secrets", - "namespace": self.nb_config.k8s_v2_client.preferred_namespace, - "secret_ids": [str(secret.secret_id) for secret in secrets], - "owner_references": [owner_reference], - "key_mapping": {str(secret.secret_id): secret.name for 
secret in secrets}, - } - async with httpx.AsyncClient(timeout=10) as client: - res = await client.post(secrets_url, headers=headers, json=request_data) - if res.status_code >= 300 or res.status_code < 200: - raise errors.ProgrammingError( - message=f"The secret for data connector with {s_id} could not be " - f"successfully created, the status code was {res.status_code}." - "Please contact a Renku administrator.", - detail=res.text, - ) + await request_session_secret_creation(user, self.nb_config, manifest, session_secrets) + await request_dc_secret_creation(user, self.nb_config, manifest, enc_secrets) except Exception: - await self.nb_config.k8s_v2_client.delete_server(server_name, user.id) + await self.nb_config.k8s_v2_client.delete_session(server_name, user.id) raise + await self.metrics.user_requested_session_launch( + user=user, + metadata={ + "cpu": int(resource_class.cpu * 1000), + "memory": resource_class.memory, + "gpu": resource_class.gpu, + "storage": body.disk_storage, + "resource_class_id": resource_class.id, + "resource_pool_id": resource_pool.id or "", + "resource_class_name": f"{resource_pool.name}.{resource_class.name}", + "session_id": server_name, + }, + ) return json(manifest.as_apispec().model_dump(mode="json", exclude_none=True), 201) return "/sessions", ["POST"], _handler @@ -616,7 +503,7 @@ def get_all(self) -> BlueprintFactoryResponse: @authenticate(self.authenticator) async def _handler(_: Request, user: AuthenticatedAPIUser | AnonymousAPIUser) -> HTTPResponse: - sessions = await self.nb_config.k8s_v2_client.list_servers(user.id) + sessions = await self.nb_config.k8s_v2_client.list_sessions(user.id) output: list[dict] = [] for session in sessions: output.append(session.as_apispec().model_dump(exclude_none=True, mode="json")) @@ -629,7 +516,7 @@ def get_one(self) -> BlueprintFactoryResponse: @authenticate(self.authenticator) async def _handler(_: Request, user: AuthenticatedAPIUser | AnonymousAPIUser, session_id: str) -> HTTPResponse: - session = await self.nb_config.k8s_v2_client.get_server(session_id, user.id) + session = await self.nb_config.k8s_v2_client.get_session(session_id, user.id) if session is None: raise errors.ValidationError(message=f"The session with ID {session_id} does not exist.", quiet=True) return json(session.as_apispec().model_dump(exclude_none=True, mode="json")) @@ -641,7 +528,8 @@ def delete(self) -> BlueprintFactoryResponse: @authenticate(self.authenticator) async def _handler(_: Request, user: AuthenticatedAPIUser | AnonymousAPIUser, session_id: str) -> HTTPResponse: - await self.nb_config.k8s_v2_client.delete_server(session_id, user.id) + await self.nb_config.k8s_v2_client.delete_session(session_id, user.id) + await self.metrics.session_stopped(user, metadata={"session_id": session_id}) return empty() return "/sessions/", ["DELETE"], _handler @@ -649,62 +537,25 @@ async def _handler(_: Request, user: AuthenticatedAPIUser | AnonymousAPIUser, se def patch(self) -> BlueprintFactoryResponse: """Patch a session.""" - @authenticate(self.authenticator) + @authenticate_2(self.authenticator, self.internal_gitlab_authenticator) @validate(json=apispec.SessionPatchRequest) async def _handler( _: Request, user: AuthenticatedAPIUser | AnonymousAPIUser, + internal_gitlab_user: APIUser, session_id: str, body: apispec.SessionPatchRequest, ) -> HTTPResponse: - session = await self.nb_config.k8s_v2_client.get_server(session_id, user.id) - if session is None: - raise errors.MissingResourceError( - message=f"The sesison with ID {session_id} does not exist", 
quiet=True - ) - # TODO: Some patching should only be done when the session is in some states to avoid inadvertent restarts - patches: dict[str, Any] = {} - if body.resource_class_id is not None: - rcs = await self.rp_repo.get_classes(user, id=body.resource_class_id) - if len(rcs) == 0: - raise errors.MissingResourceError( - message=f"The resource class you requested with ID {body.resource_class_id} does not exist", - quiet=True, - ) - rc = rcs[0] - patches |= dict( - spec=dict( - session=dict( - resources=dict(requests=dict(cpu=f"{round(rc.cpu * 1000)}m", memory=f"{rc.memory}Gi")) - ) - ) - ) - # TODO: Add a config to specifiy the gpu kind, there is also GpuKind enum in reosurce_pools - patches["spec"]["session"]["resources"]["requests"]["nvidia.com/gpu"] = rc.gpu - # NOTE: K8s fails if the gpus limit is not equal to the requests because it cannot be overcommited - patches["spec"]["session"]["resources"]["limits"] = {"nvidia.com/gpu": rc.gpu} - if ( - body.state is not None - and body.state.value.lower() == State.Hibernated.value.lower() - and body.state.value.lower() != session.status.state.value.lower() - ): - if "spec" not in patches: - patches["spec"] = {} - patches["spec"]["hibernated"] = True - elif ( - body.state is not None - and body.state.value.lower() == State.Running.value.lower() - and session.status.state.value.lower() != body.state.value.lower() - ): - if "spec" not in patches: - patches["spec"] = {} - patches["spec"]["hibernated"] = False - - if len(patches) > 0: - new_session = await self.nb_config.k8s_v2_client.patch_server(session_id, user.id, patches) - else: - new_session = session - + new_session = await patch_session( + body, + session_id, + self.nb_config, + user, + internal_gitlab_user, + rp_repo=self.rp_repo, + project_repo=self.project_repo, + metrics=self.metrics, + ) return json(new_session.as_apispec().model_dump(exclude_none=True, mode="json")) return "/sessions/", ["PATCH"], _handler @@ -720,7 +571,7 @@ async def _handler( session_id: str, query: apispec.SessionsSessionIdLogsGetParametersQuery, ) -> HTTPResponse: - logs = await self.nb_config.k8s_v2_client.get_server_logs(session_id, user.id, query.max_lines) + logs = await self.nb_config.k8s_v2_client.get_session_logs(session_id, user.id, query.max_lines) return json(apispec.SessionLogsResponse.model_validate(logs).model_dump(exclude_none=True)) return "/sessions//logs", ["GET"], _handler diff --git a/components/renku_data_services/notebooks/config/__init__.py b/components/renku_data_services/notebooks/config/__init__.py index 56fb427f2..875b675de 100644 --- a/components/renku_data_services/notebooks/config/__init__.py +++ b/components/renku_data_services/notebooks/config/__init__.py @@ -1,47 +1,48 @@ -"""Base motebooks svc configuration.""" +"""Base notebooks svc configuration.""" import os from dataclasses import dataclass, field from typing import Any, Optional, Protocol, Self +import kr8s + from renku_data_services.base_models import APIUser -from renku_data_services.crc.db import ResourcePoolRepository +from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository from renku_data_services.crc.models import ResourceClass from renku_data_services.db_config.config import DBConfig -from renku_data_services.k8s.clients import K8sCoreClient, K8sSchedulingClient +from renku_data_services.k8s.clients import ( + DummyCoreClient, + DummySchedulingClient, + K8sClusterClientsPool, + K8sCoreClient, + K8sSchedulingClient, +) +from renku_data_services.k8s.config import KubeConfigEnv, get_clusters 
from renku_data_services.k8s.quota import QuotaRepository +from renku_data_services.k8s_watcher import K8sDbCache from renku_data_services.notebooks.api.classes.data_service import ( - CloudStorageConfig, CRCValidator, DummyCRCValidator, DummyGitProviderHelper, - DummyStorageValidator, GitProviderHelper, - StorageValidator, -) -from renku_data_services.notebooks.api.classes.k8s_client import ( - AmaltheaSessionV1Alpha1Kr8s, - JupyterServerV1Alpha1Kr8s, - K8sClient, - NamespacedK8sClient, - ServerCache, ) +from renku_data_services.notebooks.api.classes.k8s_client import NotebookK8sClient from renku_data_services.notebooks.api.classes.repository import GitProvider from renku_data_services.notebooks.api.schemas.server_options import ServerOptions from renku_data_services.notebooks.config.dynamic import ( - _AmaltheaConfig, - _AmaltheaV2Config, + ServerOptionsConfig, _CloudStorage, _GitConfig, _K8sConfig, _parse_str_as_bool, _SentryConfig, - _ServerOptionsConfig, _SessionConfig, _UserSecrets, ) from renku_data_services.notebooks.config.static import _ServersGetEndpointAnnotations +from renku_data_services.notebooks.constants import AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK from renku_data_services.notebooks.crs import AmaltheaSessionV1Alpha1, JupyterServerV1Alpha1 +from renku_data_services.session.constants import BUILD_RUN_GVK, TASK_RUN_GVK class CRCValidatorProto(Protocol): @@ -67,49 +68,60 @@ async def find_acceptable_class( ... -class StorageValidatorProto(Protocol): - """Cloud storage validator protocol.""" +class GitProviderHelperProto(Protocol): + """Git provider protocol.""" - async def get_storage_by_id( - self, user: APIUser, internal_gitlab_user: APIUser, project_id: int, storage_id: str - ) -> CloudStorageConfig: - """Get storage by ID.""" + async def get_providers(self, user: APIUser) -> list[GitProvider]: + """Get a list of git providers.""" ... - async def validate_storage_configuration(self, configuration: dict[str, Any], source_path: str) -> None: - """Validate a storage configuration.""" - ... - async def obscure_password_fields_for_storage(self, configuration: dict[str, Any]) -> dict[str, Any]: - """Obscure passsword fields in storage credentials.""" - ... +class Kr8sApiStack: + """Class maintaining a stack of current api clients. + Used for testing. + """ -class GitProviderHelperProto(Protocol): - """Git provider protocol.""" + stack: list[kr8s.Api] = list() - async def get_providers(self, user: APIUser) -> list[GitProvider]: - """Get a list of git providers.""" - ... 
+
+    def push(self, api: kr8s.Api) -> None:
+        """Push a new api client onto the stack."""
+        self.stack.append(api)
+
+    def pop(self) -> kr8s.Api:
+        """Pop the current kr8s api client from the stack."""
+        return self.stack.pop()
+
+    @property
+    def current(self) -> kr8s.Api:
+        """Get the currently active api client."""
+        return self.stack[-1]
+
+    def __getattribute__(self, name: str) -> Any:
+        """Pass on requests to current api client."""
+        if name in ["push", "pop", "current", "stack"]:
+            return object.__getattribute__(self, name)
+        return object.__getattribute__(self.current, name)
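
Since __getattribute__ forwards everything except push, pop, current and stack to the client on top of the stack, a test can swap the API client out and back without the surrounding code noticing. A self-contained sketch of the pattern, using a stand-in object instead of a real kr8s.Api:

    class FakeApi:
        """Stand-in for kr8s.Api, for illustration only."""

        def __init__(self, name: str) -> None:
            self.name = name

    stack = Kr8sApiStack()
    stack.push(FakeApi("cluster-a"))
    stack.push(FakeApi("cluster-b"))
    assert stack.name == "cluster-b"  # attribute access is delegated to the top client
    stack.pop()
    assert stack.name == "cluster-a"  # the previous client becomes active again
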

 @dataclass
 class NotebooksConfig:
     """The notebooks configuration."""

-    server_options: _ServerOptionsConfig
+    server_options: ServerOptionsConfig
     sessions: _SessionConfig
-    amalthea: _AmaltheaConfig
     sentry: _SentryConfig
     git: _GitConfig
     k8s: _K8sConfig
+    k8s_db_cache: K8sDbCache
+    _kr8s_api: kr8s.asyncio.Api
     cloud_storage: _CloudStorage
     user_secrets: _UserSecrets
     crc_validator: CRCValidatorProto
-    storage_validator: StorageValidatorProto
     git_provider_helper: GitProviderHelperProto
-    k8s_client: K8sClient[JupyterServerV1Alpha1, JupyterServerV1Alpha1Kr8s]
-    k8s_v2_client: K8sClient[AmaltheaSessionV1Alpha1, AmaltheaSessionV1Alpha1Kr8s]
+    k8s_client: NotebookK8sClient[JupyterServerV1Alpha1]
+    k8s_v2_client: NotebookK8sClient[AmaltheaSessionV1Alpha1]
+    cluster_rp: ClusterRepository
     current_resource_schema_version: int = 1
     anonymous_sessions_enabled: bool = False
     ssh_enabled: bool = False
@@ -129,57 +141,66 @@ def from_env(cls, db_config: DBConfig) -> Self:
         dummy_stores = _parse_str_as_bool(os.environ.get("DUMMY_STORES", False))
         sessions_config: _SessionConfig
         git_config: _GitConfig
+        kr8s_api: kr8s.asyncio.Api
         data_service_url = os.environ.get("NB_DATA_SERVICE_URL", "http://127.0.0.1:8000")
-        server_options = _ServerOptionsConfig.from_env()
+        server_options = ServerOptionsConfig.from_env()
         crc_validator: CRCValidatorProto
-        storage_validator: StorageValidatorProto
         git_provider_helper: GitProviderHelperProto
         k8s_namespace = os.environ.get("K8S_NAMESPACE", "default")
-        quota_repo: QuotaRepository
+        kube_config_root = os.environ.get("K8S_CONFIGS_ROOT", "/secrets/kube_configs")
+
         if dummy_stores:
+            quota_repo = QuotaRepository(DummyCoreClient({}, {}), DummySchedulingClient({}), namespace=k8s_namespace)
+            rp_repo = ResourcePoolRepository(db_config.async_session_maker, quota_repo)
             crc_validator = DummyCRCValidator()
             sessions_config = _SessionConfig._for_testing()
-            storage_validator = DummyStorageValidator()
             git_provider_helper = DummyGitProviderHelper()
-            amalthea_config = _AmaltheaConfig(cache_url="http://not.specified")
-            amalthea_v2_config = _AmaltheaV2Config(cache_url="http://not.specified")
             git_config = _GitConfig("http://not.specified", "registry.not.specified")
+            kr8s_api = Kr8sApiStack()  # type: ignore[assignment]
         else:
             quota_repo = QuotaRepository(K8sCoreClient(), K8sSchedulingClient(), namespace=k8s_namespace)
             rp_repo = ResourcePoolRepository(db_config.async_session_maker, quota_repo)
             crc_validator = CRCValidator(rp_repo)
             sessions_config = _SessionConfig.from_env()
-            storage_validator = StorageValidator(data_service_url)
-            amalthea_config = _AmaltheaConfig.from_env()
-            amalthea_v2_config = _AmaltheaV2Config.from_env()
             git_config = _GitConfig.from_env()
             git_provider_helper = GitProviderHelper(
                 data_service_url, f"http://{sessions_config.ingress.host}", git_config.url
             )
+            # NOTE: we need to get an async client as a sync client can't be used in an async way.
+            # But all the config code is not async, so we need to drop into the running loop, if there is one.
+            kr8s_api = KubeConfigEnv().api()

         k8s_config = _K8sConfig.from_env()
-        renku_ns_client = NamespacedK8sClient(
-            k8s_config.renku_namespace, JupyterServerV1Alpha1, JupyterServerV1Alpha1Kr8s
-        )
-        js_cache = ServerCache(amalthea_config.cache_url, JupyterServerV1Alpha1)
-        k8s_client = K8sClient(
-            cache=js_cache,
-            renku_ns_client=renku_ns_client,
-            username_label="renku.io/safe-username",
+        k8s_db_cache = K8sDbCache(db_config.async_session_maker)
+        cluster_rp = ClusterRepository(db_config.async_session_maker)
+        client = K8sClusterClientsPool(
+            get_clusters=get_clusters(
+                kube_conf_root_dir=kube_config_root,
+                namespace=k8s_config.renku_namespace,
+                api=kr8s_api,
+                cluster_rp=cluster_rp,
+            ),
+            cache=k8s_db_cache,
+            kinds_to_cache=[AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK, BUILD_RUN_GVK, TASK_RUN_GVK],
         )
-        v2_cache = ServerCache(amalthea_v2_config.cache_url, AmaltheaSessionV1Alpha1)
-        renku_ns_v2_client = NamespacedK8sClient(
-            k8s_config.renku_namespace, AmaltheaSessionV1Alpha1, AmaltheaSessionV1Alpha1Kr8s
+        k8s_client = NotebookK8sClient(
+            client=client,
+            rp_repo=rp_repo,
+            session_type=JupyterServerV1Alpha1,
+            gvk=JUPYTER_SESSION_GVK,
+            username_label="renku.io/userId",
         )
-        k8s_v2_client = K8sClient(
-            cache=v2_cache,
-            renku_ns_client=renku_ns_v2_client,
+        k8s_v2_client = NotebookK8sClient(
+            client=client,
+            rp_repo=rp_repo,
+            # NOTE: v2 sessions have no userId label, the safe-username label is the keycloak user ID
+            session_type=AmaltheaSessionV1Alpha1,
+            gvk=AMALTHEA_SESSION_GVK,
             username_label="renku.io/safe-username",
         )
         return cls(
             server_options=server_options,
             sessions=sessions_config,
-            amalthea=amalthea_config,
             sentry=_SentryConfig.from_env(),
             git=git_config,
             k8s=k8s_config,
@@ -193,8 +214,10 @@
             data_service_url=data_service_url,
             dummy_stores=dummy_stores,
             crc_validator=crc_validator,
-            storage_validator=storage_validator,
             git_provider_helper=git_provider_helper,
             k8s_client=k8s_client,
             k8s_v2_client=k8s_v2_client,
+            k8s_db_cache=k8s_db_cache,
+            cluster_rp=cluster_rp,
+            _kr8s_api=kr8s_api,
         )
diff --git a/components/renku_data_services/notebooks/config/dynamic.py b/components/renku_data_services/notebooks/config/dynamic.py
index f5129c219..945aaec21 100644
--- a/components/renku_data_services/notebooks/config/dynamic.py
+++ b/components/renku_data_services/notebooks/config/dynamic.py
@@ -10,7 +10,8 @@
 import yaml

-from ..api.schemas.config_server_options import ServerOptionsChoices, ServerOptionsDefaults
+from renku_data_services.notebooks.api.schemas.config_server_options import ServerOptionsChoices, ServerOptionsDefaults
+from renku_data_services.notebooks.crs import Affinity, Toleration

 latest_version: str = "1.25.3"

@@ -37,13 +38,15 @@ def _parse_value_as_float(val: Any) -> float:
 class CPUEnforcement(str, Enum):
     """CPU enforcement policies."""

-    LAX: str = "lax"  # CPU limit equals 3x cpu request
-    STRICT: str = "strict"  # CPU limit equals cpu request
-    OFF: str = "off"  # no CPU limit at all
+    LAX = "lax"  # CPU limit equals 3x cpu request
+    STRICT = "strict"  # CPU limit equals cpu request
+    OFF = "off"  # no CPU limit at all


 @dataclass
-class _ServerOptionsConfig:
+class ServerOptionsConfig:
+    """Config class for server options."""
+
     defaults: dict[str, str | bool | int | float] = field(init=False)
     ui_choices: dict[str, Any] = field(init=False)
     defaults_path: str = "/etc/renku-notebooks/server_options/server_defaults.json"
@@ -57,14 +60,17 @@ def __post_init__(self) -> None:

     @property
def lfs_auto_fetch_default(self) -> bool: + """Whether lfs autofetch is enabled or not.""" return str(self.defaults.get("lfs_auto_fetch", "false")).lower() == "true" @property def default_url_default(self) -> str: + """Default url (path) for session.""" return str(self.defaults.get("defaultUrl", "/lab")) @classmethod def from_env(cls) -> Self: + """Load config from environment variables.""" return cls( os.environ["NB_SERVER_OPTIONS__DEFAULTS_PATH"], os.environ["NB_SERVER_OPTIONS__UI_CHOICES_PATH"], @@ -205,7 +211,7 @@ class _CustomCaCertsConfig: def from_env(cls) -> Self: return cls( image=os.environ.get("NB_SESSIONS__CA_CERTS__IMAGE", "renku/certificates:0.0.2"), - path=os.environ.get("NB_SESSIONS__CA_CERTS__PATH", "/auth/realms/Renku/.well-known/openid-configuration"), + path=os.environ.get("NB_SESSIONS__CA_CERTS__PATH", "/usr/local/share/ca-certificates"), secrets=yaml.safe_load(StringIO(os.environ.get("NB_SESSIONS__CA_CERTS__SECRETS", "[]"))), ) @@ -258,14 +264,15 @@ def from_env(cls) -> Self: annotations=yaml.safe_load(StringIO(os.environ.get("NB_SESSIONS__INGRESS__ANNOTATIONS", "{}"))), ) - def base_path(self, server_name: str) -> str: + @staticmethod + def base_path(server_name: str) -> str: return f"/sessions/{server_name}" def base_url(self, server_name: str, force_https: bool = False) -> str: scheme = "https" if self.tls_secret else "http" if force_https: scheme = "https" - return urlunparse((scheme, self.host, self.base_path(server_name), None, None, None)) + return str(urlunparse((scheme, self.host, self.base_path(server_name), None, None, None))) @dataclass @@ -444,6 +451,14 @@ def _for_testing(cls) -> Self: tolerations=yaml.safe_load(StringIO(os.environ.get("", "[]"))), ) + @property + def affinity_model(self) -> Affinity: + return Affinity.model_validate(self.affinity) + + @property + def tolerations_model(self) -> list[Toleration]: + return [Toleration.model_validate(tol) for tol in self.tolerations] + @dataclass class _K8sConfig: @@ -458,7 +473,7 @@ def from_env(cls) -> Self: @dataclass class _DynamicConfig: - server_options: _ServerOptionsConfig + server_options: ServerOptionsConfig sessions: _SessionConfig amalthea: _AmaltheaConfig sentry: _SentryConfig @@ -471,7 +486,7 @@ class _DynamicConfig: @classmethod def from_env(cls) -> Self: return cls( - server_options=_ServerOptionsConfig.from_env(), + server_options=ServerOptionsConfig.from_env(), sessions=_SessionConfig.from_env(), amalthea=_AmaltheaConfig.from_env(), sentry=_SentryConfig.from_env("NB_SENTRY_"), diff --git a/components/renku_data_services/notebooks/constants.py b/components/renku_data_services/notebooks/constants.py new file mode 100644 index 000000000..d66304178 --- /dev/null +++ b/components/renku_data_services/notebooks/constants.py @@ -0,0 +1,8 @@ +"""Constant values used for notebooks.""" + +from typing import Final + +from renku_data_services.k8s.models import GVK + +AMALTHEA_SESSION_GVK: Final[GVK] = GVK(group="amalthea.dev", version="v1alpha1", kind="AmaltheaSession") +JUPYTER_SESSION_GVK: Final[GVK] = GVK(group="amalthea.dev", version="v1alpha1", kind="JupyterServer") diff --git a/components/renku_data_services/notebooks/core.py b/components/renku_data_services/notebooks/core.py index dd5c31212..f5d09a62a 100644 --- a/components/renku_data_services/notebooks/core.py +++ b/components/renku_data_services/notebooks/core.py @@ -1,4 +1,4 @@ -"""Notebooks service core implementation.""" +"""Notebooks service core implementation, specifically for JupyterServer sessions.""" import json as json_lib from 
datetime import UTC, datetime @@ -6,33 +6,40 @@ from pathlib import PurePosixPath from typing import Any -import requests +import escapism +import httpx from gitlab.const import Visibility as GitlabVisibility from gitlab.v4.objects.projects import Project as GitlabProject -from sanic.log import logger +from sanic.response import JSONResponse +from ulid import ULID +from renku_data_services.app_config import logging from renku_data_services.base_models import AnonymousAPIUser, APIUser, AuthenticatedAPIUser +from renku_data_services.base_models.validation import validated_json from renku_data_services.errors import errors from renku_data_services.notebooks import apispec from renku_data_services.notebooks.api.classes.auth import GitlabToken, RenkuTokens from renku_data_services.notebooks.api.classes.image import Image from renku_data_services.notebooks.api.classes.repository import Repository -from renku_data_services.notebooks.api.classes.server import Renku1UserServer, Renku2UserServer, UserServer +from renku_data_services.notebooks.api.classes.server import Renku1UserServer, UserServer from renku_data_services.notebooks.api.classes.server_manifest import UserServerManifest from renku_data_services.notebooks.api.classes.user import NotebooksGitlabClient from renku_data_services.notebooks.api.schemas.cloud_storage import RCloneStorage from renku_data_services.notebooks.api.schemas.secrets import K8sUserSecrets from renku_data_services.notebooks.api.schemas.server_options import ServerOptions +from renku_data_services.notebooks.api.schemas.servers_get import NotebookResponse from renku_data_services.notebooks.api.schemas.servers_patch import PatchServerStatusEnum from renku_data_services.notebooks.config import NotebooksConfig +from renku_data_services.notebooks.constants import JUPYTER_SESSION_GVK from renku_data_services.notebooks.errors import intermittent from renku_data_services.notebooks.errors import user as user_errors from renku_data_services.notebooks.util import repository -from renku_data_services.notebooks.util.kubernetes_ import ( - find_container, - renku_1_make_server_name, - renku_2_make_server_name, -) +from renku_data_services.notebooks.util.kubernetes_ import find_container, renku_1_make_server_name +from renku_data_services.storage.db import StorageRepository +from renku_data_services.storage.models import CloudStorage +from renku_data_services.users.db import UserRepo + +logger = logging.getLogger(__name__) def notebooks_info(config: NotebooksConfig) -> dict: @@ -68,11 +75,11 @@ def notebooks_info(config: NotebooksConfig) -> dict: async def user_servers( config: NotebooksConfig, user: AnonymousAPIUser | AuthenticatedAPIUser, filter_attrs: list[dict] -) -> dict: +) -> dict[str, UserServerManifest]: """Returns a filtered list of servers for the given user.""" servers = [ - UserServerManifest(s, config.sessions.default_image) for s in await config.k8s_client.list_servers(user.id) + UserServerManifest(s, config.sessions.default_image) for s in await config.k8s_client.list_sessions(user.id) ] filtered_servers = {} ann_prefix = config.session_get_endpoint_annotations.renku_annotation_prefix @@ -87,7 +94,7 @@ async def user_server( ) -> UserServerManifest: """Returns the requested server for the user.""" - server = await config.k8s_client.get_server(server_name, user.id) + server = await config.k8s_client.get_session(server_name, user.id) if server is None: raise errors.MissingResourceError(message=f"The server {server_name} does not exist.") return UserServerManifest(server, 
config.sessions.default_image) @@ -105,7 +112,7 @@ async def patch_server( if not config.sessions.storage.pvs_enabled: raise intermittent.PVDisabledError() - server = await config.k8s_client.get_server(server_name, user.id) + server = await config.k8s_client.get_session(server_name, user.id) if server is None: raise errors.MissingResourceError(message=f"The server with name {server_name} cannot be found") if server.spec is None: @@ -157,8 +164,8 @@ async def patch_server( "path": "/metadata/labels/renku.io~1quota", } ) - new_server = await config.k8s_client.patch_server( - server_name=server_name, safe_username=user.id, patch=js_patch + new_server = await config.k8s_client.patch_session( + session_name=server_name, safe_username=user.id, patch=js_patch ) ss_patch: list[dict[str, Any]] = [ { @@ -167,7 +174,7 @@ async def patch_server( "value": parsed_server_options.priority_class, } ] - await config.k8s_client.patch_statefulset(server_name=server_name, patch=ss_patch) + await config.k8s_client.patch_statefulset(session_name=server_name, safe_username=user.id, patch=ss_patch) if state == PatchServerStatusEnum.Hibernated: # NOTE: Do nothing if server is already hibernated @@ -217,7 +224,7 @@ async def patch_server( }, } - new_server = await config.k8s_client.patch_server(server_name=server_name, safe_username=user.id, patch=patch) + new_server = await config.k8s_client.patch_session(session_name=server_name, safe_username=user.id, patch=patch) elif state == PatchServerStatusEnum.Running: # NOTE: We clear hibernation annotations in Amalthea to avoid flickering in the UI (showing # the repository as dirty when resuming a session for a short period of time). @@ -239,8 +246,8 @@ async def patch_server( floor(user.access_token_expires_at.timestamp()) if user.access_token_expires_at is not None else -1 ), ) - await config.k8s_client.patch_tokens(server_name, renku_tokens, gitlab_token) - new_server = await config.k8s_client.patch_server(server_name=server_name, safe_username=user.id, patch=patch) + await config.k8s_client.patch_session_tokens(server_name, user.id, renku_tokens, gitlab_token) + new_server = await config.k8s_client.patch_session(session_name=server_name, safe_username=user.id, patch=patch) return UserServerManifest(new_server, config.sessions.default_image) @@ -248,7 +255,7 @@ async def patch_server( async def stop_server(config: NotebooksConfig, user: AnonymousAPIUser | AuthenticatedAPIUser, server_name: str) -> None: """Stops / deletes the requested server.""" - await config.k8s_client.delete_server(server_name, safe_username=user.id) + await config.k8s_client.delete_session(server_name, safe_username=user.id) def server_options(config: NotebooksConfig) -> dict: @@ -267,8 +274,8 @@ async def server_logs( ) -> dict: """Returns the logs of the given server.""" - return await config.k8s_client.get_server_logs( - server_name=server_name, + return await config.k8s_client.get_session_logs( + session_name=server_name, safe_username=user.id, max_log_lines=max_lines, ) @@ -305,7 +312,7 @@ async def launch_notebook_helper( server_name: str, server_class: type[UserServer], user: AnonymousAPIUser | AuthenticatedAPIUser, - image: str, + image: str | None, resource_class_id: int | None, storage: int | None, environment_variables: dict[str, str], @@ -314,21 +321,20 @@ async def launch_notebook_helper( lfs_auto_fetch: bool, cloudstorage: list[apispec.RCloneStorageRequest], server_options: ServerOptions | dict | None, - namespace: str | None, # Renku 1.0 + gl_namespace: str | None, # Renku 1.0 
project: str | None, # Renku 1.0 branch: str | None, # Renku 1.0 commit_sha: str | None, # Renku 1.0 - notebook: str | None, # Renku 1.0 gl_project: GitlabProject | None, # Renku 1.0 gl_project_path: str | None, # Renku 1.0 - project_id: str | None, # Renku 2.0 - launcher_id: str | None, # Renku 2.0 repositories: list[apispec.LaunchNotebookRequestRepository] | None, # Renku 2.0 internal_gitlab_user: APIUser, + user_repo: UserRepo, + storage_repo: StorageRepository, ) -> tuple[UserServerManifest, int]: """Helper function to launch a Jupyter server.""" - server = await nb_config.k8s_client.get_server(server_name, user.id) + server = await nb_config.k8s_client.get_session(server_name, user.id) if server: return UserServerManifest(server, nb_config.sessions.default_image, nb_config.sessions.storage.pvs_enabled), 200 @@ -342,7 +348,7 @@ async def launch_notebook_helper( # A specific image was requested parsed_image = Image.from_path(image) image_repo = parsed_image.repo_api() - image_exists_publicly = image_repo.image_exists(parsed_image) + image_exists_publicly = await image_repo.image_exists(parsed_image) image_exists_privately = False if ( not image_exists_publicly @@ -376,7 +382,7 @@ async def launch_notebook_helper( image_repo = parsed_image.repo_api().maybe_with_oauth2_token( nb_config.git.registry, internal_gitlab_user.access_token ) - if not image_repo.image_exists(parsed_image): + if not await image_repo.image_exists(parsed_image): raise errors.MissingResourceError( message=( f"Cannot start the session because the following the image {image} does not " @@ -386,10 +392,19 @@ async def launch_notebook_helper( else: raise user_errors.UserInputError(message="Cannot determine which Docker image to use.") + host = nb_config.sessions.ingress.host parsed_server_options: ServerOptions | None = None if resource_class_id is not None: # A resource class ID was passed in, validate with CRC service parsed_server_options = await nb_config.crc_validator.validate_class_storage(user, resource_class_id, storage) + k8s_cluster = await nb_config.k8s_client.cluster_by_class_id(resource_class_id, user) + if ( + p := await k8s_cluster.get_ingress_parameters( + user, nb_config.cluster_rp, nb_config.sessions.ingress, server_name + ) + ) is not None: + (_, _, _, ingress_host, _, _) = p + host = ingress_host elif server_options is not None: if isinstance(server_options, dict): requested_server_options = ServerOptions( @@ -404,14 +419,13 @@ async def launch_notebook_helper( requested_server_options = server_options else: raise errors.ProgrammingError( - message="Got an unexpected type of server options when " f"launching sessions: {type(server_options)}" + message=f"Got an unexpected type of server options when launching sessions: {type(server_options)}" ) # The old style API was used, try to find a matching class from the CRC service parsed_server_options = await nb_config.crc_validator.find_acceptable_class(user, requested_server_options) if parsed_server_options is None: raise user_errors.UserInputError( - message="Cannot find suitable server options based on your request and " - "the available resource classes.", + message="Cannot find suitable server options based on your request and the available resource classes.", detail="You are receiving this error because you are using the old API for " "selecting resources. 
Updating to the new API which includes specifying only " "a specific resource class ID and storage is preferred and more convenient.", @@ -444,21 +458,23 @@ async def launch_notebook_helper( storages: list[RCloneStorage] = [] if cloudstorage: - gl_project_id = gl_project.id if gl_project is not None else 0 + user_secret_key = await user_repo.get_or_create_user_secret_key(user) try: for cstorage in cloudstorage: + saved_storage: CloudStorage | None = None + if cstorage.storage_id: + saved_storage = await storage_repo.get_storage_by_id(ULID.from_str(cstorage.storage_id), user) storages.append( await RCloneStorage.storage_from_schema( - cstorage.model_dump(), - user=user, - project_id=gl_project_id, + data=cstorage.model_dump(), work_dir=server_work_dir, - config=nb_config, - internal_gitlab_user=internal_gitlab_user, + user_secret_key=user_secret_key, + saved_storage=saved_storage, + storage_class=nb_config.cloud_storage.storage_class, ) ) except errors.ValidationError as e: - raise user_errors.UserInputError(message=f"Couldn't load cloud storage config: {str(e)}") + raise user_errors.UserInputError(message=f"Couldn't load cloud storage config: {str(e)}") from e mount_points = set(s.mount_folder for s in storages if s.mount_folder and s.mount_folder != "/") if len(mount_points) != len(storages): raise user_errors.UserInputError( @@ -475,16 +491,14 @@ async def launch_notebook_helper( if user_secrets: k8s_user_secret = K8sUserSecrets(f"{server_name}-secret", **user_secrets.model_dump()) + # Renku 1-only parameters extra_kwargs: dict = dict( commit_sha=commit_sha, branch=branch, project=project, - namespace=namespace, - launcher_id=launcher_id, - project_id=project_id, - notebook=notebook, - internal_gitlab_user=internal_gitlab_user, # Renku 1 - gitlab_project=gl_project, # Renku 1 + gl_namespace=gl_namespace, + internal_gitlab_user=internal_gitlab_user, + gitlab_project=gl_project, ) server = server_class( user=user, @@ -501,6 +515,7 @@ async def launch_notebook_helper( is_image_private=is_image_private, repositories=[Repository.from_dict(r.model_dump()) for r in repositories], config=nb_config, + host=host, **extra_kwargs, ) @@ -517,100 +532,90 @@ async def launch_notebook_helper( logger.debug(f"Server {server.server_name} has been started") - if k8s_user_secret is not None: - owner_reference = { - "apiVersion": "amalthea.dev/v1alpha1", - "kind": "JupyterServer", - "name": server.server_name, - "uid": manifest.metadata.uid, - "controller": True, - } - request_data = { - "name": k8s_user_secret.name, - "namespace": server.k8s_client.preferred_namespace, - "secret_ids": [str(id_) for id_ in k8s_user_secret.user_secret_ids], - "owner_references": [owner_reference], - } - headers = {"Authorization": f"bearer {user.access_token}"} + owner_reference = { + "apiVersion": JUPYTER_SESSION_GVK.group_version, + "kind": JUPYTER_SESSION_GVK.kind, + "name": server.server_name, + "uid": manifest.metadata.uid, + } + async def create_secret(payload: dict[str, Any], type_message: str) -> None: async def _on_error(server_name: str, error_msg: str) -> None: - await nb_config.k8s_client.delete_server(server_name, safe_username=user.id) + await nb_config.k8s_client.delete_session(server_name, safe_username=user.id) raise RuntimeError(error_msg) try: - response = requests.post( - nb_config.user_secrets.secrets_storage_service_url + "/api/secrets/kubernetes", - json=request_data, - headers=headers, - timeout=10, - ) - except requests.exceptions.ConnectionError: - await _on_error(server.server_name, "User secrets 
storage service could not be contacted {exc}") + async with httpx.AsyncClient(timeout=10) as client: + response = await client.post( + nb_config.user_secrets.secrets_storage_service_url + "/api/secrets/kubernetes", + json=payload, + headers={"Authorization": f"bearer {user.access_token}"}, + ) + except httpx.ConnectError as exc: + await _on_error(server_name, f"{type_message} storage service could not be contacted {exc}") + else: + if response.status_code != 201: + await _on_error(server_name, f"{type_message} could not be created {response.json()}") - if response.status_code != 201: - await _on_error(server.server_name, f"User secret could not be created {response.json()}") + if k8s_user_secret is not None: + request_data: dict[str, Any] = { + "name": k8s_user_secret.name, + "namespace": server.k8s_namespace(), + "secret_ids": [str(id_) for id_ in k8s_user_secret.user_secret_ids], + "owner_references": [owner_reference], + } + await create_secret(payload=request_data, type_message="User secrets") + + # NOTE: Create a secret for each storage that has saved secrets + for icloud_storage, cloud_storage in enumerate(storages): + if cloud_storage.secrets and cloud_storage.base_name: + base_name = cloud_storage.base_name + if not base_name: + base_name = f"{server_name}-ds-{icloud_storage}" + request_data = { + "name": f"{base_name}-secrets", + "namespace": server.k8s_namespace(), + "secret_ids": list(cloud_storage.secrets.keys()), + "owner_references": [owner_reference], + "key_mapping": cloud_storage.secrets, + } + await create_secret(payload=request_data, type_message="Saved storage secrets") return UserServerManifest(manifest, nb_config.sessions.default_image), 201 async def launch_notebook( - config: NotebooksConfig, - user: AnonymousAPIUser | AuthenticatedAPIUser, - internal_gitlab_user: APIUser, - launch_request: apispec.LaunchNotebookRequest, -) -> tuple[UserServerManifest, int]: - """Starts a server.""" - - server_name = renku_2_make_server_name( - user=user, project_id=launch_request.project_id, launcher_id=launch_request.launcher_id - ) - return await launch_notebook_helper( - nb_config=config, - server_name=server_name, - server_class=Renku2UserServer, - user=user, - image=launch_request.image or config.sessions.default_image, - resource_class_id=launch_request.resource_class_id, - storage=launch_request.storage, - environment_variables=launch_request.environment_variables, - user_secrets=launch_request.user_secrets, - default_url=config.server_options.default_url_default, - lfs_auto_fetch=config.server_options.lfs_auto_fetch_default, - cloudstorage=launch_request.cloudstorage, - server_options=None, - namespace=None, - project=None, - branch=None, - commit_sha=None, - notebook=None, - gl_project=None, - gl_project_path=None, - project_id=launch_request.project_id, - launcher_id=launch_request.launcher_id, - repositories=launch_request.repositories, - internal_gitlab_user=internal_gitlab_user, - ) - - -async def launch_notebook_old( config: NotebooksConfig, user: AnonymousAPIUser | AuthenticatedAPIUser, internal_gitlab_user: APIUser, launch_request: apispec.LaunchNotebookRequestOld, + user_repo: UserRepo, + storage_repo: StorageRepository, ) -> tuple[UserServerManifest, int]: """Starts a server using the old operator.""" + cluster = await config.k8s_client.cluster_by_class_id(launch_request.resource_class_id, user) + + if isinstance(user, AnonymousAPIUser): + safe_username = escapism.escape(user.id, escape_char="-").lower() + else: + safe_username = escapism.escape(user.email, 
escape_char="-").lower() server_name = renku_1_make_server_name( - user.id, launch_request.namespace, launch_request.project, launch_request.branch, launch_request.commit_sha + safe_username, + launch_request.namespace, + launch_request.project, + launch_request.branch, + launch_request.commit_sha, + cluster.id, ) project_slug = f"{launch_request.namespace}/{launch_request.project}" - gitlab_client = NotebooksGitlabClient(config.git.url, APIUser.access_token) + gitlab_client = NotebooksGitlabClient(config.git.url, internal_gitlab_user.access_token) gl_project = gitlab_client.get_renku_project(project_slug) if gl_project is None: raise errors.MissingResourceError(message=f"Cannot find gitlab project with slug {project_slug}") gl_project_path = gl_project.path server_class = Renku1UserServer - server_options = ( + _server_options = ( ServerOptions.from_server_options_request_schema( launch_request.serverOptions.model_dump(), config.server_options.default_url_default, @@ -625,7 +630,7 @@ async def launch_notebook_old( server_name=server_name, server_class=server_class, user=user, - image=launch_request.image or config.sessions.default_image, + image=launch_request.image, resource_class_id=launch_request.resource_class_id, storage=launch_request.storage, environment_variables=launch_request.environment_variables, @@ -633,16 +638,34 @@ async def launch_notebook_old( default_url=launch_request.default_url, lfs_auto_fetch=launch_request.lfs_auto_fetch, cloudstorage=launch_request.cloudstorage, - server_options=server_options, - namespace=launch_request.namespace, + server_options=_server_options, + gl_namespace=launch_request.namespace, project=launch_request.project, branch=launch_request.branch, commit_sha=launch_request.commit_sha, - notebook=launch_request.notebook, gl_project=gl_project, gl_project_path=gl_project_path, - project_id=None, - launcher_id=None, repositories=None, internal_gitlab_user=internal_gitlab_user, + user_repo=user_repo, + storage_repo=storage_repo, + ) + + +def serialize_v1_server(manifest: UserServerManifest, nb_config: NotebooksConfig, status: int = 200) -> JSONResponse: + """Format and serialize a Renku v1 JupyterServer manifest.""" + data = NotebookResponse().dump(NotebookResponse.format_user_pod_data(manifest, nb_config)) + return validated_json(apispec.NotebookResponse, data, status=status, model_dump_kwargs=dict(by_alias=True)) + + +def serialize_v1_servers( + manifests: dict[str, UserServerManifest], nb_config: NotebooksConfig, status: int = 200 +) -> JSONResponse: + """Format and serialize many Renku v1 JupyterServer manifests.""" + data = { + manifest.server_name: NotebookResponse().dump(NotebookResponse.format_user_pod_data(manifest, nb_config)) + for manifest in sorted(manifests.values(), key=lambda x: x.server_name) + } + return validated_json( + apispec.ServersGetResponse, {"servers": data}, status=status, model_dump_kwargs=dict(by_alias=True) ) diff --git a/components/renku_data_services/notebooks/core_sessions.py b/components/renku_data_services/notebooks/core_sessions.py new file mode 100644 index 000000000..6a5c76df1 --- /dev/null +++ b/components/renku_data_services/notebooks/core_sessions.py @@ -0,0 +1,658 @@ +"""A selection of core functions for AmaltheaSessions.""" + +import base64 +import json +import os +import random +import string +from collections.abc import AsyncIterator +from datetime import timedelta +from pathlib import PurePosixPath +from typing import cast +from urllib.parse import urljoin, urlparse + +import httpx +from 
kubernetes.client import V1ObjectMeta, V1Secret +from sanic import Request +from toml import dumps +from yaml import safe_dump + +from renku_data_services.app_config import logging +from renku_data_services.base_models import APIUser +from renku_data_services.base_models.core import AnonymousAPIUser, AuthenticatedAPIUser +from renku_data_services.base_models.metrics import MetricsService +from renku_data_services.crc.db import ResourcePoolRepository +from renku_data_services.crc.models import GpuKind, ResourceClass, ResourcePool +from renku_data_services.data_connectors.models import DataConnectorSecret, DataConnectorWithSecrets +from renku_data_services.errors import errors +from renku_data_services.notebooks import apispec +from renku_data_services.notebooks.api.amalthea_patches import git_proxy, init_containers +from renku_data_services.notebooks.api.classes.image import Image +from renku_data_services.notebooks.api.classes.k8s_client import sanitizer +from renku_data_services.notebooks.api.classes.repository import GitProvider, Repository +from renku_data_services.notebooks.api.schemas.cloud_storage import RCloneStorage +from renku_data_services.notebooks.config import NotebooksConfig +from renku_data_services.notebooks.crs import ( + AmaltheaSessionV1Alpha1, + AmaltheaSessionV1Alpha1Patch, + AmaltheaSessionV1Alpha1SpecPatch, + AmaltheaSessionV1Alpha1SpecSessionPatch, + Culling, + DataSource, + ExtraContainer, + ExtraVolume, + ExtraVolumeMount, + ImagePullSecret, + InitContainer, + Limits, + LimitsStr, + Requests, + RequestsStr, + Resources, + SecretAsVolume, + SecretAsVolumeItem, + SessionEnvItem, + State, +) +from renku_data_services.notebooks.models import ExtraSecret +from renku_data_services.notebooks.utils import ( + node_affinity_from_resource_class, + tolerations_from_resource_class, +) +from renku_data_services.project.db import ProjectRepository +from renku_data_services.project.models import Project, SessionSecret +from renku_data_services.session.models import SessionLauncher +from renku_data_services.users.db import UserRepo +from renku_data_services.utils.cryptography import get_encryption_key + +logger = logging.getLogger(__name__) + + +async def get_extra_init_containers( + nb_config: NotebooksConfig, + user: AnonymousAPIUser | AuthenticatedAPIUser, + repositories: list[Repository], + git_providers: list[GitProvider], + storage_mount: PurePosixPath, + work_dir: PurePosixPath, + uid: int = 1000, + gid: int = 1000, +) -> tuple[list[InitContainer], list[ExtraVolume]]: + """Get all extra init containers that should be added to an amalthea session.""" + cert_init, cert_vols = init_containers.certificates_container(nb_config) + session_init_containers = [InitContainer.model_validate(sanitizer(cert_init))] + extra_volumes = [ExtraVolume.model_validate(sanitizer(volume)) for volume in cert_vols] + git_clone = await init_containers.git_clone_container_v2( + user=user, + config=nb_config, + repositories=repositories, + git_providers=git_providers, + workspace_mount_path=storage_mount, + work_dir=work_dir, + uid=uid, + gid=gid, + ) + if git_clone is not None: + session_init_containers.append(InitContainer.model_validate(git_clone)) + return session_init_containers, extra_volumes + + +async def get_extra_containers( + nb_config: NotebooksConfig, + user: AnonymousAPIUser | AuthenticatedAPIUser, + repositories: list[Repository], + git_providers: list[GitProvider], +) -> list[ExtraContainer]: + """Get the extra containers added to amalthea sessions.""" + conts: list[ExtraContainer] = 
[]
+    git_proxy_container = await git_proxy.main_container(
+        user=user, config=nb_config, repositories=repositories, git_providers=git_providers
+    )
+    if git_proxy_container:
+        conts.append(ExtraContainer.model_validate(sanitizer(git_proxy_container)))
+    return conts
+
+
+async def get_auth_secret_authenticated(
+    nb_config: NotebooksConfig,
+    user: AuthenticatedAPIUser,
+    server_name: str,
+    base_server_url: str,
+    base_server_https_url: str,
+    base_server_path: str,
+) -> ExtraSecret:
+    """Get the extra secrets that need to be added to the session for an authenticated user."""
+    secret_data = {}
+
+    parsed_proxy_url = urlparse(urljoin(base_server_url + "/", "oauth2"))
+    vol = ExtraVolume(
+        name="renku-authorized-emails",
+        secret=SecretAsVolume(
+            secretName=server_name,
+            items=[SecretAsVolumeItem(key="authorized_emails", path="authorized_emails")],
+        ),
+    )
+    secret_data["auth"] = dumps(
+        {
+            "provider": "oidc",
+            "client_id": nb_config.sessions.oidc.client_id,
+            "oidc_issuer_url": nb_config.sessions.oidc.issuer_url,
+            "session_cookie_minimal": True,
+            "skip_provider_button": True,
+            # NOTE: If the redirect url is not HTTPS then some identity providers will fail.
+            "redirect_url": urljoin(base_server_https_url + "/", "oauth2/callback"),
+            "cookie_path": base_server_path,
+            "proxy_prefix": parsed_proxy_url.path,
+            "authenticated_emails_file": "/authorized_emails",
+            "client_secret": nb_config.sessions.oidc.client_secret,
+            "cookie_secret": base64.urlsafe_b64encode(os.urandom(32)).decode(),
+            "insecure_oidc_allow_unverified_email": nb_config.sessions.oidc.allow_unverified_email,
+        }
+    )
+    secret_data["authorized_emails"] = user.email
+    secret = V1Secret(metadata=V1ObjectMeta(name=server_name), string_data=secret_data)
+    vol_mount = ExtraVolumeMount(
+        name="renku-authorized-emails",
+        mountPath="/authorized_emails",
+        subPath="authorized_emails",
+    )
+    return ExtraSecret(secret, vol, vol_mount)
+
+
+async def get_auth_secret_anonymous(nb_config: NotebooksConfig, server_name: str, request: Request) -> ExtraSecret:
+    """Get the extra secrets that need to be added to the session for an anonymous user."""
+    # NOTE: We extract the session cookie value here in order to avoid creating a cookie.
+    # The gateway encrypts and signs cookies so the user ID injected in the request headers does not
+    # match the value of the session cookie.
+    session_id = cast(str | None, request.cookies.get(nb_config.session_id_cookie_name))
+    if not session_id:
+        raise errors.UnauthorizedError(
+            message=f"You have to have a renku session cookie at {nb_config.session_id_cookie_name} "
+            "in order to launch an anonymous session."
+        )
+    # NOTE: Amalthea looks for the token value first in the cookie and then in the authorization header
+    secret_data = {
+        "auth": safe_dump(
+            {
+                "authproxy": {
+                    "token": session_id,
+                    "cookie_key": nb_config.session_id_cookie_name,
+                    "verbose": True,
+                }
+            }
+        )
+    }
+    secret = V1Secret(metadata=V1ObjectMeta(name=server_name), string_data=secret_data)
+    return ExtraSecret(secret)
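The `cookie_secret` built in `get_auth_secret_authenticated` above is easy to get wrong: as far as I know, oauth2-proxy only accepts secrets that decode to 16, 24, or 32 bytes. A minimal, stdlib-only sketch of just that encoding step (no Renku types involved):

```python
import base64
import os

# 32 random bytes, URL-safe base64 encoded, as in the secret built above.
cookie_secret = base64.urlsafe_b64encode(os.urandom(32)).decode()

# Sanity check: the value must round-trip to exactly 32 bytes for oauth2-proxy.
assert len(base64.urlsafe_b64decode(cookie_secret)) == 32
print(cookie_secret)
```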
+
+
+def get_gitlab_image_pull_secret(
+    nb_config: NotebooksConfig, user: AuthenticatedAPIUser, image_pull_secret_name: str, access_token: str
+) -> ExtraSecret:
+    """Create a Kubernetes secret for private GitLab registry authentication."""
+
+    k8s_namespace = nb_config.k8s_client.namespace()
+
+    registry_secret = {
+        "auths": {
+            nb_config.git.registry: {
+                "Username": "oauth2",
+                "Password": access_token,
+                "Email": user.email,
+            }
+        }
+    }
+    registry_secret = json.dumps(registry_secret)
+
+    secret_data = {".dockerconfigjson": registry_secret}
+    secret = V1Secret(
+        metadata=V1ObjectMeta(name=image_pull_secret_name, namespace=k8s_namespace),
+        string_data=secret_data,
+        type="kubernetes.io/dockerconfigjson",
+    )
+
+    return ExtraSecret(secret)
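For context on the secret built in `get_gitlab_image_pull_secret` above: `kubernetes.io/dockerconfigjson` secrets wrap a Docker config document keyed by registry host, serialized as a string under the `.dockerconfigjson` key. A small sketch of the payload shape; the registry host, token, and email here are hypothetical placeholders:

```python
import json

# Hypothetical stand-ins; the real code uses the configured GitLab registry
# and the user's OAuth2 access token.
registry_secret = {
    "auths": {
        "registry.example.com": {
            "Username": "oauth2",
            "Password": "<access-token>",
            "Email": "user@example.com",
        }
    }
}
# The inner document is itself JSON-serialized before being placed in the secret.
print(json.dumps({".dockerconfigjson": json.dumps(registry_secret)}, indent=2))
```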
+
+
+async def get_data_sources(
+    nb_config: NotebooksConfig,
+    user: AnonymousAPIUser | AuthenticatedAPIUser,
+    server_name: str,
+    data_connectors_stream: AsyncIterator[DataConnectorWithSecrets],
+    work_dir: PurePosixPath,
+    cloud_storage_overrides: list[apispec.SessionCloudStoragePost],
+    user_repo: UserRepo,
+) -> tuple[list[DataSource], list[ExtraSecret], dict[str, list[DataConnectorSecret]]]:
+    """Generate cloud storage related resources."""
+    data_sources: list[DataSource] = []
+    secrets: list[ExtraSecret] = []
+    dcs: dict[str, RCloneStorage] = {}
+    dcs_secrets: dict[str, list[DataConnectorSecret]] = {}
+    user_secret_key: str | None = None
+    async for dc in data_connectors_stream:
+        mount_folder = (
+            dc.data_connector.storage.target_path
+            if PurePosixPath(dc.data_connector.storage.target_path).is_absolute()
+            else (work_dir / dc.data_connector.storage.target_path).as_posix()
+        )
+        dcs[str(dc.data_connector.id)] = RCloneStorage(
+            source_path=dc.data_connector.storage.source_path,
+            mount_folder=mount_folder,
+            configuration=dc.data_connector.storage.configuration,
+            readonly=dc.data_connector.storage.readonly,
+            name=dc.data_connector.name,
+            secrets={str(secret.secret_id): secret.name for secret in dc.secrets},
+            storage_class=nb_config.cloud_storage.storage_class,
+        )
+        if len(dc.secrets) > 0:
+            dcs_secrets[str(dc.data_connector.id)] = dc.secrets
+    if isinstance(user, AuthenticatedAPIUser) and len(dcs_secrets) > 0:
+        secret_key = await user_repo.get_or_create_user_secret_key(user)
+        user_secret_key = get_encryption_key(secret_key.encode(), user.id.encode()).decode("utf-8")
+    # NOTE: Check the cloud storage overrides from the request body and if any match
+    # then overwrite the project's cloud storages
+    # NOTE: Cloud storages in the session launch request body that are not from the DB will cause a 404 error
+    # NOTE: Overriding the configuration when a saved secret is there will cause a 422 error
+    for csr in cloud_storage_overrides:
+        csr_id = csr.storage_id
+        if csr_id not in dcs:
+            raise errors.MissingResourceError(
+                message=f"You have requested a cloud storage with ID {csr_id} which does not exist "
+                "or you don't have access to."
+            )
+        if csr.target_path is not None and not PurePosixPath(csr.target_path).is_absolute():
+            csr.target_path = (work_dir / csr.target_path).as_posix()
+        dcs[csr_id] = dcs[csr_id].with_override(csr)
+
+    # Handle potential duplicate target_path
+    dcs = _deduplicate_target_paths(dcs)
+
+    for cs_id, cs in dcs.items():
+        secret_name = f"{server_name}-ds-{cs_id.lower()}"
+        secret_key_needed = len(dcs_secrets.get(cs_id, [])) > 0
+        if secret_key_needed and user_secret_key is None:
+            raise errors.ProgrammingError(
+                message=f"You have saved storage secrets for data connector {cs_id} "
+                f"associated with your user ID {user.id} but no key to decrypt them, "
+                "therefore we cannot mount the requested data connector. "
+                "Please report this to the Renku administrators."
+            )
+        secret = ExtraSecret(
+            cs.secret(
+                secret_name,
+                nb_config.k8s_client.namespace(),
+                user_secret_key=user_secret_key if secret_key_needed else None,
+            )
+        )
+        secrets.append(secret)
+        data_sources.append(
+            DataSource(
+                mountPath=cs.mount_folder,
+                secretRef=secret.ref(),
+                accessMode="ReadOnlyMany" if cs.readonly else "ReadWriteOnce",
+            )
+        )
+    return data_sources, secrets, dcs_secrets
+
+
+async def request_dc_secret_creation(
+    user: AuthenticatedAPIUser | AnonymousAPIUser,
+    nb_config: NotebooksConfig,
+    manifest: AmaltheaSessionV1Alpha1,
+    dc_secrets: dict[str, list[DataConnectorSecret]],
+) -> None:
+    """Request the specified data connector secrets to be created by the secret service."""
+    if isinstance(user, AnonymousAPIUser):
+        return
+    owner_reference = {
+        "apiVersion": manifest.apiVersion,
+        "kind": manifest.kind,
+        "name": manifest.metadata.name,
+        "uid": manifest.metadata.uid,
+    }
+    secrets_url = nb_config.user_secrets.secrets_storage_service_url + "/api/secrets/kubernetes"
+    headers = {"Authorization": f"bearer {user.access_token}"}
+    for s_id, secrets in dc_secrets.items():
+        if len(secrets) == 0:
+            continue
+        request_data = {
+            "name": f"{manifest.metadata.name}-ds-{s_id.lower()}-secrets",
+            "namespace": nb_config.k8s_v2_client.namespace(),
+            "secret_ids": [str(secret.secret_id) for secret in secrets],
+            "owner_references": [owner_reference],
+            "key_mapping": {str(secret.secret_id): secret.name for secret in secrets},
+        }
+        async with httpx.AsyncClient(timeout=10) as client:
+            res = await client.post(secrets_url, headers=headers, json=request_data)
+            if res.status_code >= 300 or res.status_code < 200:
+                raise errors.ProgrammingError(
+                    message=f"The secret for data connector with ID {s_id} could not be "
+                    f"successfully created, the status code was {res.status_code}. "
+                    "Please contact a Renku administrator.",
+                    detail=res.text,
+                )
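The `owner_references` entry in the request above is what ties each created secret's lifetime to the session: when the custom resource named there is deleted, Kubernetes garbage-collects the secret, so nothing is left behind after session cleanup. A sketch of the payload shape with hypothetical stand-in values:

```python
import json

# All values below are illustrative placeholders for the real manifest fields.
owner_reference = {
    "apiVersion": "amalthea.dev/v1alpha1",
    "kind": "AmaltheaSession",
    "name": "example-session",
    "uid": "00000000-0000-0000-0000-000000000000",
}
request_data = {
    "name": "example-session-ds-abc123-secrets",
    "namespace": "renku-sessions",
    "secret_ids": ["secret-id-1"],
    "owner_references": [owner_reference],
    "key_mapping": {"secret-id-1": "credentials"},
}
print(json.dumps(request_data, indent=2))
```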
+
+
+def get_launcher_env_variables(launcher: SessionLauncher, body: apispec.SessionPostRequest) -> list[SessionEnvItem]:
+    """Get the environment variables from the launcher, with overrides from the request."""
+    output: list[SessionEnvItem] = []
+    env_overrides = {i.name: i.value for i in body.env_variable_overrides or []}
+    for env in launcher.env_variables or []:
+        if env.name in env_overrides:
+            output.append(SessionEnvItem(name=env.name, value=env_overrides[env.name]))
+        else:
+            output.append(SessionEnvItem(name=env.name, value=env.value))
+    return output
+
+
+def verify_launcher_env_variable_overrides(launcher: SessionLauncher, body: apispec.SessionPostRequest) -> None:
+    """Raise an error if there are env variables that are not defined in the launcher."""
+    env_overrides = {i.name: i.value for i in body.env_variable_overrides or []}
+    known_env_names = {i.name for i in launcher.env_variables or []}
+    unknown_env_names = set(env_overrides.keys()) - known_env_names
+    if unknown_env_names:
+        message = f"""The following environment variables are not defined in the session launcher: {unknown_env_names}.
+            Please remove them from the launch request or add them to the session launcher."""
+        raise errors.ValidationError(message=message)
+
+
+async def request_session_secret_creation(
+    user: AuthenticatedAPIUser | AnonymousAPIUser,
+    nb_config: NotebooksConfig,
+    manifest: AmaltheaSessionV1Alpha1,
+    session_secrets: list[SessionSecret],
+) -> None:
+    """Request the specified user session secrets to be created by the secret service."""
+    if isinstance(user, AnonymousAPIUser):
+        return
+    if not session_secrets:
+        return
+    owner_reference = {
+        "apiVersion": manifest.apiVersion,
+        "kind": manifest.kind,
+        "name": manifest.metadata.name,
+        "uid": manifest.metadata.uid,
+    }
+    key_mapping: dict[str, list[str]] = dict()
+    for s in session_secrets:
+        secret_id = str(s.secret_id)
+        if secret_id not in key_mapping:
+            key_mapping[secret_id] = list()
+        key_mapping[secret_id].append(s.secret_slot.filename)
+    request_data = {
+        "name": f"{manifest.metadata.name}-secrets",
+        "namespace": nb_config.k8s_v2_client.namespace(),
+        "secret_ids": [str(s.secret_id) for s in session_secrets],
+        "owner_references": [owner_reference],
+        "key_mapping": key_mapping,
+    }
+    secrets_url = nb_config.user_secrets.secrets_storage_service_url + "/api/secrets/kubernetes"
+    headers = {"Authorization": f"bearer {user.access_token}"}
+    async with httpx.AsyncClient(timeout=10) as client:
+        res = await client.post(secrets_url, headers=headers, json=request_data)
+        if res.status_code >= 300 or res.status_code < 200:
+            raise errors.ProgrammingError(
+                message="The session secrets could not be successfully created, "
+                f"the status code was {res.status_code}. "
+                "Please contact a Renku administrator.",
+                detail=res.text,
+            )
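The override rule in `get_launcher_env_variables` above is worth stating compactly: values from the request win, but only for names the launcher already declares; unknown names are rejected separately by `verify_launcher_env_variable_overrides`. A dependency-free sketch of the same merge rule:

```python
def merge_env(launcher_env: dict[str, str], overrides: dict[str, str]) -> dict[str, str]:
    # Request overrides take precedence, but only for declared names;
    # names that only appear in overrides are dropped here (the real code
    # raises a validation error for those instead).
    return {name: overrides.get(name, value) for name, value in launcher_env.items()}


assert merge_env({"A": "1", "B": "2"}, {"B": "override"}) == {"A": "1", "B": "override"}
```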
+
+
+def resources_from_resource_class(resource_class: ResourceClass) -> Resources:
+    """Convert the resource class to a k8s resources spec."""
+    requests: dict[str, Requests | RequestsStr] = {
+        "cpu": RequestsStr(str(round(resource_class.cpu * 1000)) + "m"),
+        "memory": RequestsStr(f"{resource_class.memory}Gi"),
+    }
+    limits: dict[str, Limits | LimitsStr] = {"memory": LimitsStr(f"{resource_class.memory}Gi")}
+    if resource_class.gpu > 0:
+        gpu_name = GpuKind.NVIDIA.value + "/gpu"
+        requests[gpu_name] = Requests(resource_class.gpu)
+        # NOTE: GPUs have to be set in limits too since GPUs cannot be overcommitted; if
+        # they are not, on some clusters the session will fail to start.
+        limits[gpu_name] = Limits(resource_class.gpu)
+    return Resources(requests=requests, limits=limits if len(limits) > 0 else None)
+
+
+def repositories_from_project(project: Project, git_providers: list[GitProvider]) -> list[Repository]:
+    """Get the list of git repositories from a project."""
+    repositories: list[Repository] = []
+    for repo in project.repositories:
+        found_provider_id: str | None = None
+        for provider in git_providers:
+            if urlparse(provider.url).netloc == urlparse(repo).netloc:
+                found_provider_id = provider.id
+                break
+        repositories.append(Repository(url=repo, provider=found_provider_id))
+    return repositories
+
+
+async def repositories_from_session(
+    user: AnonymousAPIUser | AuthenticatedAPIUser,
+    session: AmaltheaSessionV1Alpha1,
+    project_repo: ProjectRepository,
+    git_providers: list[GitProvider],
+) -> list[Repository]:
+    """Get the list of git repositories from a session."""
+    try:
+        project = await project_repo.get_project(user, session.project_id)
+    except errors.MissingResourceError:
+        return []
+    return repositories_from_project(project, git_providers)
+
+
+def get_culling(
+    user: AuthenticatedAPIUser | AnonymousAPIUser, resource_pool: ResourcePool, nb_config: NotebooksConfig
+) -> Culling:
+    """Create the culling specification for an AmaltheaSession."""
+    idle_threshold_seconds = resource_pool.idle_threshold or nb_config.sessions.culling.registered.idle_seconds
+    if user.is_anonymous:
+        # NOTE: Anonymous sessions should not be hibernated at all, but there is no such option in Amalthea.
+        # So in this case we set a very low hibernation threshold so the session is deleted quickly after
+        # it is hibernated.
+        hibernation_threshold_seconds = 1
+    else:
+        hibernation_threshold_seconds = (
+            resource_pool.hibernation_threshold or nb_config.sessions.culling.registered.hibernated_seconds
+        )
+    return Culling(
+        maxAge=timedelta(seconds=nb_config.sessions.culling.registered.max_age_seconds),
+        maxFailedDuration=timedelta(seconds=nb_config.sessions.culling.registered.failed_seconds),
+        maxHibernatedDuration=timedelta(seconds=hibernation_threshold_seconds),
+        maxIdleDuration=timedelta(seconds=idle_threshold_seconds),
+        maxStartingDuration=timedelta(seconds=nb_config.sessions.culling.registered.pending_seconds),
+    )
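One choice in `get_culling` above deserves emphasis: Amalthea cannot disable hibernation outright for anonymous sessions, so the code approximates "never hibernate, just delete" with a one-second hibernation window. A toy rendering of the threshold selection; the 86400-second default is illustrative, not the real configuration value:

```python
from datetime import timedelta


def hibernation_threshold(is_anonymous: bool, pool_threshold: int | None, default: int = 86400) -> timedelta:
    # Anonymous sessions are deleted almost immediately once hibernated.
    if is_anonymous:
        return timedelta(seconds=1)
    # Registered users get the resource pool's threshold, falling back to the default.
    return timedelta(seconds=pool_threshold or default)


assert hibernation_threshold(True, 3600) == timedelta(seconds=1)
assert hibernation_threshold(False, None) == timedelta(days=1)
```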
+
+
+async def requires_image_pull_secret(nb_config: NotebooksConfig, image: str, internal_gitlab_user: APIUser) -> bool:
+    """Determines if an image requires a pull secret based on its visibility and the user's GitLab access token."""
+
+    parsed_image = Image.from_path(image)
+    image_repo = parsed_image.repo_api()
+
+    image_exists_publicly = await image_repo.image_exists(parsed_image)
+    if image_exists_publicly:
+        return False
+
+    if parsed_image.hostname == nb_config.git.registry and internal_gitlab_user.access_token:
+        image_repo = image_repo.with_oauth2_token(internal_gitlab_user.access_token)
+        image_exists_privately = await image_repo.image_exists(parsed_image)
+        if image_exists_privately:
+            return True
+    # No pull secret needed if the image is private and the user cannot access it
+    return False
+
+
+async def patch_session(
+    body: apispec.SessionPatchRequest,
+    session_id: str,
+    nb_config: NotebooksConfig,
+    user: AnonymousAPIUser | AuthenticatedAPIUser,
+    internal_gitlab_user: APIUser,
+    rp_repo: ResourcePoolRepository,
+    project_repo: ProjectRepository,
+    metrics: MetricsService,
+) -> AmaltheaSessionV1Alpha1:
+    """Patch an Amalthea session."""
+    session = await nb_config.k8s_v2_client.get_session(session_id, user.id)
+    if session is None:
+        raise errors.MissingResourceError(message=f"The session with ID {session_id} does not exist")
+    if session.spec is None:
+        raise errors.ProgrammingError(
+            message=f"The session {session_id} being patched is missing the expected 'spec' field.", quiet=True
+        )
+    cluster = await nb_config.k8s_v2_client.cluster_by_class_id(session.resource_class_id(), user)
+
+    patch = AmaltheaSessionV1Alpha1Patch(spec=AmaltheaSessionV1Alpha1SpecPatch())
+    is_getting_hibernated: bool = False
+
+    # Hibernation
+    # TODO: Some patching should only be done when the session is in some states to avoid inadvertent restarts
+    # Refresh tokens for git proxy
+    if (
+        body.state is not None
+        and body.state.value.lower() == State.Hibernated.value.lower()
+        and body.state.value.lower() != session.status.state.value.lower()
+    ):
+        # Session is being hibernated
+        patch.spec.hibernated = True
+        is_getting_hibernated = True
+    elif (
+        body.state is not None
+        and body.state.value.lower() == State.Running.value.lower()
+        and session.status.state.value.lower() != body.state.value.lower()
+    ):
+        # Session is being resumed
+        patch.spec.hibernated = False
+        await metrics.user_requested_session_resume(user, metadata={"session_id": session_id})
+
+    # Resource class
+    if body.resource_class_id is not None:
+        new_cluster = await nb_config.k8s_v2_client.cluster_by_class_id(body.resource_class_id, user)
+        if new_cluster.id != cluster.id:
+            raise errors.ValidationError(
+                message=(
+                    f"The requested resource class {body.resource_class_id} is not in the "
+                    f"same cluster {cluster.id} as the current resource class {session.resource_class_id()}."
+                )
+            )
+        rp = await rp_repo.get_resource_pool_from_class(user, body.resource_class_id)
+        rc = rp.get_resource_class(body.resource_class_id)
+        if not rc:
+            raise errors.MissingResourceError(
+                message=f"The resource class you requested with ID {body.resource_class_id} does not exist"
+            )
+        if not patch.spec.session:
+            patch.spec.session = AmaltheaSessionV1Alpha1SpecSessionPatch()
+        patch.spec.session.resources = resources_from_resource_class(rc)
+        # Tolerations
+        tolerations = tolerations_from_resource_class(rc, nb_config.sessions.tolerations_model)
+        patch.spec.tolerations = tolerations
+        # Affinities
+        patch.spec.affinity = node_affinity_from_resource_class(rc, nb_config.sessions.affinity_model)
+        # Priority class (if a quota is being used)
+        patch.spec.priorityClassName = rc.quota
+        patch.spec.culling = get_culling(user, rp, nb_config)
+        if rp.cluster is not None:
+            patch.spec.service_account_name = rp.cluster.service_account_name
+
+    # If the session is being hibernated we do not need to patch anything else that is
+    # not specifically called for in the request body, we can refresh things when the user resumes.
+    if is_getting_hibernated:
+        return await nb_config.k8s_v2_client.patch_session(session_id, user.id, patch.to_rfc7386())
+
+    # Patching the extra containers (includes the git proxy)
+    git_providers = await nb_config.git_provider_helper.get_providers(user)
+    repositories = await repositories_from_session(user, session, project_repo, git_providers)
+    extra_containers = await get_extra_containers(
+        nb_config,
+        user,
+        repositories,
+        git_providers,
+    )
+    if extra_containers:
+        patch.spec.extraContainers = extra_containers
+
+    # Patching the image pull secret
+    if isinstance(user, AuthenticatedAPIUser) and internal_gitlab_user.access_token is not None:
+        image = session.spec.session.image
+        server_name = session.metadata.name
+        needs_pull_secret = await requires_image_pull_secret(nb_config, image, internal_gitlab_user)
+        logger.info(f"Session with ID {session_id} needs pull secret for image {image}: {needs_pull_secret}")
+
+        if needs_pull_secret:
+            image_pull_secret_name = f"{server_name}-image-secret"
+
+            # Always create a fresh secret to ensure we have the latest token
+            image_secret = get_gitlab_image_pull_secret(
+                nb_config, user, image_pull_secret_name, internal_gitlab_user.access_token
+            )
+
+            if not image_secret:
+                logger.error(f"Failed to create image pull secret for session ID {session_id} with image {image}")
+                raise errors.ProgrammingError(
+                    message=f"We cannot retrieve credentials for your private image {image}. "
+                    "In order to resolve this problem, you can try to log out and back in "
+                    "and/or check that you still have permissions for the image repository."
+                )
+            # Ensure the secret is created in the cluster
+            await nb_config.k8s_v2_client.create_secret(image_secret.secret, cluster)
+
+            updated_secrets = [
+                secret for secret in (session.spec.imagePullSecrets or []) if not secret.name.endswith("-image-secret")
+            ]
+            updated_secrets.append(ImagePullSecret(name=image_pull_secret_name, adopt=True))
+            patch.spec.imagePullSecrets = updated_secrets
+
+    patch_serialized = patch.to_rfc7386()
+    if len(patch_serialized) == 0:
+        return session
+
+    return await nb_config.k8s_v2_client.patch_session(session_id, user.id, patch_serialized)
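`patch.to_rfc7386()` above serializes the accumulated changes as a JSON merge patch (RFC 7386): nested objects merge key by key and `null` deletes a key. A compact reference implementation of how such a patch is applied, purely to make the semantics concrete; this is not the code Amalthea or Kubernetes runs:

```python
def apply_merge_patch(target: object, patch: object) -> object:
    """Apply a JSON merge patch (RFC 7386) to a JSON-like value."""
    if not isinstance(patch, dict):
        return patch  # non-object patches replace the target wholesale
    result = dict(target) if isinstance(target, dict) else {}
    for key, value in patch.items():
        if value is None:
            result.pop(key, None)  # null removes the key
        else:
            result[key] = apply_merge_patch(result.get(key), value)
    return result


assert apply_merge_patch(
    {"spec": {"hibernated": False, "extraContainers": []}},
    {"spec": {"hibernated": True}},
) == {"spec": {"hibernated": True, "extraContainers": []}}
```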
+
+
+def _deduplicate_target_paths(dcs: dict[str, RCloneStorage]) -> dict[str, RCloneStorage]:
+    """Ensures that the target paths for all storages are unique.
+ + This method will attempt to de-duplicate the target_path for all items passed in, + and raise an error if it fails to generate unique target_path. + """ + result_dcs: dict[str, RCloneStorage] = {} + mount_folders: dict[str, list[str]] = {} + + def _find_mount_folder(dc: RCloneStorage) -> str: + mount_folder = dc.mount_folder + if mount_folder not in mount_folders: + return mount_folder + # 1. Try with a "-1", "-2", etc. suffix + mount_folder_try = f"{mount_folder}-{len(mount_folders[mount_folder])}" + if mount_folder_try not in mount_folders: + return mount_folder_try + # 2. Try with a random suffix + suffix = "".join([random.choice(string.ascii_lowercase + string.digits) for _ in range(4)]) # nosec B311 + mount_folder_try = f"{mount_folder}-{suffix}" + if mount_folder_try not in mount_folders: + return mount_folder_try + raise errors.ValidationError( + message=f"Could not start session because two or more data connectors ({', '.join(mount_folders[mount_folder])}) share the same mount point '{mount_folder}'" # noqa E501 + ) + + for dc_id, dc in dcs.items(): + original_mount_folder = dc.mount_folder + new_mount_folder = _find_mount_folder(dc) + # Keep track of the original mount folder here + if new_mount_folder != original_mount_folder: + logger.warning(f"Re-assigning data connector {dc_id} to mount point '{new_mount_folder}'") + dc_ids = mount_folders.get(original_mount_folder, []) + dc_ids.append(dc_id) + mount_folders[original_mount_folder] = dc_ids + # Keep track of the assigned mount folder here + dc_ids = mount_folders.get(new_mount_folder, []) + dc_ids.append(dc_id) + mount_folders[new_mount_folder] = dc_ids + result_dcs[dc_id] = dc.with_override( + override=apispec.SessionCloudStoragePost(storage_id=dc_id, target_path=new_mount_folder) + ) + + return result_dcs diff --git a/components/renku_data_services/notebooks/cr_amalthea_session.py b/components/renku_data_services/notebooks/cr_amalthea_session.py index 67b265f6d..87302cd5d 100644 --- a/components/renku_data_services/notebooks/cr_amalthea_session.py +++ b/components/renku_data_services/notebooks/cr_amalthea_session.py @@ -1,14 +1,14 @@ # generated by datamodel-codegen: # filename: -# timestamp: 2024-11-29T10:46:16+00:00 +# timestamp: 2025-07-07T13:49:34+00:00 from __future__ import annotations -from datetime import datetime +from datetime import datetime, timedelta from enum import Enum from typing import Any, Dict, List, Optional, Union -from pydantic import ConfigDict, Field +from pydantic import ConfigDict, Field, RootModel from renku_data_services.notebooks.cr_base import BaseCRD @@ -162,7 +162,15 @@ class PodAffinityTerm(BaseCRD): ) labelSelector: Optional[LabelSelector] = Field( default=None, - description="A label query over a set of resources, in this case pods.", + description="A label query over a set of resources, in this case pods.\nIf it's null, this PodAffinityTerm matches with no Pods.", + ) + matchLabelKeys: Optional[List[str]] = Field( + default=None, + description="MatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. 
The default value is empty.\nThe same key is forbidden to exist in both matchLabelKeys and labelSelector.\nAlso, matchLabelKeys cannot be set when labelSelector isn't set.", + ) + mismatchLabelKeys: Optional[List[str]] = Field( + default=None, + description="MismatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. The default value is empty.\nThe same key is forbidden to exist in both mismatchLabelKeys and labelSelector.\nAlso, mismatchLabelKeys cannot be set when labelSelector isn't set.", ) namespaceSelector: Optional[NamespaceSelector] = Field( default=None, @@ -226,7 +234,15 @@ class RequiredDuringSchedulingIgnoredDuringExecutionItem(BaseCRD): ) labelSelector: Optional[LabelSelector1] = Field( default=None, - description="A label query over a set of resources, in this case pods.", + description="A label query over a set of resources, in this case pods.\nIf it's null, this PodAffinityTerm matches with no Pods.", + ) + matchLabelKeys: Optional[List[str]] = Field( + default=None, + description="MatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. The default value is empty.\nThe same key is forbidden to exist in both matchLabelKeys and labelSelector.\nAlso, matchLabelKeys cannot be set when labelSelector isn't set.", + ) + mismatchLabelKeys: Optional[List[str]] = Field( + default=None, + description="MismatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. The default value is empty.\nThe same key is forbidden to exist in both mismatchLabelKeys and labelSelector.\nAlso, mismatchLabelKeys cannot be set when labelSelector isn't set.", ) namespaceSelector: Optional[NamespaceSelector1] = Field( default=None, @@ -294,7 +310,15 @@ class PodAffinityTerm1(BaseCRD): ) labelSelector: Optional[LabelSelector2] = Field( default=None, - description="A label query over a set of resources, in this case pods.", + description="A label query over a set of resources, in this case pods.\nIf it's null, this PodAffinityTerm matches with no Pods.", + ) + matchLabelKeys: Optional[List[str]] = Field( + default=None, + description="MatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. 
Keys that don't exist in the incoming\npod labels will be ignored. The default value is empty.\nThe same key is forbidden to exist in both matchLabelKeys and labelSelector.\nAlso, matchLabelKeys cannot be set when labelSelector isn't set.", + ) + mismatchLabelKeys: Optional[List[str]] = Field( + default=None, + description="MismatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. The default value is empty.\nThe same key is forbidden to exist in both mismatchLabelKeys and labelSelector.\nAlso, mismatchLabelKeys cannot be set when labelSelector isn't set.", ) namespaceSelector: Optional[NamespaceSelector2] = Field( default=None, @@ -358,7 +382,15 @@ class RequiredDuringSchedulingIgnoredDuringExecutionItem1(BaseCRD): ) labelSelector: Optional[LabelSelector3] = Field( default=None, - description="A label query over a set of resources, in this case pods.", + description="A label query over a set of resources, in this case pods.\nIf it's null, this PodAffinityTerm matches with no Pods.", + ) + matchLabelKeys: Optional[List[str]] = Field( + default=None, + description="MatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. The default value is empty.\nThe same key is forbidden to exist in both matchLabelKeys and labelSelector.\nAlso, matchLabelKeys cannot be set when labelSelector isn't set.", + ) + mismatchLabelKeys: Optional[List[str]] = Field( + default=None, + description="MismatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. 
The default value is empty.\nThe same key is forbidden to exist in both mismatchLabelKeys and labelSelector.\nAlso, mismatchLabelKeys cannot be set when labelSelector isn't set.", ) namespaceSelector: Optional[NamespaceSelector3] = Field( default=None, @@ -420,13 +452,17 @@ class ExtraVolumeMount(BaseCRD): ) mountPropagation: Optional[str] = Field( default=None, - description="mountPropagation determines how mounts are propagated from the host\nto container and the other way around.\nWhen not set, MountPropagationNone is used.\nThis field is beta in 1.10.", + description="mountPropagation determines how mounts are propagated from the host\nto container and the other way around.\nWhen not set, MountPropagationNone is used.\nThis field is beta in 1.10.\nWhen RecursiveReadOnly is set to IfPossible or to Enabled, MountPropagation must be None or unspecified\n(which defaults to None).", ) name: str = Field(..., description="This must match the Name of a Volume.") readOnly: Optional[bool] = Field( default=None, description="Mounted read-only if true, read-write otherwise (false or unspecified).\nDefaults to false.", ) + recursiveReadOnly: Optional[str] = Field( + default=None, + description="RecursiveReadOnly specifies whether read-only mounts should be handled\nrecursively.\n\nIf ReadOnly is false, this field has no meaning and must be unspecified.\n\nIf ReadOnly is true, and this field is set to Disabled, the mount is not made\nrecursively read-only. If this field is set to IfPossible, the mount is made\nrecursively read-only, if it is supported by the container runtime. If this\nfield is set to Enabled, the mount is made recursively read-only if it is\nsupported by the container runtime, otherwise the pod will not be started and\nan error will be generated to indicate the reason.\n\nIf this field is set to IfPossible or Enabled, MountPropagation must be set to\nNone (or be unspecified, which defaults to None).\n\nIf this field is not specified, it is treated as an equivalent of Disabled.", + ) subPath: Optional[str] = Field( default=None, description="Path within the volume from which the container's volume should be mounted.\nDefaults to \"\" (volume's root).", @@ -445,13 +481,17 @@ class SecretRef(BaseCRD): default=None, description="If the secret is adopted then the operator will delete the secret when the custom resource that uses it is deleted.", ) - key: str + key: Optional[str] = Field( + default=None, + description="The key is optional because it may not be relevant depending on where or how the secret is used.\nFor example, for authentication see the `secretRef` field in `spec.authentication`\nfor more details.", + ) name: str class Type(Enum): token = "token" oauth2proxy = "oauth2proxy" + oidc = "oidc" class Authentication(BaseCRD): @@ -465,7 +505,7 @@ class Authentication(BaseCRD): ) secretRef: SecretRef = Field( ..., - description="Kubernetes secret that contains the authentication configuration\nFor `token` a yaml file with the following keys is required:\n - token: the token value used to authenticate the user\n - cookie_key: the name of the cookie where the token will be saved and searched for\nFor `oauth2proxy` please see https://oauth2-proxy.github.io/oauth2-proxy/configuration/overview#config-file.\nNote that the `upstream` and `http_address` configuration options cannot be set from the secret because\nthe operator knows how to set these options to the proper values.", + description='Kubernetes secret that contains the authentication configuration.\nFor `token` a single key 
in the secret should have a yaml file with the following format:\n - token: the token value used to authenticate the user\n - cookie_key: the name of the cookie where the token will be saved and searched for\n - the `key` field in `secretRef` should point to the the `key` of the Kubernetes secret that has this format.\nFor `oauth2proxy` a single key in the secret should have the configuration:\n - see https://oauth2-proxy.github.io/oauth2-proxy/configuration/overview#config-file\n - the `upstream` and `http_address` configuration options are ignored and overridden by the operator\n - the `key` field in `secretRef` should point to the the `key` of the Kubernetes secret that has this format.\nFor `oidc` the secret should have the following keys with the corresponding values:\n - OIDC_CLIENT_ID - the OIDC client ID\n - OIDC_CLIENT_SECRET - the OIDC client secret\n - OIDC_ISSUER_URL - the OIDC issuer url\n - AUTHORIZED_EMAILS - newline delimited list of user emails that should have access the session\n - ALLOW_UNVERIFIED_EMAILS - allow users with unverified emails to authenticate, set to "true" or "false"\n - the `key` field in `secretRef` should be left unset or it will be ignored', ) type: Type @@ -505,7 +545,7 @@ class CodeRepository(BaseCRD): clonePath: str = Field( default=".", description="Path relative to the session working directory where the repository should be cloned into.", - example="repositories/project1", + examples=["repositories/project1"], ) cloningConfigSecretRef: Optional[CloningConfigSecretRef] = Field( default=None, @@ -518,12 +558,12 @@ class CodeRepository(BaseCRD): remote: str = Field( ..., description="The HTTP url to the code repository", - example="https://github.com/SwissDataScienceCenter/renku", + examples=["https://github.com/SwissDataScienceCenter/renku"], ) revision: Optional[str] = Field( default=None, description="The tag, branch or commit SHA to checkout, if omitted then will be the tip of the default branch of the repo", - example="main", + examples=["main"], ) type: Type1 = Field( default="git", @@ -535,39 +575,28 @@ class Culling(BaseCRD): model_config = ConfigDict( extra="allow", ) - maxAge: Optional[str] = Field( + maxAge: Optional[timedelta] = Field( default=None, description='The maximum allowed age for a session, regardless of whether it\nis active or not. When the threshold is reached the session is hibernated.\nA value of zero indicates that Amalthea will not automatically hibernate\nthe session based on its age.\nGolang\'s time.ParseDuration is used to parse this, so values like 2h5min will work,\nvalid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h".', ) - maxFailedDuration: Optional[str] = Field( + maxFailedDuration: Optional[timedelta] = Field( default=None, description='How long can a server be in failed state before it gets hibernated. A\nvalue of zero indicates that the server will not be automatically\nhibernated by Amalthea if it is failing.\nGolang\'s time.ParseDuration is used to parse this, so values like 2h5min will work,\nvalid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h".', ) - maxHibernatedDuration: Optional[str] = Field( + maxHibernatedDuration: Optional[timedelta] = Field( default=None, description='How long can a session be in hibernated state before\nit gets completely deleted. 
A value of zero indicates that hibernated servers\nwill not be automatically be deleted by Amalthea after a period of time.\nGolang\'s time.ParseDuration is used to parse this, so values like 2h5min will work,\nvalid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h".', ) - maxIdleDuration: Optional[str] = Field( + maxIdleDuration: Optional[timedelta] = Field( default=None, description='How long should a server be idle for before it is hibernated. A value of\nzero indicates that Amalthea will not automatically hibernate inactive sessions.\nGolang\'s time.ParseDuration is used to parse this, so values like 2h5min will work,\nvalid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h".', ) - maxStartingDuration: Optional[str] = Field( + maxStartingDuration: Optional[timedelta] = Field( default=None, description='How long can a server be in starting state before it gets hibernated. A\nvalue of zero indicates that the server will not be automatically hibernated\nby Amalthea because it took to long to start.\nGolang\'s time.ParseDuration is used to parse this, so values like 2h5min will work,\nvalid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h".', ) -class SecretRef1(BaseCRD): - model_config = ConfigDict( - extra="allow", - ) - adopt: Optional[bool] = Field( - default=None, - description="If the secret is adopted then the operator will delete the secret when the custom resource that uses it is deleted.", - ) - name: str - - class Type2(Enum): rclone = "rclone" @@ -582,9 +611,9 @@ class DataSource(BaseCRD): mountPath: str = Field( default="data", description="Path relative to the session working directory where the data should be mounted", - example="data/storages", + examples=["data/storages"], ) - secretRef: Optional[SecretRef1] = Field( + secretRef: Optional[SecretRef] = Field( default=None, description="The secret containing the configuration or credentials needed for access to the data.\nThe format of the configuration that is expected depends on the storage type.\nNOTE: define all values in a single key of the Kubernetes secret.\nrclone: any valid rclone configuration for a single remote, see the output of `rclone config providers` for validation and format.", ) @@ -596,9 +625,9 @@ class ConfigMapKeyRef(BaseCRD): extra="allow", ) key: str = Field(..., description="The key to select.") - name: Optional[str] = Field( - default=None, - description="Name of the referent.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Add other useful fields. apiVersion, kind, uid?", + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. 
Instances of this type with an empty value here are\nalmost certainly wrong.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", ) optional: Optional[bool] = Field( default=None, @@ -619,6 +648,22 @@ class FieldRef(BaseCRD): ) +class Divisor(RootModel[int]): + root: int = Field( + ..., + description='Specifies the output format of the exposed resources, defaults to "1"', + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Divisor1(RootModel[str]): + root: str = Field( + ..., + description='Specifies the output format of the exposed resources, defaults to "1"', + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + class ResourceFieldRef(BaseCRD): model_config = ConfigDict( extra="allow", @@ -627,7 +672,7 @@ class ResourceFieldRef(BaseCRD): default=None, description="Container name: required for volumes, optional for env vars", ) - divisor: Optional[Union[int, str]] = Field( + divisor: Optional[Union[Divisor, Divisor1]] = Field( default=None, description='Specifies the output format of the exposed resources, defaults to "1"', ) @@ -642,9 +687,9 @@ class SecretKeyRef(BaseCRD): ..., description="The key of the secret to select from. Must be a valid secret key.", ) - name: Optional[str] = Field( - default=None, - description="Name of the referent.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Add other useful fields. apiVersion, kind, uid?", + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", ) optional: Optional[bool] = Field( default=None, @@ -693,9 +738,9 @@ class ConfigMapRef(BaseCRD): model_config = ConfigDict( extra="allow", ) - name: Optional[str] = Field( - default=None, - description="Name of the referent.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Add other useful fields. apiVersion, kind, uid?", + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", ) optional: Optional[bool] = Field( default=None, description="Specify whether the ConfigMap must be defined" @@ -706,9 +751,9 @@ class SecretRef2(BaseCRD): model_config = ConfigDict( extra="allow", ) - name: Optional[str] = Field( - default=None, - description="Name of the referent.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Add other useful fields. apiVersion, kind, uid?", + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. 
Instances of this type with an empty value here are\nalmost certainly wrong.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", ) optional: Optional[bool] = Field( default=None, description="Specify whether the Secret must be defined" @@ -724,7 +769,7 @@ class EnvFromItem(BaseCRD): ) prefix: Optional[str] = Field( default=None, - description="An optional identifier to prepend to each key in the ConfigMap. Must be a C_IDENTIFIER.", + description="Optional text to prepend to the name of each environment variable. Must be a C_IDENTIFIER.", ) secretRef: Optional[SecretRef2] = Field( default=None, description="The Secret to select from" @@ -777,6 +822,13 @@ class HttpGet(BaseCRD): ) +class Sleep(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + seconds: int = Field(..., description="Seconds is the number of seconds to sleep.") + + class TcpSocket(BaseCRD): model_config = ConfigDict( extra="allow", @@ -796,14 +848,19 @@ class PostStart(BaseCRD): extra="allow", ) exec: Optional[Exec] = Field( - default=None, description="Exec specifies the action to take." + default=None, + description="Exec specifies a command to execute in the container.", ) httpGet: Optional[HttpGet] = Field( - default=None, description="HTTPGet specifies the http request to perform." + default=None, description="HTTPGet specifies an HTTP GET request to perform." + ) + sleep: Optional[Sleep] = Field( + default=None, + description="Sleep represents a duration that the container should sleep.", ) tcpSocket: Optional[TcpSocket] = Field( default=None, - description="Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept\nfor the backward compatibility. There are no validation of this field and\nlifecycle hooks will fail in runtime when tcp handler is specified.", + description="Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept\nfor backward compatibility. There is no validation of this field and\nlifecycle hooks will fail at runtime when it is specified.", ) @@ -837,14 +894,19 @@ class PreStop(BaseCRD): extra="allow", ) exec: Optional[Exec] = Field( - default=None, description="Exec specifies the action to take." + default=None, + description="Exec specifies a command to execute in the container.", ) httpGet: Optional[HttpGet1] = Field( - default=None, description="HTTPGet specifies the http request to perform." + default=None, description="HTTPGet specifies an HTTP GET request to perform." + ) + sleep: Optional[Sleep] = Field( + default=None, + description="Sleep represents a duration that the container should sleep.", ) tcpSocket: Optional[TcpSocket] = Field( default=None, - description="Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept\nfor the backward compatibility. There are no validation of this field and\nlifecycle hooks will fail in runtime when tcp handler is specified.", + description="Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept\nfor backward compatibility. There is no validation of this field and\nlifecycle hooks will fail at runtime when it is specified.", ) @@ -860,6 +922,10 @@ class Lifecycle(BaseCRD): default=None, description="PreStop is called immediately before a container is terminated due to an\nAPI request or management event such as liveness/startup probe failure,\npreemption, resource contention, etc. The handler is not called if the\ncontainer crashes or exits. The Pod's termination grace period countdown begins before the\nPreStop hook is executed. 
Regardless of the outcome of the handler, the\ncontainer will eventually terminate within the Pod's termination grace\nperiod (unless delayed by finalizers). Other management of the container blocks until the hook completes\nor until the termination grace period is reached.\nMore info: https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/#container-hooks", ) + stopSignal: Optional[str] = Field( + default=None, + description="StopSignal defines which signal will be sent to a container when it is being stopped.\nIf not specified, the default is defined by the container runtime in use.\nStopSignal can only be set for Pods with a non-empty .spec.os.name", + ) class Grpc(BaseCRD): @@ -870,9 +936,9 @@ class Grpc(BaseCRD): ..., description="Port number of the gRPC service. Number must be in the range 1 to 65535.", ) - service: Optional[str] = Field( - default=None, - description="Service is the name of the service to place in the gRPC HealthCheckRequest\n(see https://github.com/grpc/grpc/blob/master/doc/health-checking.md).\n\n\nIf this is not specified, the default behavior is defined by gRPC.", + service: str = Field( + default="", + description="Service is the name of the service to place in the gRPC HealthCheckRequest\n(see https://github.com/grpc/grpc/blob/master/doc/health-checking.md).\n\nIf this is not specified, the default behavior is defined by gRPC.", ) @@ -906,17 +972,18 @@ class LivenessProbe(BaseCRD): extra="allow", ) exec: Optional[Exec] = Field( - default=None, description="Exec specifies the action to take." + default=None, + description="Exec specifies a command to execute in the container.", ) failureThreshold: Optional[int] = Field( default=None, description="Minimum consecutive failures for the probe to be considered failed after having succeeded.\nDefaults to 3. Minimum value is 1.", ) grpc: Optional[Grpc] = Field( - default=None, description="GRPC specifies an action involving a GRPC port." + default=None, description="GRPC specifies a GRPC HealthCheckRequest." ) httpGet: Optional[HttpGet2] = Field( - default=None, description="HTTPGet specifies the http request to perform." + default=None, description="HTTPGet specifies an HTTP GET request to perform." ) initialDelaySeconds: Optional[int] = Field( default=None, @@ -931,7 +998,7 @@ class LivenessProbe(BaseCRD): description="Minimum consecutive successes for the probe to be considered successful after having failed.\nDefaults to 1. Must be 1 for liveness and startup. Minimum value is 1.", ) tcpSocket: Optional[TcpSocket] = Field( - default=None, description="TCPSocket specifies an action involving a TCP port." + default=None, description="TCPSocket specifies a connection to a TCP port." ) terminationGracePeriodSeconds: Optional[int] = Field( default=None, @@ -998,17 +1065,18 @@ class ReadinessProbe(BaseCRD): extra="allow", ) exec: Optional[Exec] = Field( - default=None, description="Exec specifies the action to take." + default=None, + description="Exec specifies a command to execute in the container.", ) failureThreshold: Optional[int] = Field( default=None, description="Minimum consecutive failures for the probe to be considered failed after having succeeded.\nDefaults to 3. Minimum value is 1.", ) grpc: Optional[Grpc] = Field( - default=None, description="GRPC specifies an action involving a GRPC port." + default=None, description="GRPC specifies a GRPC HealthCheckRequest." ) httpGet: Optional[HttpGet3] = Field( - default=None, description="HTTPGet specifies the http request to perform." 
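A quick sanity check of the new lifecycle fields, kept outside the patch itself: assuming `BaseCRD` behaves as the standard pydantic v2 base model used throughout this file, the regenerated models round-trip the new `sleep` handler.

# Sketch, not part of the patch: Lifecycle, PostStart and Sleep are the
# regenerated models above; model_dump() assumes BaseCRD is a pydantic v2
# BaseModel, as the ConfigDict/RootModel imports elsewhere suggest.
hook = Lifecycle(postStart=PostStart(sleep=Sleep(seconds=5)))
assert hook.model_dump(exclude_none=True) == {"postStart": {"sleep": {"seconds": 5}}}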
+ default=None, description="HTTPGet specifies an HTTP GET request to perform." ) initialDelaySeconds: Optional[int] = Field( default=None, @@ -1023,7 +1091,7 @@ class ReadinessProbe(BaseCRD): description="Minimum consecutive successes for the probe to be considered successful after having failed.\nDefaults to 1. Must be 1 for liveness and startup. Minimum value is 1.", ) tcpSocket: Optional[TcpSocket] = Field( - default=None, description="TCPSocket specifies an action involving a TCP port." + default=None, description="TCPSocket specifies a connection to a TCP port." ) terminationGracePeriodSeconds: Optional[int] = Field( default=None, @@ -1057,6 +1125,38 @@ class Claim(BaseCRD): ..., description="Name must match the name of one entry in pod.spec.resourceClaims of\nthe Pod where this field is used. It makes that resource available\ninside a container.", ) + request: Optional[str] = Field( + default=None, + description="Request is the name chosen for a request in the referenced claim.\nIf empty, everything from the claim is made available, otherwise\nonly the result of this request.", + ) + + +class Limits(RootModel[int]): + root: int = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Limits1(RootModel[str]): + root: str = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Requests(RootModel[int]): + root: int = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Requests1(RootModel[str]): + root: str = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) class Resources(BaseCRD): @@ -1065,18 +1165,32 @@ class Resources(BaseCRD): ) claims: Optional[List[Claim]] = Field( default=None, - description="Claims lists the names of resources, defined in spec.resourceClaims,\nthat are used by this container.\n\n\nThis is an alpha field and requires enabling the\nDynamicResourceAllocation feature gate.\n\n\nThis field is immutable. It can only be set for containers.", + description="Claims lists the names of resources, defined in spec.resourceClaims,\nthat are used by this container.\n\nThis is an alpha field and requires enabling the\nDynamicResourceAllocation feature gate.\n\nThis field is immutable. It can only be set for containers.", ) - limits: Optional[Dict[str, Union[int, str]]] = Field( + limits: Optional[Dict[str, Union[Limits, Limits1]]] = Field( default=None, description="Limits describes the maximum amount of compute resources allowed.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) - requests: Optional[Dict[str, Union[int, str]]] = Field( + requests: Optional[Dict[str, Union[Requests, Requests1]]] = Field( default=None, description="Requests describes the minimum amount of compute resources required.\nIf Requests is omitted for a container, it defaults to Limits if that is explicitly specified,\notherwise to an implementation-defined value. 
Requests cannot exceed Limits.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) +class AppArmorProfile(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + localhostProfile: Optional[str] = Field( + default=None, + description='localhostProfile indicates a profile loaded on the node that should be used.\nThe profile must be preconfigured on the node to work.\nMust match the loaded name of the profile.\nMust be set if and only if type is "Localhost".', + ) + type: str = Field( + ..., + description="type indicates which kind of AppArmor profile will be applied.\nValid options are:\n Localhost - a profile pre-loaded on the node.\n RuntimeDefault - the container runtime's default profile.\n Unconfined - no AppArmor enforcement.", + ) + + class Capabilities(BaseCRD): model_config = ConfigDict( extra="allow", @@ -1117,7 +1231,7 @@ class SeccompProfile(BaseCRD): ) type: str = Field( ..., - description="type indicates which kind of seccomp profile will be applied.\nValid options are:\n\n\nLocalhost - a profile defined in a file on the node should be used.\nRuntimeDefault - the container runtime default profile should be used.\nUnconfined - no profile should be applied.", + description="type indicates which kind of seccomp profile will be applied.\nValid options are:\n\nLocalhost - a profile defined in a file on the node should be used.\nRuntimeDefault - the container runtime default profile should be used.\nUnconfined - no profile should be applied.", ) @@ -1151,6 +1265,10 @@ class SecurityContext(BaseCRD): default=None, description="AllowPrivilegeEscalation controls whether a process can gain more\nprivileges than its parent process. This bool directly controls if\nthe no_new_privs flag will be set on the container process.\nAllowPrivilegeEscalation is true always when the container is:\n1) run as Privileged\n2) has CAP_SYS_ADMIN\nNote that this field cannot be set when spec.os.name is windows.", ) + appArmorProfile: Optional[AppArmorProfile] = Field( + default=None, + description="appArmorProfile is the AppArmor options to use by this container. If set, this profile\noverrides the pod's appArmorProfile.\nNote that this field cannot be set when spec.os.name is windows.", + ) capabilities: Optional[Capabilities] = Field( default=None, description="The capabilities to add/drop when running containers.\nDefaults to the default set of capabilities granted by the container runtime.\nNote that this field cannot be set when spec.os.name is windows.", @@ -1161,7 +1279,7 @@ class SecurityContext(BaseCRD): ) procMount: Optional[str] = Field( default=None, - description="procMount denotes the type of proc mount to use for the containers.\nThe default is DefaultProcMount which uses the container runtime defaults for\nreadonly paths and masked paths.\nThis requires the ProcMountType feature flag to be enabled.\nNote that this field cannot be set when spec.os.name is windows.", + description="procMount denotes the type of proc mount to use for the containers.\nThe default value is Default which uses the container runtime defaults for\nreadonly paths and masked paths.\nThis requires the ProcMountType feature flag to be enabled.\nNote that this field cannot be set when spec.os.name is windows.", ) readOnlyRootFilesystem: Optional[bool] = Field( default=None, @@ -1223,17 +1341,18 @@ class StartupProbe(BaseCRD): extra="allow", ) exec: Optional[Exec] = Field( - default=None, description="Exec specifies the action to take." 
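Replacing the bare `Union[int, str]` annotations with the patterned `RootModel` wrappers above means resource quantities are now validated against the Kubernetes quantity grammar rather than passing through as arbitrary strings. A minimal sketch of the intended behaviour, assuming pydantic v2's smart-union coercion:

# Sketch, not part of the patch: plain ints and quantity strings coerce into
# the Limits/Requests wrappers when the Resources model validates its dicts.
res = Resources(limits={"cpu": 2, "memory": "1Gi"}, requests={"memory": "512Mi"})
# A malformed quantity such as {"memory": "lots"} is expected to raise
# pydantic.ValidationError, since it matches neither wrapper's constraints.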
+ default=None, + description="Exec specifies a command to execute in the container.", ) failureThreshold: Optional[int] = Field( default=None, description="Minimum consecutive failures for the probe to be considered failed after having succeeded.\nDefaults to 3. Minimum value is 1.", ) grpc: Optional[Grpc] = Field( - default=None, description="GRPC specifies an action involving a GRPC port." + default=None, description="GRPC specifies a GRPC HealthCheckRequest." ) httpGet: Optional[HttpGet4] = Field( - default=None, description="HTTPGet specifies the http request to perform." + default=None, description="HTTPGet specifies an HTTP GET request to perform." ) initialDelaySeconds: Optional[int] = Field( default=None, @@ -1248,7 +1367,7 @@ class StartupProbe(BaseCRD): description="Minimum consecutive successes for the probe to be considered successful after having failed.\nDefaults to 1. Must be 1 for liveness and startup. Minimum value is 1.", ) tcpSocket: Optional[TcpSocket] = Field( - default=None, description="TCPSocket specifies an action involving a TCP port." + default=None, description="TCPSocket specifies a connection to a TCP port." ) terminationGracePeriodSeconds: Optional[int] = Field( default=None, @@ -1284,13 +1403,17 @@ class VolumeMount(BaseCRD): ) mountPropagation: Optional[str] = Field( default=None, - description="mountPropagation determines how mounts are propagated from the host\nto container and the other way around.\nWhen not set, MountPropagationNone is used.\nThis field is beta in 1.10.", + description="mountPropagation determines how mounts are propagated from the host\nto container and the other way around.\nWhen not set, MountPropagationNone is used.\nThis field is beta in 1.10.\nWhen RecursiveReadOnly is set to IfPossible or to Enabled, MountPropagation must be None or unspecified\n(which defaults to None).", ) name: str = Field(..., description="This must match the Name of a Volume.") readOnly: Optional[bool] = Field( default=None, description="Mounted read-only if true, read-write otherwise (false or unspecified).\nDefaults to false.", ) + recursiveReadOnly: Optional[str] = Field( + default=None, + description="RecursiveReadOnly specifies whether read-only mounts should be handled\nrecursively.\n\nIf ReadOnly is false, this field has no meaning and must be unspecified.\n\nIf ReadOnly is true, and this field is set to Disabled, the mount is not made\nrecursively read-only. If this field is set to IfPossible, the mount is made\nrecursively read-only, if it is supported by the container runtime. If this\nfield is set to Enabled, the mount is made recursively read-only if it is\nsupported by the container runtime, otherwise the pod will not be started and\nan error will be generated to indicate the reason.\n\nIf this field is set to IfPossible or Enabled, MountPropagation must be set to\nNone (or be unspecified, which defaults to None).\n\nIf this field is not specified, it is treated as an equivalent of Disabled.", + ) subPath: Optional[str] = Field( default=None, description="Path within the volume from which the container's volume should be mounted.\nDefaults to \"\" (volume's root).", @@ -1408,7 +1531,7 @@ class AwsElasticBlockStore(BaseCRD): ) fsType: Optional[str] = Field( default=None, - description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". 
Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore\nTODO: how do we prevent errors in the filesystem from compromising the machine', + description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore', ) partition: Optional[int] = Field( default=None, @@ -1438,16 +1561,16 @@ class AzureDisk(BaseCRD): diskURI: str = Field( ..., description="diskURI is the URI of data disk in the blob storage" ) - fsType: Optional[str] = Field( - default=None, + fsType: str = Field( + default="ext4", description='fsType is Filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.', ) kind: Optional[str] = Field( default=None, description="kind expected values are Shared: multiple blob disks per storage account Dedicated: single blob disk per storage account Managed: azure managed data disk (only in managed availability set). defaults to shared", ) - readOnly: Optional[bool] = Field( - default=None, + readOnly: bool = Field( + default=False, description="readOnly Defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", ) @@ -1471,9 +1594,9 @@ class SecretRef3(BaseCRD): model_config = ConfigDict( extra="allow", ) - name: Optional[str] = Field( - default=None, - description="Name of the referent.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Add other useful fields. apiVersion, kind, uid?", + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", ) @@ -1556,9 +1679,9 @@ class ConfigMap(BaseCRD): default=None, description="items if unspecified, each key-value pair in the Data field of the referenced\nConfigMap will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the ConfigMap,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", ) - name: Optional[str] = Field( - default=None, - description="Name of the referent.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Add other useful fields. apiVersion, kind, uid?", + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. 
Instances of this type with an empty value here are\nalmost certainly wrong.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", ) optional: Optional[bool] = Field( default=None, @@ -1570,9 +1693,9 @@ class NodePublishSecretRef(BaseCRD): model_config = ConfigDict( extra="allow", ) - name: Optional[str] = Field( - default=None, - description="Name of the referent.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Add other useful fields. apiVersion, kind, uid?", + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", ) @@ -1602,13 +1725,44 @@ class Csi(BaseCRD): ) +class Divisor2(RootModel[int]): + root: int = Field( + ..., + description='Specifies the output format of the exposed resources, defaults to "1"', + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Divisor3(RootModel[str]): + root: str = Field( + ..., + description='Specifies the output format of the exposed resources, defaults to "1"', + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class ResourceFieldRef1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + containerName: Optional[str] = Field( + default=None, + description="Container name: required for volumes, optional for env vars", + ) + divisor: Optional[Union[Divisor2, Divisor3]] = Field( + default=None, + description='Specifies the output format of the exposed resources, defaults to "1"', + ) + resource: str = Field(..., description="Required: resource to select") + + class Item1(BaseCRD): model_config = ConfigDict( extra="allow", ) fieldRef: Optional[FieldRef] = Field( default=None, - description="Required: Selects a field of the pod: only annotations, labels, name and namespace are supported.", + description="Required: Selects a field of the pod: only annotations, labels, name, namespace and uid are supported.", ) mode: Optional[int] = Field( default=None, @@ -1618,7 +1772,7 @@ class Item1(BaseCRD): ..., description="Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. 
The first item of the relative path must not start with '..'", ) - resourceFieldRef: Optional[ResourceFieldRef] = Field( + resourceFieldRef: Optional[ResourceFieldRef1] = Field( default=None, description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.", ) @@ -1637,6 +1791,22 @@ class DownwardAPI(BaseCRD): ) +class SizeLimit(RootModel[int]): + root: int = Field( + ..., + description="sizeLimit is the total amount of local storage required for this EmptyDir volume.\nThe size limit is also applicable for memory medium.\nThe maximum usage on memory medium EmptyDir would be the minimum value between\nthe SizeLimit specified here and the sum of memory limits of all containers in a pod.\nThe default is nil which means that the limit is undefined.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir", + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class SizeLimit1(RootModel[str]): + root: str = Field( + ..., + description="sizeLimit is the total amount of local storage required for this EmptyDir volume.\nThe size limit is also applicable for memory medium.\nThe maximum usage on memory medium EmptyDir would be the minimum value between\nthe SizeLimit specified here and the sum of memory limits of all containers in a pod.\nThe default is nil which means that the limit is undefined.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir", + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + class EmptyDir(BaseCRD): model_config = ConfigDict( extra="allow", @@ -1645,7 +1815,7 @@ class EmptyDir(BaseCRD): default=None, description='medium represents what type of storage medium should back this directory.\nThe default is "" which means to use the node\'s default medium.\nMust be an empty string (default) or Memory.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir', ) - sizeLimit: Optional[Union[int, str]] = Field( + sizeLimit: Optional[Union[SizeLimit, SizeLimit1]] = Field( default=None, description="sizeLimit is the total amount of local storage required for this EmptyDir volume.\nThe size limit is also applicable for memory medium.\nThe maximum usage on memory medium EmptyDir would be the minimum value between\nthe SizeLimit specified here and the sum of memory limits of all containers in a pod.\nThe default is nil which means that the limit is undefined.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir", ) @@ -1679,19 +1849,43 @@ class DataSourceRef(BaseCRD): ) +class Limits2(RootModel[int]): + root: int = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Limits3(RootModel[str]): + root: str = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Requests2(RootModel[int]): + root: int = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Requests3(RootModel[str]): + root: str = Field( + ..., + 
pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + class Resources1(BaseCRD): model_config = ConfigDict( extra="allow", ) - claims: Optional[List[Claim]] = Field( - default=None, - description="Claims lists the names of resources, defined in spec.resourceClaims,\nthat are used by this container.\n\n\nThis is an alpha field and requires enabling the\nDynamicResourceAllocation feature gate.\n\n\nThis field is immutable. It can only be set for containers.", - ) - limits: Optional[Dict[str, Union[int, str]]] = Field( + limits: Optional[Dict[str, Union[Limits2, Limits3]]] = Field( default=None, description="Limits describes the maximum amount of compute resources allowed.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) - requests: Optional[Dict[str, Union[int, str]]] = Field( + requests: Optional[Dict[str, Union[Requests2, Requests3]]] = Field( default=None, description="Requests describes the minimum amount of compute resources required.\nIf Requests is omitted for a container, it defaults to Limits if that is explicitly specified,\notherwise to an implementation-defined value. Requests cannot exceed Limits.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) @@ -1739,6 +1933,10 @@ class Spec1(BaseCRD): default=None, description="storageClassName is the name of the StorageClass required by the claim.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1", ) + volumeAttributesClassName: Optional[str] = Field( + default=None, + description="volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim.\nIf specified, the CSI driver will create or update the volume with the attributes defined\nin the corresponding VolumeAttributesClass. This has a different purpose than storageClassName,\nit can be changed after the claim is created. An empty string value means that no VolumeAttributesClass\nwill be applied to the claim but it's not allowed to reset this field to empty string once it is set.\nIf unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass\nwill be set by the persistentvolume controller if it exists.\nIf the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be\nset to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource\nexists.\nMore info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/\n(Beta) Using this field requires the VolumeAttributesClass feature gate to be enabled (off by default).", + ) volumeMode: Optional[str] = Field( default=None, description="volumeMode defines what type of volume is required by the claim.\nValue of Filesystem is implied when not included in claim spec.", @@ -1769,7 +1967,7 @@ class Ephemeral(BaseCRD): ) volumeClaimTemplate: Optional[VolumeClaimTemplate] = Field( default=None, - description="Will be used to create a stand-alone PVC to provision the volume.\nThe pod in which this EphemeralVolumeSource is embedded will be the\nowner of the PVC, i.e. the PVC will be deleted together with the\npod. The name of the PVC will be `-` where\n`` is the name from the `PodSpec.Volumes` array\nentry. 
Pod validation will reject the pod if the concatenated name\nis not valid for a PVC (for example, too long).\n\n\nAn existing PVC with that name that is not owned by the pod\nwill *not* be used for the pod to avoid using an unrelated\nvolume by mistake. Starting the pod is then blocked until\nthe unrelated PVC is removed. If such a pre-created PVC is\nmeant to be used by the pod, the PVC has to updated with an\nowner reference to the pod once the pod exists. Normally\nthis should not be necessary, but it may be useful when\nmanually reconstructing a broken cluster.\n\n\nThis field is read-only and no changes will be made by Kubernetes\nto the PVC after it has been created.\n\n\nRequired, must not be nil.", + description="Will be used to create a stand-alone PVC to provision the volume.\nThe pod in which this EphemeralVolumeSource is embedded will be the\nowner of the PVC, i.e. the PVC will be deleted together with the\npod. The name of the PVC will be `-` where\n`` is the name from the `PodSpec.Volumes` array\nentry. Pod validation will reject the pod if the concatenated name\nis not valid for a PVC (for example, too long).\n\nAn existing PVC with that name that is not owned by the pod\nwill *not* be used for the pod to avoid using an unrelated\nvolume by mistake. Starting the pod is then blocked until\nthe unrelated PVC is removed. If such a pre-created PVC is\nmeant to be used by the pod, the PVC has to updated with an\nowner reference to the pod once the pod exists. Normally\nthis should not be necessary, but it may be useful when\nmanually reconstructing a broken cluster.\n\nThis field is read-only and no changes will be made by Kubernetes\nto the PVC after it has been created.\n\nRequired, must not be nil.", ) @@ -1779,7 +1977,7 @@ class Fc(BaseCRD): ) fsType: Optional[str] = Field( default=None, - description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nTODO: how do we prevent errors in the filesystem from compromising the machine', + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.', ) lun: Optional[int] = Field( default=None, description="lun is Optional: FC target lun number" @@ -1843,7 +2041,7 @@ class GcePersistentDisk(BaseCRD): ) fsType: Optional[str] = Field( default=None, - description='fsType is filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk\nTODO: how do we prevent errors in the filesystem from compromising the machine', + description='fsType is filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk', ) partition: Optional[int] = Field( default=None, @@ -1906,6 +2104,20 @@ class HostPath(BaseCRD): ) +class Image(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + pullPolicy: Optional[str] = Field( + default=None, + description="Policy for pulling OCI objects. 
Possible values are:\nAlways: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails.\nNever: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present.\nIfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails.\nDefaults to Always if :latest tag is specified, or IfNotPresent otherwise.", + ) + reference: Optional[str] = Field( + default=None, + description="Required: Image or artifact reference to be used.\nBehaves in the same way as pod.spec.containers[*].image.\nPull secrets will be assembled in the same way as for the container image by looking up node credentials, SA image pull secrets, and pod spec image pull secrets.\nMore info: https://kubernetes.io/docs/concepts/containers/images\nThis field is optional to allow higher level config management to default or override\ncontainer images in workload controllers like Deployments and StatefulSets.", + ) + + class Iscsi(BaseCRD): model_config = ConfigDict( extra="allow", @@ -1920,15 +2132,15 @@ class Iscsi(BaseCRD): ) fsType: Optional[str] = Field( default=None, - description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi\nTODO: how do we prevent errors in the filesystem from compromising the machine', + description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi', ) initiatorName: Optional[str] = Field( default=None, description="initiatorName is the custom iSCSI Initiator Name.\nIf initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface\n: will be created for the connection.", ) iqn: str = Field(..., description="iqn is the target iSCSI Qualified Name.") - iscsiInterface: Optional[str] = Field( - default=None, + iscsiInterface: str = Field( + default="default", description="iscsiInterface is the interface Name that uses an iSCSI transport.\nDefaults to 'default' (tcp).", ) lun: int = Field(..., description="lun represents iSCSI Target Lun number.") @@ -2013,6 +2225,45 @@ class PortworxVolume(BaseCRD): ) +class LabelSelector4(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + matchExpressions: Optional[List[MatchExpression2]] = Field( + default=None, + description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", + ) + matchLabels: Optional[Dict[str, str]] = Field( + default=None, + description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', + ) + + +class ClusterTrustBundle(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + labelSelector: Optional[LabelSelector4] = Field( + default=None, + description='Select all ClusterTrustBundles that match this label selector. Only has\neffect if signerName is set. 
Mutually-exclusive with name. If unset,\ninterpreted as "match nothing". If set but empty, interpreted as "match\neverything".', + ) + name: Optional[str] = Field( + default=None, + description="Select a single ClusterTrustBundle by object name. Mutually-exclusive\nwith signerName and labelSelector.", + ) + optional: Optional[bool] = Field( + default=None, + description="If true, don't block pod startup if the referenced ClusterTrustBundle(s)\naren't available. If using name, then the named ClusterTrustBundle is\nallowed not to exist. If using signerName, then the combination of\nsignerName and labelSelector is allowed to match zero\nClusterTrustBundles.", + ) + path: str = Field( + ..., description="Relative path from the volume root to write the bundle." + ) + signerName: Optional[str] = Field( + default=None, + description="Select all ClusterTrustBundles that match this signer name.\nMutually-exclusive with name. The contents of all selected\nClusterTrustBundles will be unified and deduplicated.", + ) + + class Item2(BaseCRD): model_config = ConfigDict( extra="allow", @@ -2036,9 +2287,9 @@ class ConfigMap1(BaseCRD): default=None, description="items if unspecified, each key-value pair in the Data field of the referenced\nConfigMap will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the ConfigMap,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", ) - name: Optional[str] = Field( - default=None, - description="Name of the referent.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Add other useful fields. apiVersion, kind, uid?", + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. 
Instances of this type with an empty value here are\nalmost certainly wrong.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", ) optional: Optional[bool] = Field( default=None, @@ -2046,13 +2297,44 @@ class ConfigMap1(BaseCRD): ) +class Divisor4(RootModel[int]): + root: int = Field( + ..., + description='Specifies the output format of the exposed resources, defaults to "1"', + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Divisor5(RootModel[str]): + root: str = Field( + ..., + description='Specifies the output format of the exposed resources, defaults to "1"', + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class ResourceFieldRef2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + containerName: Optional[str] = Field( + default=None, + description="Container name: required for volumes, optional for env vars", + ) + divisor: Optional[Union[Divisor4, Divisor5]] = Field( + default=None, + description='Specifies the output format of the exposed resources, defaults to "1"', + ) + resource: str = Field(..., description="Required: resource to select") + + class Item3(BaseCRD): model_config = ConfigDict( extra="allow", ) fieldRef: Optional[FieldRef] = Field( default=None, - description="Required: Selects a field of the pod: only annotations, labels, name and namespace are supported.", + description="Required: Selects a field of the pod: only annotations, labels, name, namespace and uid are supported.", ) mode: Optional[int] = Field( default=None, @@ -2062,7 +2344,7 @@ class Item3(BaseCRD): ..., description="Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'", ) - resourceFieldRef: Optional[ResourceFieldRef] = Field( + resourceFieldRef: Optional[ResourceFieldRef2] = Field( default=None, description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.", ) @@ -2100,9 +2382,9 @@ class Secret(BaseCRD): default=None, description="items if unspecified, each key-value pair in the Data field of the referenced\nSecret will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the Secret,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", ) - name: Optional[str] = Field( - default=None, - description="Name of the referent.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Add other useful fields. apiVersion, kind, uid?", + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. 
Instances of this type with an empty value here are\nalmost certainly wrong.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", ) optional: Optional[bool] = Field( default=None, @@ -2132,6 +2414,10 @@ class Source(BaseCRD): model_config = ConfigDict( extra="allow", ) + clusterTrustBundle: Optional[ClusterTrustBundle] = Field( + default=None, + description="ClusterTrustBundle allows a pod to access the `.spec.trustBundle` field\nof ClusterTrustBundle objects in an auto-updating file.\n\nAlpha, gated by the ClusterTrustBundleProjection feature gate.\n\nClusterTrustBundle objects can either be selected by name, or by the\ncombination of signer name and a label selector.\n\nKubelet performs aggressive normalization of the PEM contents written\ninto the pod filesystem. Esoteric PEM features such as inter-block\ncomments and block headers are stripped. Certificates are deduplicated.\nThe ordering of certificates within the file is arbitrary, and Kubelet\nmay change the order over time.", + ) configMap: Optional[ConfigMap1] = Field( default=None, description="configMap information about the configMap data to project", @@ -2158,7 +2444,8 @@ class Projected(BaseCRD): description="defaultMode are the mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", ) sources: Optional[List[Source]] = Field( - default=None, description="sources is the list of volume projections" + default=None, + description="sources is the list of volume projections. Each entry in this list\nhandles one source.", ) @@ -2197,22 +2484,22 @@ class Rbd(BaseCRD): ) fsType: Optional[str] = Field( default=None, - description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#rbd\nTODO: how do we prevent errors in the filesystem from compromising the machine', + description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". 
Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#rbd', ) image: str = Field( ..., description="image is the rados image name.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", ) - keyring: Optional[str] = Field( - default=None, + keyring: str = Field( + default="/etc/ceph/keyring", description="keyring is the path to key ring for RBDUser.\nDefault is /etc/ceph/keyring.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", ) monitors: List[str] = Field( ..., description="monitors is a collection of Ceph monitors.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", ) - pool: Optional[str] = Field( - default=None, + pool: str = Field( + default="rbd", description="pool is the rados pool name.\nDefault is rbd.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", ) readOnly: Optional[bool] = Field( @@ -2223,8 +2510,8 @@ class Rbd(BaseCRD): default=None, description="secretRef is name of the authentication secret for RBDUser. If provided\noverrides keyring.\nDefault is nil.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", ) - user: Optional[str] = Field( - default=None, + user: str = Field( + default="admin", description="user is the rados user name.\nDefault is admin.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", ) @@ -2233,8 +2520,8 @@ class ScaleIO(BaseCRD): model_config = ConfigDict( extra="allow", ) - fsType: Optional[str] = Field( - default=None, + fsType: str = Field( + default="xfs", description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs".\nDefault is "xfs".', ) gateway: str = Field( @@ -2256,8 +2543,8 @@ class ScaleIO(BaseCRD): default=None, description="sslEnabled Flag enable/disable SSL communication with Gateway, default false", ) - storageMode: Optional[str] = Field( - default=None, + storageMode: str = Field( + default="ThinProvisioned", description="storageMode indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned.\nDefault is ThinProvisioned.", ) storagePool: Optional[str] = Field( @@ -2349,23 +2636,23 @@ class ExtraVolume(BaseCRD): ) awsElasticBlockStore: Optional[AwsElasticBlockStore] = Field( default=None, - description="awsElasticBlockStore represents an AWS Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", + description="awsElasticBlockStore represents an AWS Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nDeprecated: AWSElasticBlockStore is deprecated. All operations for the in-tree\nawsElasticBlockStore type are redirected to the ebs.csi.aws.com CSI driver.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", ) azureDisk: Optional[AzureDisk] = Field( default=None, - description="azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod.", + description="azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod.\nDeprecated: AzureDisk is deprecated. 
All operations for the in-tree azureDisk type\nare redirected to the disk.csi.azure.com CSI driver.", ) azureFile: Optional[AzureFile] = Field( default=None, - description="azureFile represents an Azure File Service mount on the host and bind mount to the pod.", + description="azureFile represents an Azure File Service mount on the host and bind mount to the pod.\nDeprecated: AzureFile is deprecated. All operations for the in-tree azureFile type\nare redirected to the file.csi.azure.com CSI driver.", ) cephfs: Optional[Cephfs] = Field( default=None, - description="cephFS represents a Ceph FS mount on the host that shares a pod's lifetime", + description="cephFS represents a Ceph FS mount on the host that shares a pod's lifetime.\nDeprecated: CephFS is deprecated and the in-tree cephfs type is no longer supported.", ) cinder: Optional[Cinder] = Field( default=None, - description="cinder represents a cinder volume attached and mounted on kubelets host machine.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md", + description="cinder represents a cinder volume attached and mounted on kubelets host machine.\nDeprecated: Cinder is deprecated. All operations for the in-tree cinder type\nare redirected to the cinder.csi.openstack.org CSI driver.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md", ) configMap: Optional[ConfigMap] = Field( default=None, @@ -2373,7 +2660,7 @@ class ExtraVolume(BaseCRD): ) csi: Optional[Csi] = Field( default=None, - description="csi (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers (Beta feature).", + description="csi (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers.", ) downwardAPI: Optional[DownwardAPI] = Field( default=None, @@ -2385,7 +2672,7 @@ class ExtraVolume(BaseCRD): ) ephemeral: Optional[Ephemeral] = Field( default=None, - description="ephemeral represents a volume that is handled by a cluster storage driver.\nThe volume's lifecycle is tied to the pod that defines it - it will be created before the pod starts,\nand deleted when the pod is removed.\n\n\nUse this if:\na) the volume is only needed while the pod runs,\nb) features of normal volumes like restoring from snapshot or capacity\n tracking are needed,\nc) the storage driver is specified through a storage class, and\nd) the storage driver supports dynamic volume provisioning through\n a PersistentVolumeClaim (see EphemeralVolumeSource for more\n information on the connection between this volume type\n and PersistentVolumeClaim).\n\n\nUse PersistentVolumeClaim or one of the vendor-specific\nAPIs for volumes that persist for longer than the lifecycle\nof an individual pod.\n\n\nUse CSI for light-weight local ephemeral volumes if the CSI driver is meant to\nbe used that way - see the documentation of the driver for\nmore information.\n\n\nA pod can use both types of ephemeral volumes and\npersistent volumes at the same time.", + description="ephemeral represents a volume that is handled by a cluster storage driver.\nThe volume's lifecycle is tied to the pod that defines it - it will be created before the pod starts,\nand deleted when the pod is removed.\n\nUse this if:\na) the volume is only needed while the pod runs,\nb) features of normal volumes like restoring from snapshot or capacity\n tracking are needed,\nc) the storage driver is specified through a storage class, and\nd) the storage driver supports dynamic volume provisioning through\n a PersistentVolumeClaim 
(see EphemeralVolumeSource for more\n information on the connection between this volume type\n and PersistentVolumeClaim).\n\nUse PersistentVolumeClaim or one of the vendor-specific\nAPIs for volumes that persist for longer than the lifecycle\nof an individual pod.\n\nUse CSI for light-weight local ephemeral volumes if the CSI driver is meant to\nbe used that way - see the documentation of the driver for\nmore information.\n\nA pod can use both types of ephemeral volumes and\npersistent volumes at the same time.", ) fc: Optional[Fc] = Field( default=None, @@ -2393,27 +2680,31 @@ class ExtraVolume(BaseCRD): ) flexVolume: Optional[FlexVolume] = Field( default=None, - description="flexVolume represents a generic volume resource that is\nprovisioned/attached using an exec based plugin.", + description="flexVolume represents a generic volume resource that is\nprovisioned/attached using an exec based plugin.\nDeprecated: FlexVolume is deprecated. Consider using a CSIDriver instead.", ) flocker: Optional[Flocker] = Field( default=None, - description="flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running", + description="flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running.\nDeprecated: Flocker is deprecated and the in-tree flocker type is no longer supported.", ) gcePersistentDisk: Optional[GcePersistentDisk] = Field( default=None, - description="gcePersistentDisk represents a GCE Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", + description="gcePersistentDisk represents a GCE Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nDeprecated: GCEPersistentDisk is deprecated. All operations for the in-tree\ngcePersistentDisk type are redirected to the pd.csi.storage.gke.io CSI driver.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", ) gitRepo: Optional[GitRepo] = Field( default=None, - description="gitRepo represents a git repository at a particular revision.\nDEPRECATED: GitRepo is deprecated. To provision a container with a git repo, mount an\nEmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir\ninto the Pod's container.", + description="gitRepo represents a git repository at a particular revision.\nDeprecated: GitRepo is deprecated. To provision a container with a git repo, mount an\nEmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir\ninto the Pod's container.", ) glusterfs: Optional[Glusterfs] = Field( default=None, - description="glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime.\nMore info: https://examples.k8s.io/volumes/glusterfs/README.md", + description="glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime.\nDeprecated: Glusterfs is deprecated and the in-tree glusterfs type is no longer supported.\nMore info: https://examples.k8s.io/volumes/glusterfs/README.md", ) hostPath: Optional[HostPath] = Field( default=None, - description="hostPath represents a pre-existing file or directory on the host\nmachine that is directly exposed to the container. This is generally\nused for system agents or other privileged things that are allowed\nto see the host machine. 
Most containers will NOT need this.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath\n---\nTODO(jonesdl) We need to restrict who can use host directory mounts and who can/can not\nmount host directories as read/write.", + description="hostPath represents a pre-existing file or directory on the host\nmachine that is directly exposed to the container. This is generally\nused for system agents or other privileged things that are allowed\nto see the host machine. Most containers will NOT need this.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath", + ) + image: Optional[Image] = Field( + default=None, + description="image represents an OCI object (a container image or artifact) pulled and mounted on the kubelet's host machine.\nThe volume is resolved at pod startup depending on which PullPolicy value is provided:\n\n- Always: the kubelet always attempts to pull the reference. Container creation will fail If the pull fails.\n- Never: the kubelet never pulls the reference and only uses a local image or artifact. Container creation will fail if the reference isn't present.\n- IfNotPresent: the kubelet pulls if the reference isn't already present on disk. Container creation will fail if the reference isn't present and the pull fails.\n\nThe volume gets re-resolved if the pod gets deleted and recreated, which means that new remote content will become available on pod recreation.\nA failure to resolve or pull the image during pod startup will block containers from starting and may add significant latency. Failures will be retried using normal volume backoff and will be reported on the pod reason and message.\nThe types of objects that may be mounted by this volume are defined by the container runtime implementation on a host machine and at minimum must include all valid types supported by the container image field.\nThe OCI object gets mounted in a single directory (spec.containers[*].volumeMounts.mountPath) by merging the manifest layers in the same way as for container images.\nThe volume will be mounted read-only (ro) and non-executable files (noexec).\nSub path mounts for containers are not supported (spec.containers[*].volumeMounts.subpath) before 1.33.\nThe field spec.securityContext.fsGroupChangePolicy has no effect on this volume type.", ) iscsi: Optional[Iscsi] = Field( default=None, @@ -2433,11 +2724,11 @@ class ExtraVolume(BaseCRD): ) photonPersistentDisk: Optional[PhotonPersistentDisk] = Field( default=None, - description="photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine", + description="photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine.\nDeprecated: PhotonPersistentDisk is deprecated and the in-tree photonPersistentDisk type is no longer supported.", ) portworxVolume: Optional[PortworxVolume] = Field( default=None, - description="portworxVolume represents a portworx volume attached and mounted on kubelets host machine", + description="portworxVolume represents a portworx volume attached and mounted on kubelets host machine.\nDeprecated: PortworxVolume is deprecated. 
All operations for the in-tree portworxVolume type\nare redirected to the pxd.portworx.com CSI driver when the CSIMigrationPortworx feature-gate\nis on.", ) projected: Optional[Projected] = Field( default=None, @@ -2445,15 +2736,15 @@ class ExtraVolume(BaseCRD): ) quobyte: Optional[Quobyte] = Field( default=None, - description="quobyte represents a Quobyte mount on the host that shares a pod's lifetime", + description="quobyte represents a Quobyte mount on the host that shares a pod's lifetime.\nDeprecated: Quobyte is deprecated and the in-tree quobyte type is no longer supported.", ) rbd: Optional[Rbd] = Field( default=None, - description="rbd represents a Rados Block Device mount on the host that shares a pod's lifetime.\nMore info: https://examples.k8s.io/volumes/rbd/README.md", + description="rbd represents a Rados Block Device mount on the host that shares a pod's lifetime.\nDeprecated: RBD is deprecated and the in-tree rbd type is no longer supported.\nMore info: https://examples.k8s.io/volumes/rbd/README.md", ) scaleIO: Optional[ScaleIO] = Field( default=None, - description="scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes.", + description="scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes.\nDeprecated: ScaleIO is deprecated and the in-tree scaleIO type is no longer supported.", ) secret: Optional[Secret1] = Field( default=None, @@ -2461,14 +2752,29 @@ class ExtraVolume(BaseCRD): ) storageos: Optional[Storageos] = Field( default=None, - description="storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes.", + description="storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes.\nDeprecated: StorageOS is deprecated and the in-tree storageos type is no longer supported.", ) vsphereVolume: Optional[VsphereVolume] = Field( default=None, - description="vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine", + description="vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine.\nDeprecated: VsphereVolume is deprecated. 
All operations for the in-tree vsphereVolume type\nare redirected to the csi.vsphere.vmware.com CSI driver.", ) +class ImagePullSecret(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + adopt: Optional[bool] = Field( + default=None, + description="If the secret is adopted then the operator will delete the secret when the custom resource that uses it is deleted.", + ) + key: Optional[str] = Field( + default=None, + description="The key is optional because it may not be relevant depending on where or how the secret is used.\nFor example, for authentication see the `secretRef` field in `spec.authentication`\nfor more details.", + ) + name: str + + class TlsSecret(BaseCRD): model_config = ConfigDict( extra="allow", @@ -2477,6 +2783,10 @@ class TlsSecret(BaseCRD): default=None, description="If the secret is adopted then the operator will delete the secret when the custom resource that uses it is deleted.", ) + key: Optional[str] = Field( + default=None, + description="The key is optional because it may not be relevant depending on where or how the secret is used.\nFor example, for authentication see the `secretRef` field in `spec.authentication`\nfor more details.", + ) name: str @@ -2497,6 +2807,37 @@ class Ingress(BaseCRD): ) +class Divisor6(RootModel[int]): + root: int = Field( + ..., + description='Specifies the output format of the exposed resources, defaults to "1"', + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Divisor7(RootModel[str]): + root: str = Field( + ..., + description='Specifies the output format of the exposed resources, defaults to "1"', + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class ResourceFieldRef3(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + containerName: Optional[str] = Field( + default=None, + description="Container name: required for volumes, optional for env vars", + ) + divisor: Optional[Union[Divisor6, Divisor7]] = Field( + default=None, + description='Specifies the output format of the exposed resources, defaults to "1"', + ) + resource: str = Field(..., description="Required: resource to select") + + class ValueFrom1(BaseCRD): model_config = ConfigDict( extra="allow", @@ -2508,7 +2849,7 @@ class ValueFrom1(BaseCRD): default=None, description="Selects a field of the pod: supports metadata.name, metadata.namespace, `metadata.labels['']`, `metadata.annotations['']`,\nspec.nodeName, spec.serviceAccountName, status.hostIP, status.podIP, status.podIPs.", ) - resourceFieldRef: Optional[ResourceFieldRef] = Field( + resourceFieldRef: Optional[ResourceFieldRef3] = Field( default=None, description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, limits.ephemeral-storage, requests.cpu, requests.memory and requests.ephemeral-storage) are currently supported.", ) @@ -2538,9 +2879,9 @@ class SecretRef10(BaseCRD): model_config = ConfigDict( extra="allow", ) - name: Optional[str] = Field( - default=None, - description="Name of the referent.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Add other useful fields. apiVersion, kind, uid?", + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. 
Instances of this type with an empty value here are\nalmost certainly wrong.\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names", ) optional: Optional[bool] = Field( default=None, description="Specify whether the Secret must be defined" @@ -2556,7 +2897,7 @@ class EnvFromItem1(BaseCRD): ) prefix: Optional[str] = Field( default=None, - description="An optional identifier to prepend to each key in the ConfigMap. Must be a C_IDENTIFIER.", + description="Optional text to prepend to the name of each environment variable. Must be a C_IDENTIFIER.", ) secretRef: Optional[SecretRef10] = Field( default=None, description="The Secret to select from" @@ -2593,14 +2934,19 @@ class PostStart1(BaseCRD): extra="allow", ) exec: Optional[Exec] = Field( - default=None, description="Exec specifies the action to take." + default=None, + description="Exec specifies a command to execute in the container.", ) httpGet: Optional[HttpGet5] = Field( - default=None, description="HTTPGet specifies the http request to perform." + default=None, description="HTTPGet specifies an HTTP GET request to perform." + ) + sleep: Optional[Sleep] = Field( + default=None, + description="Sleep represents a duration that the container should sleep.", ) tcpSocket: Optional[TcpSocket] = Field( default=None, - description="Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept\nfor the backward compatibility. There are no validation of this field and\nlifecycle hooks will fail in runtime when tcp handler is specified.", + description="Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept\nfor backward compatibility. There is no validation of this field and\nlifecycle hooks will fail at runtime when it is specified.", ) @@ -2634,14 +2980,19 @@ class PreStop1(BaseCRD): extra="allow", ) exec: Optional[Exec] = Field( - default=None, description="Exec specifies the action to take." + default=None, + description="Exec specifies a command to execute in the container.", ) httpGet: Optional[HttpGet6] = Field( - default=None, description="HTTPGet specifies the http request to perform." + default=None, description="HTTPGet specifies an HTTP GET request to perform." + ) + sleep: Optional[Sleep] = Field( + default=None, + description="Sleep represents a duration that the container should sleep.", ) tcpSocket: Optional[TcpSocket] = Field( default=None, - description="Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept\nfor the backward compatibility. There are no validation of this field and\nlifecycle hooks will fail in runtime when tcp handler is specified.", + description="Deprecated. TCPSocket is NOT supported as a LifecycleHandler and kept\nfor backward compatibility. There is no validation of this field and\nlifecycle hooks will fail at runtime when it is specified.", ) @@ -2657,6 +3008,10 @@ class Lifecycle1(BaseCRD): default=None, description="PreStop is called immediately before a container is terminated due to an\nAPI request or management event such as liveness/startup probe failure,\npreemption, resource contention, etc. The handler is not called if the\ncontainer crashes or exits. The Pod's termination grace period countdown begins before the\nPreStop hook is executed. Regardless of the outcome of the handler, the\ncontainer will eventually terminate within the Pod's termination grace\nperiod (unless delayed by finalizers). 
Other management of the container blocks until the hook completes\nor until the termination grace period is reached.\nMore info: https://kubernetes.io/docs/concepts/containers/container-lifecycle-hooks/#container-hooks", ) + stopSignal: Optional[str] = Field( + default=None, + description="StopSignal defines which signal will be sent to a container when it is being stopped.\nIf not specified, the default is defined by the container runtime in use.\nStopSignal can only be set for Pods with a non-empty .spec.os.name", + ) class HttpGet7(BaseCRD): @@ -2689,17 +3044,18 @@ class LivenessProbe1(BaseCRD): extra="allow", ) exec: Optional[Exec] = Field( - default=None, description="Exec specifies the action to take." + default=None, + description="Exec specifies a command to execute in the container.", ) failureThreshold: Optional[int] = Field( default=None, description="Minimum consecutive failures for the probe to be considered failed after having succeeded.\nDefaults to 3. Minimum value is 1.", ) grpc: Optional[Grpc] = Field( - default=None, description="GRPC specifies an action involving a GRPC port." + default=None, description="GRPC specifies a GRPC HealthCheckRequest." ) httpGet: Optional[HttpGet7] = Field( - default=None, description="HTTPGet specifies the http request to perform." + default=None, description="HTTPGet specifies an HTTP GET request to perform." ) initialDelaySeconds: Optional[int] = Field( default=None, @@ -2714,7 +3070,7 @@ class LivenessProbe1(BaseCRD): description="Minimum consecutive successes for the probe to be considered successful after having failed.\nDefaults to 1. Must be 1 for liveness and startup. Minimum value is 1.", ) tcpSocket: Optional[TcpSocket] = Field( - default=None, description="TCPSocket specifies an action involving a TCP port." + default=None, description="TCPSocket specifies a connection to a TCP port." ) terminationGracePeriodSeconds: Optional[int] = Field( default=None, @@ -2756,17 +3112,18 @@ class ReadinessProbe1(BaseCRD): extra="allow", ) exec: Optional[Exec] = Field( - default=None, description="Exec specifies the action to take." + default=None, + description="Exec specifies a command to execute in the container.", ) failureThreshold: Optional[int] = Field( default=None, description="Minimum consecutive failures for the probe to be considered failed after having succeeded.\nDefaults to 3. Minimum value is 1.", ) grpc: Optional[Grpc] = Field( - default=None, description="GRPC specifies an action involving a GRPC port." + default=None, description="GRPC specifies a GRPC HealthCheckRequest." ) httpGet: Optional[HttpGet8] = Field( - default=None, description="HTTPGet specifies the http request to perform." + default=None, description="HTTPGet specifies an HTTP GET request to perform." ) initialDelaySeconds: Optional[int] = Field( default=None, @@ -2781,7 +3138,7 @@ class ReadinessProbe1(BaseCRD): description="Minimum consecutive successes for the probe to be considered successful after having failed.\nDefaults to 1. Must be 1 for liveness and startup. Minimum value is 1.", ) tcpSocket: Optional[TcpSocket] = Field( - default=None, description="TCPSocket specifies an action involving a TCP port." + default=None, description="TCPSocket specifies a connection to a TCP port." 
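        # Illustrative sketch only; the values and the HttpGet8 field names are
        # assumptions, not taken from the generated source. A readiness probe
        # model built from the fields above might look like:
        #
        #     probe = ReadinessProbe1(
        #         httpGet=HttpGet8(path="/healthz", port=8080),
        #         failureThreshold=3,
        #         periodSeconds=10,
        #     )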
) terminationGracePeriodSeconds: Optional[int] = Field( default=None, @@ -2793,19 +3150,47 @@ class ReadinessProbe1(BaseCRD): ) +class Limits4(RootModel[int]): + root: int = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Limits5(RootModel[str]): + root: str = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Requests4(RootModel[int]): + root: int = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Requests5(RootModel[str]): + root: str = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + class Resources2(BaseCRD): model_config = ConfigDict( extra="allow", ) claims: Optional[List[Claim]] = Field( default=None, - description="Claims lists the names of resources, defined in spec.resourceClaims,\nthat are used by this container.\n\n\nThis is an alpha field and requires enabling the\nDynamicResourceAllocation feature gate.\n\n\nThis field is immutable. It can only be set for containers.", + description="Claims lists the names of resources, defined in spec.resourceClaims,\nthat are used by this container.\n\nThis is an alpha field and requires enabling the\nDynamicResourceAllocation feature gate.\n\nThis field is immutable. It can only be set for containers.", ) - limits: Optional[Dict[str, Union[int, str]]] = Field( + limits: Optional[Dict[str, Union[Limits4, Limits5]]] = Field( default=None, description="Limits describes the maximum amount of compute resources allowed.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) - requests: Optional[Dict[str, Union[int, str]]] = Field( + requests: Optional[Dict[str, Union[Requests4, Requests5]]] = Field( default=None, description="Requests describes the minimum amount of compute resources required.\nIf Requests is omitted for a container, it defaults to Limits if that is explicitly specified,\notherwise to an implementation-defined value. Requests cannot exceed Limits.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) @@ -2819,6 +3204,10 @@ class SecurityContext1(BaseCRD): default=None, description="AllowPrivilegeEscalation controls whether a process can gain more\nprivileges than its parent process. This bool directly controls if\nthe no_new_privs flag will be set on the container process.\nAllowPrivilegeEscalation is true always when the container is:\n1) run as Privileged\n2) has CAP_SYS_ADMIN\nNote that this field cannot be set when spec.os.name is windows.", ) + appArmorProfile: Optional[AppArmorProfile] = Field( + default=None, + description="appArmorProfile is the AppArmor options to use by this container. 
If set, this profile\noverrides the pod's appArmorProfile.\nNote that this field cannot be set when spec.os.name is windows.", + ) capabilities: Optional[Capabilities] = Field( default=None, description="The capabilities to add/drop when running containers.\nDefaults to the default set of capabilities granted by the container runtime.\nNote that this field cannot be set when spec.os.name is windows.", @@ -2829,7 +3218,7 @@ class SecurityContext1(BaseCRD): ) procMount: Optional[str] = Field( default=None, - description="procMount denotes the type of proc mount to use for the containers.\nThe default is DefaultProcMount which uses the container runtime defaults for\nreadonly paths and masked paths.\nThis requires the ProcMountType feature flag to be enabled.\nNote that this field cannot be set when spec.os.name is windows.", + description="procMount denotes the type of proc mount to use for the containers.\nThe default value is Default which uses the container runtime defaults for\nreadonly paths and masked paths.\nThis requires the ProcMountType feature flag to be enabled.\nNote that this field cannot be set when spec.os.name is windows.", ) readOnlyRootFilesystem: Optional[bool] = Field( default=None, @@ -2891,17 +3280,18 @@ class StartupProbe1(BaseCRD): extra="allow", ) exec: Optional[Exec] = Field( - default=None, description="Exec specifies the action to take." + default=None, + description="Exec specifies a command to execute in the container.", ) failureThreshold: Optional[int] = Field( default=None, description="Minimum consecutive failures for the probe to be considered failed after having succeeded.\nDefaults to 3. Minimum value is 1.", ) grpc: Optional[Grpc] = Field( - default=None, description="GRPC specifies an action involving a GRPC port." + default=None, description="GRPC specifies a GRPC HealthCheckRequest." ) httpGet: Optional[HttpGet9] = Field( - default=None, description="HTTPGet specifies the http request to perform." + default=None, description="HTTPGet specifies an HTTP GET request to perform." ) initialDelaySeconds: Optional[int] = Field( default=None, @@ -2916,7 +3306,7 @@ class StartupProbe1(BaseCRD): description="Minimum consecutive successes for the probe to be considered successful after having failed.\nDefaults to 1. Must be 1 for liveness and startup. Minimum value is 1.", ) tcpSocket: Optional[TcpSocket] = Field( - default=None, description="TCPSocket specifies an action involving a TCP port." + default=None, description="TCPSocket specifies a connection to a TCP port." 
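        # Illustrative note: per the field descriptions above, a startup probe gives
        # the container up to failureThreshold * periodSeconds to start before the
        # kubelet restarts it, e.g. with hypothetical values:
        #
        #     probe = StartupProbe1(failureThreshold=30, periodSeconds=10)  # ~300s budget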
) terminationGracePeriodSeconds: Optional[int] = Field( default=None, @@ -3035,6 +3425,37 @@ class ReconcileStrategy(Enum): whenFailedOrHibernated = "whenFailedOrHibernated" +class Divisor8(RootModel[int]): + root: int = Field( + ..., + description='Specifies the output format of the exposed resources, defaults to "1"', + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Divisor9(RootModel[str]): + root: str = Field( + ..., + description='Specifies the output format of the exposed resources, defaults to "1"', + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class ResourceFieldRef4(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + containerName: Optional[str] = Field( + default=None, + description="Container name: required for volumes, optional for env vars", + ) + divisor: Optional[Union[Divisor8, Divisor9]] = Field( + default=None, + description='Specifies the output format of the exposed resources, defaults to "1"', + ) + resource: str = Field(..., description="Required: resource to select") + + class ValueFrom2(BaseCRD): model_config = ConfigDict( extra="allow", @@ -3046,7 +3467,7 @@ class ValueFrom2(BaseCRD): default=None, description="Selects a field of the pod: supports metadata.name, metadata.namespace, `metadata.labels['']`, `metadata.annotations['']`,\nspec.nodeName, spec.serviceAccountName, status.hostIP, status.podIP, status.podIPs.", ) - resourceFieldRef: Optional[ResourceFieldRef] = Field( + resourceFieldRef: Optional[ResourceFieldRef4] = Field( default=None, description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, limits.ephemeral-storage, requests.cpu, requests.memory and requests.ephemeral-storage) are currently supported.", ) @@ -3072,6 +3493,12 @@ class EnvItem2(BaseCRD): ) +class ImagePullPolicy(Enum): + Always = "Always" + Never = "Never" + IfNotPresent = "IfNotPresent" + + class Type3(Enum): none = "none" tcp = "tcp" @@ -3085,24 +3512,82 @@ class ReadinessProbe2(BaseCRD): type: Type3 = Field(default="tcp", description="The type of readiness probe") +class Limits6(RootModel[int]): + root: int = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Limits7(RootModel[str]): + root: str = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Requests6(RootModel[int]): + root: int = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Requests7(RootModel[str]): + root: str = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + class Resources3(BaseCRD): model_config = ConfigDict( extra="allow", ) claims: Optional[List[Claim]] = Field( default=None, - description="Claims lists the names of resources, defined in spec.resourceClaims,\nthat are used by this container.\n\n\nThis is an alpha field and requires enabling the\nDynamicResourceAllocation feature gate.\n\n\nThis field is immutable. 
It can only be set for containers.", + description="Claims lists the names of resources, defined in spec.resourceClaims,\nthat are used by this container.\n\nThis is an alpha field and requires enabling the\nDynamicResourceAllocation feature gate.\n\nThis field is immutable. It can only be set for containers.", ) - limits: Optional[Dict[str, Union[int, str]]] = Field( + limits: Optional[Dict[str, Union[Limits6, Limits7]]] = Field( default=None, description="Limits describes the maximum amount of compute resources allowed.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) - requests: Optional[Dict[str, Union[int, str]]] = Field( + requests: Optional[Dict[str, Union[Requests6, Requests7]]] = Field( default=None, description="Requests describes the minimum amount of compute resources required.\nIf Requests is omitted for a container, it defaults to Limits if that is explicitly specified,\notherwise to an implementation-defined value. Requests cannot exceed Limits.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) +class ShmSize(RootModel[int]): + root: int = Field( + ..., + description="Size of /dev/shm", + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class ShmSize1(RootModel[str]): + root: str = Field( + ..., + description="Size of /dev/shm", + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Size(RootModel[int]): + root: int = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + +class Size1(RootModel[str]): + root: str = Field( + ..., + pattern="^(\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))(([KMGTPE]i)|[numkMGTPE]|([eE](\\+|-)?(([0-9]+(\\.[0-9]*)?)|(\\.[0-9]+))))?$", + ) + + class Storage(BaseCRD): model_config = ConfigDict( extra="allow", @@ -3112,7 +3597,7 @@ class Storage(BaseCRD): default="/workspace", description="The absolute mount path for the session volume", ) - size: Union[int, str] = "1Gi" + size: Union[Size, Size1] = "1Gi" class Session(BaseCRD): @@ -3132,11 +3617,15 @@ class Session(BaseCRD): default=None, description="Additional volume mounts for the session container" ) image: str + imagePullPolicy: ImagePullPolicy = Field( + default="Always", + description="The image pull policy to apply to the session image", + ) port: int = Field( default=8000, - description="The TCP port on the pod where the session can be accessed.\nIf the session has authentication enabled then the ingress and service will point to the authentication container\nand the authentication proxy container will proxy to this port. If authentication is disabled then the ingress and service\nroute directly to this port. Note that renku reserves the highest TCP value 65535 to run the authentication proxy.", + description="The TCP port on the pod where the session can be accessed.\nIf the session has authentication enabled then the ingress and service will point to the authentication container\nand the authentication proxy container will proxy to this port. If authentication is disabled then the ingress and service\nroute directly to this port. 
Note that renku reserves the highest TCP values in the range 65530 to 65535 to run the authentication proxy and other auxiliary services.", gt=0, - lt=65535, + lt=65530, ) readinessProbe: ReadinessProbe2 = Field( default={}, description="The readiness probe to use on the session container" @@ -3151,10 +3640,14 @@ class Session(BaseCRD): ge=0, ) runAsUser: int = Field(default=1000, ge=0) - shmSize: Optional[Union[int, str]] = Field( + shmSize: Optional[Union[ShmSize, ShmSize1]] = Field( default=None, description="Size of /dev/shm" ) storage: Storage = {} + stripURLPath: bool = Field( + default=False, + description="Will strip the url path defined in URLPath above from all requests that reach the session.\nThis is useful for session frontends like Rstudio which cannot run on any path other than `/`", + ) urlPath: str = Field( default="/", description="The path where the session can be accessed, if an ingress is used this should be a subpath\nof the ingress.pathPrefix field. For example if the pathPrefix is /foo, this should be /foo or /foo/bar,\nbut it cannot be /baz.", @@ -3225,6 +3718,10 @@ class Spec(BaseCRD): ..., description="Will hibernate the session, scaling the session's statefulset to zero.", ) + imagePullSecrets: Optional[List[ImagePullSecret]] = Field( + default=None, + description="List of secrets that contain credentials for pulling private images", + ) ingress: Optional[Ingress] = Field( default=None, description="Configuration for an ingress to the session, if omitted a Kubernetes Ingress will not be created", @@ -3245,6 +3742,10 @@ class Spec(BaseCRD): default="always", description="Indicates how Amalthea should reconcile the child resources for a session. This can be problematic because\nnewer versions of Amalthea may include new versions of the sidecars or other changes not reflected\nin the AmaltheaSession CRD, so simply updating Amalthea could cause existing sessions to restart\nbecause the sidecars will have a newer image or for other reasons because the code changed.\nHibernating the session and deleting it will always work as expected regardless of the strategy.\nThe status of the session and all hibernation or auto-cleanup functionality will always work as expected.\nA few values are possible:\n- never: Amalthea will never update any of the child resources and will ignore any changes to the CR\n- always: This is the expected method of operation for an operator, changes to the spec are always reconciled\n- whenHibernatedOrFailed: To avoid interrupting a running session, reconciliation of the child components\n are only done when the session has a Failed or Hibernated status", ) + serviceAccountName: Optional[str] = Field( + default=None, + description="The name of the service account that should be used for the session Pod", + ) session: Session = Field( ..., description="Specification for the main session container that the user will access and use", diff --git a/components/renku_data_services/notebooks/cr_jupyter_server.py b/components/renku_data_services/notebooks/cr_jupyter_server.py index f4830e13e..9d42a5cfb 100644 --- a/components/renku_data_services/notebooks/cr_jupyter_server.py +++ b/components/renku_data_services/notebooks/cr_jupyter_server.py @@ -1,16 +1,48 @@ # generated by datamodel-codegen: # filename: -# timestamp: 2024-09-04T22:45:30+00:00 +# timestamp: 2025-07-07T13:49:35+00:00 from __future__ import annotations from enum import Enum -from typing import Any, Dict, List, Optional +from typing import Any, Dict, List, Optional, Union from pydantic 
import ConfigDict, Field from renku_data_services.notebooks.cr_base import BaseCRD +class SecretKeyRef(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + key: Optional[str] = None + name: Optional[str] = None + + +class ClientSecret(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + secretKeyRef: Optional[SecretKeyRef] = Field( + default=None, + description="A regular reference to the key/secret which holds the client secret of the application registered with the OIDC provider. Note that the secret has to be in the same namespace in which the custom resource object is going to be created.", + ) + value: str = Field(..., description="The secret provided as a string value.") + + +class ClientSecret1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + secretKeyRef: SecretKeyRef = Field( + ..., + description="A regular reference to the key/secret which holds the client secret of the application registered with the OIDC provider. Note that the secret has to be in the same namespace in which the custom resource object is going to be created.", + ) + value: Optional[str] = Field( + default=None, description="The secret provided as a string value." + ) + + class Oidc(BaseCRD): model_config = ConfigDict( extra="allow", @@ -27,6 +59,10 @@ class Oidc(BaseCRD): default=None, description="The client id of the application registered with the OIDC provider, see `--client-id` here: https://oauth2-proxy.github.io/oauth2-proxy/docs/configuration/overview/#command-line-options", ) + clientSecret: Optional[Union[ClientSecret, ClientSecret1]] = Field( + default=None, + description="The client secret of the application registered with the OIDC provider. This secret can be given here explicitly as string or through a reference to an existing secret. Using the secret reference is the preferred option because it avoids storing the secret in clear text on the custom resource specification. 
See `--client-secret` here: https://oauth2-proxy.github.io/oauth2-proxy/docs/configuration/overview/#command-line-options", + ) enabled: bool = False issuerUrl: Optional[str] = Field( default=None, diff --git a/components/renku_data_services/notebooks/crs.py b/components/renku_data_services/notebooks/crs.py index 871ce59b1..69e5b4885 100644 --- a/components/renku_data_services/notebooks/crs.py +++ b/components/renku_data_services/notebooks/crs.py @@ -1,24 +1,29 @@ """Custom resource definition with proper names from the autogenerated code.""" -from datetime import UTC, datetime -from typing import Any, cast +import re +from datetime import datetime, timedelta +from typing import Any, cast, override from urllib.parse import urlunparse from kubernetes.utils import parse_duration, parse_quantity -from pydantic import BaseModel, Field, field_validator +from kubernetes.utils.duration import format_duration +from pydantic import BaseModel, Field, field_serializer, field_validator, model_serializer from ulid import ULID from renku_data_services.errors import errors from renku_data_services.notebooks import apispec +from renku_data_services.notebooks.constants import AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK from renku_data_services.notebooks.cr_amalthea_session import ( Affinity, Authentication, CodeRepository, - Culling, DataSource, + EmptyDir, ExtraContainer, ExtraVolume, ExtraVolumeMount, + ImagePullPolicy, + ImagePullSecret, Ingress, InitContainer, MatchExpression, @@ -30,22 +35,31 @@ RequiredDuringSchedulingIgnoredDuringExecution, SecretRef, Session, + Size, State, Status, Storage, TlsSecret, Toleration, ) +from renku_data_services.notebooks.cr_amalthea_session import ( + Culling as _ASCulling, +) from renku_data_services.notebooks.cr_amalthea_session import EnvItem2 as SessionEnvItem from renku_data_services.notebooks.cr_amalthea_session import Item4 as SecretAsVolumeItem +from renku_data_services.notebooks.cr_amalthea_session import Limits6 as Limits +from renku_data_services.notebooks.cr_amalthea_session import Limits7 as LimitsStr from renku_data_services.notebooks.cr_amalthea_session import Model as _ASModel +from renku_data_services.notebooks.cr_amalthea_session import Requests6 as Requests +from renku_data_services.notebooks.cr_amalthea_session import Requests7 as RequestsStr from renku_data_services.notebooks.cr_amalthea_session import Resources3 as Resources from renku_data_services.notebooks.cr_amalthea_session import Secret1 as SecretAsVolume -from renku_data_services.notebooks.cr_amalthea_session import SecretRef as SecretRefKey -from renku_data_services.notebooks.cr_amalthea_session import SecretRef1 as SecretRefWhole -from renku_data_services.notebooks.cr_amalthea_session import Spec as AmaltheaSessionSpec +from renku_data_services.notebooks.cr_amalthea_session import ShmSize1 as ShmSizeStr +from renku_data_services.notebooks.cr_amalthea_session import Size1 as SizeStr +from renku_data_services.notebooks.cr_amalthea_session import Spec as _ASSpec from renku_data_services.notebooks.cr_amalthea_session import Type as AuthenticationType from renku_data_services.notebooks.cr_amalthea_session import Type1 as CodeRepositoryType +from renku_data_services.notebooks.cr_base import BaseCRD from renku_data_services.notebooks.cr_jupyter_server import Model as _JSModel from renku_data_services.notebooks.cr_jupyter_server import Patch from renku_data_services.notebooks.cr_jupyter_server import Spec as JupyterServerSpec @@ -82,6 +96,8 @@ class ComputeResources(BaseModel): def 
_convert_k8s_cpu(cls, val: Any) -> Any: if val is None: return None + if hasattr(val, "root"): + val = val.root return float(parse_quantity(val)) @field_validator("gpu", mode="before") @@ -89,6 +105,8 @@ def _convert_k8s_cpu(cls, val: Any) -> Any: def _convert_k8s_gpu(cls, val: Any) -> Any: if val is None: return None + if hasattr(val, "root"): + val = val.root return round(parse_quantity(val), ndigits=None) @field_validator("memory", "storage", mode="before") @@ -97,14 +115,16 @@ def _convert_k8s_bytes(cls, val: Any) -> Any: """Converts to gigabytes of base 10.""" if val is None: return None + if hasattr(val, "root"): + val = val.root return round(parse_quantity(val) / 1_000_000_000, ndigits=None) class JupyterServerV1Alpha1(_JSModel): """Jupyter server CRD.""" - kind: str = "JupyterServer" - apiVersion: str = "amalthea.dev/v1alpha1" + kind: str = JUPYTER_SESSION_GVK.kind + apiVersion: str = JUPYTER_SESSION_GVK.group_version metadata: Metadata def get_compute_resources(self) -> ComputeResources: @@ -115,19 +135,50 @@ def get_compute_resources(self) -> ComputeResources: resource_requests["storage"] = self.spec.storage.size return ComputeResources.model_validate(resource_requests) + def resource_class_id(self) -> int: + """Get the resource class from the annotations.""" + if "renku.io/resourceClassId" not in self.metadata.annotations: + raise errors.ProgrammingError( + message=f"The session with name {self.metadata.name} is missing its renku.io/resourceClassId annotation" + ) + i = int(self.metadata.annotations["renku.io/resourceClassId"]) + return i + + +class Culling(_ASCulling): + """Amalthea session culling configuration.""" + + @field_serializer("*", mode="wrap") + def __serialize_duration_field(self, val: Any, nxt: Any, _info: Any) -> Any: + if isinstance(val, timedelta): + return format_duration(val) + return nxt(val) + + @field_validator("*", mode="wrap") + @classmethod + def __deserialize_duration(cls, val: Any, handler: Any) -> Any: + if isinstance(val, str): + return safe_parse_duration(val) + return handler(val) + + +class AmaltheaSessionSpec(_ASSpec): + """Amalthea session specification.""" + + culling: Culling | None = None + class AmaltheaSessionV1Alpha1(_ASModel): """Amalthea session CRD.""" - kind: str = "AmaltheaSession" - apiVersion: str = "amalthea.dev/v1alpha1" + kind: str = AMALTHEA_SESSION_GVK.kind + apiVersion: str = AMALTHEA_SESSION_GVK.group_version # Here we overwrite the default from ASModel because it is too weakly typed metadata: Metadata # type: ignore[assignment] + spec: AmaltheaSessionSpec def get_compute_resources(self) -> ComputeResources: """Convert the k8s resource requests and storage into usable values.""" - if self.spec is None: - return ComputeResources() resource_requests: dict = {} if self.spec.session.resources is not None: resource_requests = self.spec.session.resources.requests or {} @@ -152,7 +203,6 @@ def launcher_id(self) -> ULID: ) return cast(ULID, ULID.from_str(self.metadata.annotations["renku.io/launcher_id"])) - @property def resource_class_id(self) -> int: """Get the resource class from the annotations.""" if "renku.io/resource_class_id" not in self.metadata.annotations: @@ -191,7 +241,14 @@ def as_apispec(self) -> apispec.SessionResponse: total_containers += self.status.containerCounts.total or 0 if self.status.state in [State.Running, State.Hibernated, State.Failed]: - state = apispec.State3(self.status.state.value.lower()) + # Amalthea is sometimes slow when (un)hibernating and still shows the old status, so we patch it here + # so 
the client sees the correct state + if not self.spec.hibernated and self.status.state == State.Hibernated: + state = apispec.State3.starting + elif self.spec.hibernated and self.status.state == State.Running: + state = apispec.State3.hibernated + else: + state = apispec.State3(self.status.state.value.lower()) elif self.status.state == State.RunningDegraded: state = apispec.State3.running elif self.status.state == State.NotReady and self.metadata.deletionTimestamp is not None: @@ -206,22 +263,22 @@ def as_apispec(self) -> apispec.SessionResponse: Status(idle=True, idleSince=idle_since), Culling(maxIdleDuration=max_idle), ) if idle_since and max_idle: - will_hibernate_at = idle_since + parse_duration(max_idle) + will_hibernate_at = idle_since + safe_parse_duration(max_idle) case ( Status(state=State.Failed, failingSince=failing_since), Culling(maxFailedDuration=max_failed), ) if failing_since and max_failed: - will_hibernate_at = failing_since + parse_duration(max_failed) + will_hibernate_at = failing_since + safe_parse_duration(max_failed) case ( Status(state=State.NotReady), Culling(maxAge=max_age), ) if max_age and self.metadata.creationTimestamp: - will_hibernate_at = self.metadata.creationTimestamp + parse_duration(max_age) + will_hibernate_at = self.metadata.creationTimestamp + safe_parse_duration(max_age) case ( Status(state=State.Hibernated, hibernatedSince=hibernated_since), Culling(maxHibernatedDuration=max_hibernated), ) if hibernated_since and max_hibernated: - will_delete_at = hibernated_since + parse_duration(max_hibernated) + will_delete_at = hibernated_since + safe_parse_duration(max_hibernated) return apispec.SessionResponse( image=self.spec.session.image, @@ -245,7 +302,7 @@ def as_apispec(self) -> apispec.SessionResponse: url=url, project_id=str(self.project_id), launcher_id=str(self.launcher_id), - resource_class_id=self.resource_class_id, + resource_class_id=self.resource_class_id(), ) @property @@ -265,3 +322,64 @@ def base_url(self) -> str | None: fragment = None url = urlunparse((scheme, host, path, params, query, fragment)) return url + + +class AmaltheaSessionV1Alpha1SpecSessionPatch(BaseCRD): + """Patch for the main session config.""" + + resources: Resources | None = None + shmSize: int | str | None = None + storage: Storage | None = None + imagePullPolicy: ImagePullPolicy | None = None + + +class AmaltheaSessionV1Alpha1SpecPatch(BaseCRD): + """Patch for the spec of an amalthea session.""" + + extraContainers: list[ExtraContainer] | None = None + extraVolumes: list[ExtraVolume] | None = None + hibernated: bool | None = None + initContainers: list[InitContainer] | None = None + imagePullSecrets: list[ImagePullSecret] | None = None + priorityClassName: str | None = None + tolerations: list[Toleration] | None = None + affinity: Affinity | None = None + session: AmaltheaSessionV1Alpha1SpecSessionPatch | None = None + culling: Culling | None = None + service_account_name: str | None = None + + +class AmaltheaSessionV1Alpha1Patch(BaseCRD): + """Patch for an amalthea session.""" + + spec: AmaltheaSessionV1Alpha1SpecPatch + + def to_rfc7386(self) -> dict[str, Any]: + """Generate the patch to be applied to the session.""" + return self.model_dump(exclude_none=True) + + +def safe_parse_duration(val: Any) -> timedelta: + """Required because parse_duration from k8s can only deal with values with up to 5 digits. + + Values with 1 unit only (like seconds) and high values will cause things to fail. 
+    For example `parse_duration("1210500s")` will raise ValueError whereas `parse_duration("100s")` will be fine.
+    This does not make the whole thing 100% foolproof but it eliminates errors like the above which
+    we have seen in production.
+    """
+    if isinstance(val, timedelta):
+        return val
+    m = re.match(r"^([0-9]+)(h|m|s|ms)$", str(val))
+    if m is not None:
+        num = m.group(1)
+        unit = m.group(2)
+        match unit:
+            case "h":
+                return timedelta(hours=float(num))
+            case "m":
+                return timedelta(minutes=float(num))
+            case "s":
+                return timedelta(seconds=float(num))
+            case "ms":
+                return timedelta(milliseconds=float(num))
+    return cast(timedelta, parse_duration(val))
diff --git a/components/renku_data_services/notebooks/errors/intermittent.py b/components/renku_data_services/notebooks/errors/intermittent.py
index e03c695e4..8703e9f01 100644
--- a/components/renku_data_services/notebooks/errors/intermittent.py
+++ b/components/renku_data_services/notebooks/errors/intermittent.py
@@ -47,17 +47,17 @@ class CannotStartServerError(IntermittentError):
 
 
 @dataclass
-class JSCacheError(IntermittentError):
-    """Raised when the jupyter server cache responds with anything other than a 200 status code.
+class CacheError(IntermittentError):
+    """Raised when the k8s cache responds with anything other than a 200 status code.
 
-    This indicates trouble with the path requested (i.e. the jupyter cache is not aware
-    of the path) or the jupyter server cache is not functioning properly. When this error
+    This indicates trouble with the path requested (i.e. the k8s cache is not aware
+    of the path) or the k8s cache is not functioning properly. When this error
     is raised the regular (non-cached) k8s client takes over and fulfils the request. Please
-    note that this is possible because the jupyter server cache will respond with 200 and
+    note that this is possible because the k8s cache will respond with 200 and
     an empty response if resources that do not exist are requested.
     """
 
-    message: str = "The jupyter server cache produced and unexpected error."
+    message: str = "The k8s cache produced an unexpected error."
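    # Illustrative sketch of the fallback behaviour described in the docstring
    # above, with hypothetical client and method names: callers catch this
    # error and retry with the regular (non-cached) k8s client.
    #
    #     try:
    #         sessions = await cached_client.list_sessions(safe_username)
    #     except CacheError:
    #         sessions = await direct_client.list_sessions(safe_username)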
     code: int = IntermittentError.code + 3
diff --git a/components/renku_data_services/notebooks/models.py b/components/renku_data_services/notebooks/models.py
index fec11eb4b..7720c6936 100644
--- a/components/renku_data_services/notebooks/models.py
+++ b/components/renku_data_services/notebooks/models.py
@@ -1,11 +1,18 @@
 """Basic models for amalthea sessions."""
 
-from dataclasses import dataclass
+from dataclasses import dataclass, field
 from pathlib import Path
 
+from kubernetes.client import V1Secret
 from pydantic import AliasGenerator, BaseModel, Field, Json
 
-from renku_data_services.notebooks.crs import AmaltheaSessionV1Alpha1
+from renku_data_services.errors.errors import ProgrammingError
+from renku_data_services.notebooks.crs import (
+    AmaltheaSessionV1Alpha1,
+    ExtraVolume,
+    ExtraVolumeMount,
+    SecretRef,
+)
 
 
 @dataclass
@@ -74,3 +81,37 @@ def requested_env_vars(self) -> dict[str, SessionEnvVar]:
         """The environment variables requested."""
         requested_names = self._metadata.annotations.env_variable_names
         return {ikey: ival for ikey, ival in self.env_vars.items() if ikey in requested_names}
+
+
+@dataclass
+class ExtraSecret:
+    """Specification for a K8s secret and its corresponding volumes and mounts."""
+
+    secret: V1Secret = field(repr=False)
+    volume: ExtraVolume | None = None
+    volume_mount: ExtraVolumeMount | None = None
+    adopt: bool = True
+
+    def key_ref(self, key: str) -> SecretRef:
+        """Get an amalthea secret key reference."""
+        meta = self.secret.metadata
+        if not meta:
+            raise ProgrammingError(message="Cannot get reference to a secret that does not have metadata.")
+        secret_name = meta.name
+        if not secret_name:
+            raise ProgrammingError(message="Cannot get reference to a secret that does not have a name.")
+        data = self.secret.data or {}
+        string_data = self.secret.string_data or {}
+        if key not in data and key not in string_data:
+            raise KeyError(f"Cannot find the key {key} in the secret with name {secret_name}")
+        return SecretRef(key=key, name=secret_name, adopt=self.adopt)
+
+    def ref(self) -> SecretRef:
+        """Get an amalthea reference to the whole secret."""
+        meta = self.secret.metadata
+        if not meta:
+            raise ProgrammingError(message="Cannot get reference to a secret that does not have metadata.")
+        secret_name = meta.name
+        if not secret_name:
+            raise ProgrammingError(message="Cannot get reference to a secret that does not have a name.")
+        return SecretRef(name=secret_name, adopt=self.adopt)
diff --git a/components/renku_data_services/notebooks/util/kubernetes_.py b/components/renku_data_services/notebooks/util/kubernetes_.py
index 75c384242..f64a219a7 100644
--- a/components/renku_data_services/notebooks/util/kubernetes_.py
+++ b/components/renku_data_services/notebooks/util/kubernetes_.py
@@ -24,18 +24,20 @@
 from typing import Any, TypeAlias, cast
 
 import escapism
-from kubernetes.client import V1Container
+from box.box import Box
 
 from renku_data_services.base_models.core import AnonymousAPIUser, AuthenticatedAPIUser, Slug
 from renku_data_services.notebooks.crs import Patch, PatchType
 
 
-def renku_1_make_server_name(safe_username: str, namespace: str, project: str, branch: str, commit_sha: str) -> str:
+def renku_1_make_server_name(
+    safe_username: str, namespace: str, project: str, branch: str, commit_sha: str, cluster_id: str
+) -> str:
     """Form a unique server name for Renku 1.0 sessions.
 
     This is used in naming all the k8s resources created by amalthea.
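 
     For illustration only, with hypothetical argument values:
 
         renku_1_make_server_name("jane", "ns", "proj", "main", "abc123", "cluster-1")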
""" - server_string_for_hashing = f"{safe_username}-{namespace}-{project}-{branch}-{commit_sha}" + server_string_for_hashing = f"{safe_username}-{namespace}-{project}-{branch}-{commit_sha}-{cluster_id}" server_hash = md5(server_string_for_hashing.encode(), usedforsecurity=False).hexdigest().lower() prefix = _make_server_name_prefix(safe_username) # NOTE: A K8s object name can only contain lowercase alphanumeric characters, hyphens, or dots. @@ -52,7 +54,9 @@ def renku_1_make_server_name(safe_username: str, namespace: str, project: str, b ) -def renku_2_make_server_name(user: AuthenticatedAPIUser | AnonymousAPIUser, project_id: str, launcher_id: str) -> str: +def renku_2_make_server_name( + user: AuthenticatedAPIUser | AnonymousAPIUser, project_id: str, launcher_id: str, cluster_id: str +) -> str: """Form a unique server name for Renku 2.0 sessions. This is used in naming all the k8s resources created by amalthea. @@ -61,31 +65,21 @@ def renku_2_make_server_name(user: AuthenticatedAPIUser | AnonymousAPIUser, proj safe_username = safe_username.lower() safe_username = re.sub(r"[^a-z0-9-]", "-", safe_username) prefix = _make_server_name_prefix(safe_username) - server_string_for_hashing = f"{user.id}-{project_id}-{launcher_id}" + server_string_for_hashing = f"{user.id}-{project_id}-{launcher_id}-{cluster_id}" server_hash = md5(server_string_for_hashing.encode(), usedforsecurity=False).hexdigest().lower() # NOTE: A K8s object name can only contain lowercase alphanumeric characters, hyphens, or dots. # Must be no more than 63 characters because the name is used to create a k8s Service and Services - # have more restrictions for their names beacuse their names have to make a valid hostname. + # have more restrictions for their names because their names have to make a valid hostname. # NOTE: We use server name as a label value, so, server name must be less than 63 characters. # !NOTE: For now we limit the server name to a max of 25 characters. 
# NOTE: This is 12 + 1 + 12 = 25 characters return f"{prefix[:12]}-{server_hash[:12]}" -def find_env_var(container: V1Container, env_name: str) -> tuple[int, str] | None: +def find_env_var(env_vars: list[Box], env_name: str) -> tuple[int, Box] | None: """Find the index and value of a specific environment variable by name from a Kubernetes container.""" - env_var = next( - filter( - lambda x: x[1].name == env_name, - enumerate(container.env), - ), - None, - ) - if not env_var: - return None - ind = env_var[0] - val = env_var[1].value - return ind, val + filtered = (env_var for env_var in enumerate(env_vars) if env_var[1].name == env_name) + return next(filtered, None) def _make_server_name_prefix(safe_username: str) -> str: @@ -110,8 +104,8 @@ def _make_server_name_prefix(safe_username: str) -> str: class PatchKind(StrEnum): """Content types for different json patches.""" - json: str = "application/json-patch+json" - merge: str = "application/merge-patch+json" + json = "application/json-patch+json" + merge = "application/merge-patch+json" def find_container(patches: list[Patch], container_name: str) -> dict[str, Any] | None: diff --git a/components/renku_data_services/notebooks/util/repository.py b/components/renku_data_services/notebooks/util/repository.py index 49fa42b82..bad3ff89a 100644 --- a/components/renku_data_services/notebooks/util/repository.py +++ b/components/renku_data_services/notebooks/util/repository.py @@ -3,7 +3,10 @@ from typing import Any, Optional, cast import requests -from sanic.log import logger + +from renku_data_services.app_config import logging + +logger = logging.getLogger(__name__) def get_status(hostname: str, server_name: str, access_token: Optional[str]) -> dict[str, Any]: diff --git a/components/renku_data_services/notebooks/utils.py b/components/renku_data_services/notebooks/utils.py index 05ddb118f..132abc136 100644 --- a/components/renku_data_services/notebooks/utils.py +++ b/components/renku_data_services/notebooks/utils.py @@ -1,10 +1,8 @@ """Utilities for notebooks.""" -import httpx - import renku_data_services.crc.models as crc_models -from renku_data_services.base_models.core import AuthenticatedAPIUser from renku_data_services.notebooks.crs import ( + Affinity, MatchExpression, NodeAffinity, NodeSelectorTerm, @@ -13,7 +11,6 @@ RequiredDuringSchedulingIgnoredDuringExecution, Toleration, ) -from renku_data_services.utils.cryptography import get_encryption_key def merge_node_affinities( @@ -57,9 +54,12 @@ def merge_node_affinities( return output -def node_affinity_from_resource_class(resource_class: crc_models.ResourceClass) -> NodeAffinity: +def node_affinity_from_resource_class( + resource_class: crc_models.ResourceClass, + default_affinity: Affinity, +) -> Affinity: """Generate an affinity from the affinities stored in a resource class.""" - output = NodeAffinity() + rc_node_affinity = NodeAffinity() required_expr = [ MatchExpression(key=affinity.key, operator="Exists") for affinity in resource_class.node_affinities @@ -71,17 +71,19 @@ def node_affinity_from_resource_class(resource_class: crc_models.ResourceClass) if not affinity.required_during_scheduling ] if required_expr: - output.requiredDuringSchedulingIgnoredDuringExecution = RequiredDuringSchedulingIgnoredDuringExecution( - nodeSelectorTerms=[ - # NOTE: Node selector terms are ORed by kubernetes - NodeSelectorTerm( - # NOTE: matchExpression terms are ANDed by kubernetes - matchExpressions=required_expr, - ) - ] + rc_node_affinity.requiredDuringSchedulingIgnoredDuringExecution = ( + 
RequiredDuringSchedulingIgnoredDuringExecution( + nodeSelectorTerms=[ + # NOTE: Node selector terms are ORed by kubernetes + NodeSelectorTerm( + # NOTE: matchExpression terms are ANDed by kubernetes + matchExpressions=required_expr, + ) + ] + ) ) if preferred_expr: - output.preferredDuringSchedulingIgnoredDuringExecution = [ + rc_node_affinity.preferredDuringSchedulingIgnoredDuringExecution = [ PreferredDuringSchedulingIgnoredDuringExecutionItem( weight=1, preference=Preference( @@ -90,27 +92,21 @@ def node_affinity_from_resource_class(resource_class: crc_models.ResourceClass) ), ) ] - return output + affinity = default_affinity.model_copy(deep=True) + if affinity.nodeAffinity: + affinity.nodeAffinity = merge_node_affinities(affinity.nodeAffinity, rc_node_affinity) + else: + affinity.nodeAffinity = rc_node_affinity + return affinity -def tolerations_from_resource_class(resource_class: crc_models.ResourceClass) -> list[Toleration]: + +def tolerations_from_resource_class( + resource_class: crc_models.ResourceClass, default_tolerations: list[Toleration] +) -> list[Toleration]: """Generate tolerations from the list of tolerations of a resource class.""" output: list[Toleration] = [] + output.extend(default_tolerations) for tol in resource_class.tolerations: output.append(Toleration(key=tol, operator="Exists")) return output - - -async def get_user_secret(data_svc_url: str, user: AuthenticatedAPIUser) -> str | None: - """Get the user secret key from the secret service.""" - - async with httpx.AsyncClient(timeout=5) as client: - response = await client.get( - f"{data_svc_url}/user/secret_key", - headers={"Authorization": f"Bearer {user.access_token}"}, - ) - if response.status_code != 200: - return None - user_key = response.json() - - return get_encryption_key(user_key["secret_key"].encode(), user.id.encode()).decode("utf-8") diff --git a/components/renku_data_services/platform/apispec.py b/components/renku_data_services/platform/apispec.py index efeb1840d..7fac7954c 100644 --- a/components/renku_data_services/platform/apispec.py +++ b/components/renku_data_services/platform/apispec.py @@ -1,21 +1,23 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2024-10-18T11:06:25+00:00 +# timestamp: 2025-03-19T10:21:15+00:00 from __future__ import annotations from typing import Optional -from pydantic import ConfigDict, Field +from pydantic import ConfigDict, Field, RootModel from renku_data_services.platform.apispec_base import BaseAPISpec class Error(BaseAPISpec): - code: int = Field(..., example=1404, gt=0) + code: int = Field(..., examples=[1404], gt=0) detail: Optional[str] = Field( - None, example="A more detailed optional message showing what the problem was" + None, examples=["A more detailed optional message showing what the problem was"] + ) + message: str = Field( + ..., examples=["Something went wrong - please try again later"] ) - message: str = Field(..., example="Something went wrong - please try again later") class ErrorResponse(BaseAPISpec): @@ -27,12 +29,12 @@ class PlatformConfig(BaseAPISpec): extra="forbid", ) etag: str = Field( - ..., description="Entity Tag", example="9EE498F9D565D0C41E511377425F32F3" + ..., description="Entity Tag", examples=["9EE498F9D565D0C41E511377425F32F3"] ) incident_banner: str = Field( ..., description="The contents of the incident banner.\nThis value accepts content written using Markdown.\n", - example="RenkuLab is experiencing issues, some features may be unavailable.", + examples=["RenkuLab is experiencing issues, some features 
may be unavailable."], ) @@ -43,5 +45,5 @@ class PlatformConfigPatch(BaseAPISpec): incident_banner: Optional[str] = Field( None, description="The contents of the incident banner.\nThis value accepts content written using Markdown.\n", - example="RenkuLab is experiencing issues, some features may be unavailable.", + examples=["RenkuLab is experiencing issues, some features may be unavailable."], ) diff --git a/components/renku_data_services/platform/models.py b/components/renku_data_services/platform/models.py index 04a31800b..59a933460 100644 --- a/components/renku_data_services/platform/models.py +++ b/components/renku_data_services/platform/models.py @@ -25,7 +25,7 @@ class PlatformConfig: @property def etag(self) -> str: """Entity tag value for this project object.""" - return compute_etag_from_timestamp(self.updated_at, include_quotes=True) + return compute_etag_from_timestamp(self.updated_at) @dataclass(frozen=True, eq=True, kw_only=True) diff --git a/components/renku_data_services/project/api.spec.yaml b/components/renku_data_services/project/api.spec.yaml index 31b4b8082..276f27668 100644 --- a/components/renku_data_services/project/api.spec.yaml +++ b/components/renku_data_services/project/api.spec.yaml @@ -148,6 +148,79 @@ paths: $ref: "#/components/responses/Error" tags: - projects + /renku_v1_projects/migrations: + get: + summary: Return list of projects migrated from v1 + responses: + "200": + description: List of project migrations + content: + application/json: + schema: + $ref: "#/components/schemas/ProjectMigrationList" + "404": + description: No exist project migration + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - projects_migrations + /renku_v1_projects/{v1_id}/migrations: + get: + summary: Check if a v1 project has been migrated to v2 + parameters: + - in: path + name: v1_id + required: true + description: The ID of the project in Renku v1 + schema: + type: integer + responses: + "200": + description: Project exists in v2 and has been migrated + content: + application/json: + schema: + $ref: "#/components/schemas/Project" + "404": + description: No corresponding project found in v2 + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - projects + post: + summary: Create a new project migrated from Renku v1 + parameters: + - in: path + name: v1_id + required: true + description: The ID of the Gitlab repository that represents the project in Renku v1 + schema: + type: integer + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/ProjectMigrationPost" + responses: + "201": + description: The project was created + content: + application/json: + schema: + $ref: "#/components/schemas/Project" + default: + $ref: "#/components/responses/Error" + tags: + - projects /namespaces/{namespace}/projects/{slug}: get: summary: Get a project by namespace and project slug @@ -254,6 +327,32 @@ paths: $ref: "#/components/responses/Error" tags: - projects + /projects/{project_id}/migration_info: + get: + summary: Check if a v2 project is a project migrated from v1 + parameters: + - in: path + name: project_id + required: true + schema: + $ref: "#/components/schemas/Ulid" + responses: + "200": + description: Project exists in v2 and is a migrated project from v1 + content: + application/json: + schema: + $ref: "#/components/schemas/ProjectMigrationInfo" 
+ "404": + description: No corresponding project migrated from v1 + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - projects /projects/{project_id}/members: get: summary: Get all members of a project @@ -364,27 +463,169 @@ paths: $ref: "#/components/responses/Error" tags: - projects - /projects/{project_id}/data_connector_links: + /projects/{project_id}/session_secret_slots: parameters: - in: path name: project_id required: true schema: $ref: "#/components/schemas/Ulid" - description: the ID of the project get: - summary: Get all links from data connectors to a given project + summary: Get the session secret slots of a project responses: "200": - description: List of data connector to project links + description: The list of session secret slots + content: + application/json: + schema: + $ref: "#/components/schemas/SessionSecretSlotList" + "404": + description: The project does not exist + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - projects + /projects/{project_id}/session_secrets: + parameters: + - in: path + name: project_id + required: true + schema: + $ref: "#/components/schemas/Ulid" + get: + summary: Get the current user's secrets of a project + responses: + "200": + description: The list of secrets + content: + application/json: + schema: + $ref: "#/components/schemas/SessionSecretList" + "404": + description: The project does not exist + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - projects + patch: + summary: Save user secrets for a project + description: New secrets will be added and existing secrets will have their value updated. Using `null` as a value will remove the corresponding secret. 
+ requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/SessionSecretPatchList" + responses: + "201": + description: The secrets for the project were saved content: application/json: schema: - $ref: "#/components/schemas/DataConnectorToProjectLinksList" + $ref: "#/components/schemas/SessionSecretList" default: $ref: "#/components/responses/Error" tags: - projects + delete: + summary: Remove all user secrets for a project + responses: + "204": + description: The secrets were removed or did not exist in the first place or the project doesn't exist + default: + $ref: "#/components/responses/Error" + tags: + - projects + /session_secret_slots: + post: + summary: Create a new session secret slot + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/SessionSecretSlotPost" + responses: + "201": + description: The secret slot was created + content: + application/json: + schema: + $ref: "#/components/schemas/SessionSecretSlot" + default: + $ref: "#/components/responses/Error" + tags: + - session_secret_slots + /session_secret_slots/{slot_id}: + parameters: + - in: path + name: slot_id + required: true + schema: + $ref: "#/components/schemas/Ulid" + get: + summary: Get the details of a session secret slot + responses: + "200": + description: The session secret slot + content: + application/json: + schema: + $ref: "#/components/schemas/SessionSecretSlot" + "404": + description: The secret slot does not exist + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - session_secret_slots + patch: + summary: Update specific fields of an existing session secret slot + parameters: + - $ref: "#/components/parameters/If-Match" + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/SessionSecretSlotPatch" + responses: + "200": + description: The patched session secret slot + content: + application/json: + schema: + $ref: "#/components/schemas/SessionSecretSlot" + "404": + description: The session secret slot does not exist + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - session_secret_slots + delete: + summary: Remove a session secret slot + responses: + "204": + description: The session secret slot was removed or did not exist in the first place + default: + $ref: "#/components/responses/Error" + tags: + - session_secret_slots components: schemas: ProjectsList: @@ -402,9 +643,9 @@ components: name: $ref: "#/components/schemas/ProjectName" namespace: - $ref: "#/components/schemas/Slug" + $ref: "#/components/schemas/SlugResponse" slug: - $ref: "#/components/schemas/LegacySlug" + $ref: "#/components/schemas/SlugResponse" creation_date: $ref: "#/components/schemas/CreationDate" created_by: @@ -428,6 +669,8 @@ components: is_template: $ref: "#/components/schemas/IsTemplate" default: False + secrets_mount_directory: + $ref: "#/components/schemas/SecretsMountDirectory" required: - "id" - "name" @@ -436,6 +679,7 @@ components: - "created_by" - "creation_date" - "visibility" + - "secrets_mount_directory" example: id: "01AN4Z79ZS5XN0F25N3DB94T4R" name: "Renku R Project" @@ -451,6 +695,7 @@ components: - "keyword 1" - "keyword 2" template_id: "01JC3CB5426KC7P5STS5X3KSS8" + secrets_mount_directory: "/secrets" ProjectPost: description: Project metadata to be created in Renku type: object @@ 
-475,6 +720,8 @@ components: $ref: "#/components/schemas/KeywordsList" documentation: $ref: "#/components/schemas/ProjectDocumentation" + secrets_mount_directory: + $ref: "#/components/schemas/SecretsMountDirectory" required: - name - namespace @@ -487,6 +734,8 @@ components: $ref: "#/components/schemas/ProjectName" namespace: $ref: "#/components/schemas/Slug" + slug: + $ref: "#/components/schemas/Slug" repositories: $ref: "#/components/schemas/RepositoriesList" visibility: @@ -508,6 +757,69 @@ components: maxLength: 0 is_template: $ref: "#/components/schemas/IsTemplate" + secrets_mount_directory: + $ref: "#/components/schemas/SecretsMountDirectoryPatch" + ProjectMigrationPost: + description: Project v1 data to be migrated in Renku + type: object + additionalProperties: false + properties: + project: + $ref: "#/components/schemas/ProjectPost" + session_launcher: + $ref: "#/components/schemas/MigrationSessionLauncherPost" + required: + - "project" + MigrationSessionLauncherPost: + description: Data required to create a session launcher for a project migrated + type: object + additionalProperties: false + properties: + name: + $ref: "#/components/schemas/SessionName" + container_image: + $ref: "#/components/schemas/ContainerImage" + default_url: + allOf: + - $ref: "#/components/schemas/DefaultUrl" + - default: /lab + default: /lab + resource_class_id: + $ref: "#/components/schemas/ResourceClassId" + disk_storage: + $ref: "#/components/schemas/DiskStorage" + required: + - name + - container_image + SessionName: + description: Renku session name + type: string + minLength: 1 + maxLength: 99 + example: My Renku Session :) + ContainerImage: + description: A container image + type: string + maxLength: 500 + # NOTE: regex for an image name, optionally with a tag or sha256 specified + # based on https://github.com/opencontainers/distribution-spec/blob/main/spec.md + pattern: "^[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*(\\/[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*)*(:[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}|@sha256:[a-fA-F0-9]{64}){0,1}$" + example: renku/renkulab-py:3.10-0.18.1 + DefaultUrl: + description: The default path to open in a session + type: string + maxLength: 200 + example: "/lab" + ResourceClassId: + description: The identifier of a resource class + type: integer + default: null + nullable: true + DiskStorage: + description: The size of disk storage for the session, in gigabytes + type: integer + minimum: 1 + example: 8 Ulid: description: ULID identifier type: string @@ -523,8 +835,7 @@ components: minLength: 1 maxLength: 99 example: "My Renku Project :)" - LegacySlug: - # Will be deprecated when we migrate all namespaces to have only lowercase chars + Slug: description: A command-line/url friendly name for a namespace type: string minLength: 1 @@ -535,21 +846,13 @@ components: # - cannot end in .atom # - cannot contain any combination of two or more consecutive -._ # - has to start with letter or number - pattern: '^(?!.*\.git$|.*\.atom$|.*[\-._][\-._].*)[a-zA-Z0-9][a-zA-Z0-9\-_.]*$' + # - cannot contain uppercase characters + pattern: '^(?!.*\.git$|.*\.atom$|.*[\-._][\-._].*)[a-z0-9][a-z0-9\-_.]*$' example: "a-slug-example" - Slug: + SlugResponse: description: A command-line/url friendly name for a namespace type: string minLength: 1 - maxLength: 99 - # Slug regex rules - # from https://docs.gitlab.com/ee/user/reserved_names.html#limitations-on-usernames-project-and-group-names - # - cannot end in .git - # - cannot end in .atom - # - cannot contain any combination of two or more consecutive -._ - # - has 
to start with letter or number - # - cannot contain uppercase characters - pattern: '^(?!.*\.git$|.*\.atom$|.*[\-._][\-._].*)[a-z0-9][a-z0-9\-_.]*$' example: "a-slug-example" CreationDate: description: The date and time the resource was created (in UTC and ISO-8601 format) @@ -570,7 +873,6 @@ components: type: string minLength: 1 maxLength: 99 - pattern: '^[A-Za-z0-9\s\-_.]*$' KeywordsList: description: Project keywords type: array @@ -596,9 +898,8 @@ components: - https://github.com/SwissDataScienceCenter/project-1.git - git@github.com:SwissDataScienceCenter/project-2.git Repository: - description: A project's repository + description: A git repository URL type: string - example: git@github.com:SwissDataScienceCenter/project-1.git Visibility: description: Project's visibility levels type: string @@ -608,6 +909,17 @@ components: IsTemplate: description: Shows if a project is a template or not type: boolean + SecretsMountDirectory: + description: | + The location where the secrets will be provided inside sessions, if left unset it will default to `/secrets`. + Relative locations are supported and will be mounted relative to the session environment's mount directory. + type: string + minLength: 1 + default: "/secrets" + example: "/secrets" + SecretsMountDirectoryPatch: + type: string + example: "/secrets" ProjectMemberListPatchRequest: description: List of members and their access level to the project type: array @@ -648,7 +960,7 @@ components: id: $ref: "#/components/schemas/UserId" namespace: - $ref: "#/components/schemas/LegacySlug" + $ref: "#/components/schemas/SlugResponse" first_name: $ref: "#/components/schemas/UserFirstLastName" last_name: @@ -719,6 +1031,26 @@ components: description: A flag to filter projects where the user is a direct member. 
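As a sanity check on the lowercase slug pattern above, a short sketch exercising the documented rules with Python's re module (which supports the negative lookahead):

    import re

    SLUG = re.compile(r"^(?!.*\.git$|.*\.atom$|.*[\-._][\-._].*)[a-z0-9][a-z0-9\-_.]*$")
    assert SLUG.match("a-slug-example")
    assert not SLUG.match("repo.git")       # cannot end in .git
    assert not SLUG.match("feed.atom")      # cannot end in .atom
    assert not SLUG.match("a--b")           # no two consecutive -, _ or . characters
    assert not SLUG.match("-leading-dash")  # has to start with a letter or number
    assert not SLUG.match("MixedCase")      # uppercase characters are rejected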
type: boolean default: false + ProjectMigrationList: + description: A list of project migrations + type: array + items: + $ref: "#/components/schemas/ProjectMigrationInfo" + minItems: 0 + ProjectMigrationInfo: + description: Information if a project is a migrated project + type: object + properties: + project_id: + $ref: "#/components/schemas/Ulid" + v1_id: + description: The id of the project in v1 + type: integer + launcher_id: + $ref: "#/components/schemas/Ulid" + required: + - v1_id + - project_id ProjectPermissions: description: The set of permissions on a project type: object @@ -732,6 +1064,126 @@ components: change_membership: description: The user can manage project members type: boolean + SessionSecretSlotList: + description: A list of session secret slots + type: array + items: + $ref: "#/components/schemas/SessionSecretSlot" + SessionSecretSlot: + description: A slot for a secret in a session + type: object + additionalProperties: false + properties: + id: + $ref: "#/components/schemas/Ulid" + project_id: + $ref: "#/components/schemas/Ulid" + name: + $ref: "#/components/schemas/SecretSlotName" + description: + $ref: "#/components/schemas/Description" + filename: + $ref: "#/components/schemas/SecretSlotFileName" + etag: + $ref: "#/components/schemas/ETag" + required: + - id + - project_id + - name + - filename + - etag + SessionSecretSlotPost: + type: object + additionalProperties: false + properties: + project_id: + $ref: "#/components/schemas/Ulid" + name: + $ref: "#/components/schemas/SecretSlotName" + description: + $ref: "#/components/schemas/Description" + filename: + $ref: "#/components/schemas/SecretSlotFileName" + required: + - project_id + - filename + SessionSecretSlotPatch: + type: object + additionalProperties: false + properties: + name: + $ref: "#/components/schemas/SecretSlotName" + description: + $ref: "#/components/schemas/Description" + filename: + $ref: "#/components/schemas/SecretSlotFileName" + SessionSecretList: + description: A list of session launcher secrets + type: array + items: + $ref: "#/components/schemas/SessionSecret" + SessionSecret: + description: A user's secret in a session launcher + type: object + additionalProperties: false + properties: + secret_slot: + $ref: "#/components/schemas/SessionSecretSlot" + secret_id: + $ref: "#/components/schemas/Ulid" + required: + - secret_slot + - secret_id + SessionSecretPatchList: + type: array + items: + $ref: "#/components/schemas/SessionSecretPatch" + SessionSecretPatch: + allOf: + - type: object + additionalProperties: false + properties: + secret_slot_id: + $ref: "#/components/schemas/Ulid" + required: + - secret_slot_id + - oneOf: + - $ref: "#/components/schemas/SessionSecretPatchExistingSecret" + - $ref: "#/components/schemas/SessionSecretPatchSecretValue" + SessionSecretPatchExistingSecret: + type: object + additionalProperties: false + properties: + secret_id: + $ref: "#/components/schemas/Ulid" + required: + - secret_id + SessionSecretPatchSecretValue: + type: object + additionalProperties: false + properties: + value: + $ref: "#/components/schemas/SecretValueNullable" + SecretSlotName: + description: The name of a secret slot + type: string + minLength: 1 + maxLength: 99 + example: API Token + SecretSlotFileName: + description: The filename given to the corresponding secret in the session + type: string + minLength: 1 + maxLength: 200 + pattern: "^[a-zA-Z0-9_\\-.]+$" + example: api_token + SecretValueNullable: + description: Secret value that can be any text + type: string + minLength: 1 + 
maxLength: 5000 + nullable: true + example: My secret value PaginationRequest: type: object additionalProperties: false diff --git a/components/renku_data_services/project/apispec.py b/components/renku_data_services/project/apispec.py index a519c4f6e..6bb149cb3 100644 --- a/components/renku_data_services/project/apispec.py +++ b/components/renku_data_services/project/apispec.py @@ -1,25 +1,19 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2024-11-25T14:22:47+00:00 +# timestamp: 2025-05-06T08:33:41+00:00 from __future__ import annotations from datetime import datetime from enum import Enum -from typing import List, Optional +from typing import List, Optional, Union from pydantic import ConfigDict, Field, RootModel from renku_data_services.project.apispec_base import BaseAPISpec class Keyword(RootModel[str]): - root: str = Field( - ..., - description="A single keyword", - max_length=99, - min_length=1, - pattern="^[A-Za-z0-9\\s\\-_.]*$", - ) + root: str = Field(..., description="A single keyword", max_length=99, min_length=1) class Visibility(Enum): @@ -61,16 +55,34 @@ class DataConnectorToProjectLink(BaseAPISpec): creation_date: datetime = Field( ..., description="The date and time the resource was created (in UTC and ISO-8601 format)", - example="2023-11-01T17:32:28Z", + examples=["2023-11-01T17:32:28Z"], ) created_by: str = Field( ..., description="Keycloak user ID", - example="f74a228b-1790-4276-af5f-25c2424e9b0c", + examples=["f74a228b-1790-4276-af5f-25c2424e9b0c"], pattern="^[A-Za-z0-9]{1}[A-Za-z0-9-]+$", ) +class ProjectMigrationInfo(BaseAPISpec): + project_id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + v1_id: int = Field(..., description="The id of the project in v1") + launcher_id: Optional[str] = Field( + None, + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + + class ProjectPermissions(BaseAPISpec): write: Optional[bool] = Field(None, description="The user can edit the project") delete: Optional[bool] = Field(None, description="The user can delete the project") @@ -79,6 +91,29 @@ class ProjectPermissions(BaseAPISpec): ) +class SessionSecretPatch1(BaseAPISpec): + secret_slot_id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + + +class SessionSecretPatchExistingSecret(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + secret_id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + + class PaginationRequest(BaseAPISpec): model_config = ConfigDict( extra="forbid", @@ -90,31 +125,68 @@ class PaginationRequest(BaseAPISpec): class Error(BaseAPISpec): - code: int = Field(..., example=1404, gt=0) + code: int = Field(..., examples=[1404], gt=0) detail: Optional[str] = Field( - None, example="A more detailed optional message showing what the problem was" + None, examples=["A more detailed optional message showing what the problem was"] + ) + message: str = Field( + ..., examples=["Something went wrong - please try again later"] ) - message: str = Field(..., example="Something went wrong - please try again later") class ErrorResponse(BaseAPISpec): error: Error -class ProjectsProjectIdGetParametersQuery(BaseAPISpec): +class NamespacesNamespaceProjectsSlugGetParametersQuery(BaseAPISpec): with_documentation: Optional[bool] = 
Field( None, description="Projects with or without possibly extensive documentation?" ) -class NamespacesNamespaceProjectsSlugGetParametersQuery(BaseAPISpec): +class ProjectsProjectIdCopiesGetParametersQuery(BaseAPISpec): + writable: bool = False + + +class ProjectsProjectIdGetParametersQuery(BaseAPISpec): with_documentation: Optional[bool] = Field( None, description="Projects with or without possibly extensive documentation?" ) -class ProjectsProjectIdCopiesGetParametersQuery(BaseAPISpec): - writable: bool = False +class MigrationSessionLauncherPost(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + name: str = Field( + ..., + description="Renku session name", + examples=["My Renku Session :)"], + max_length=99, + min_length=1, + ) + container_image: str = Field( + ..., + description="A container image", + examples=["renku/renkulab-py:3.10-0.18.1"], + max_length=500, + pattern="^[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*(\\/[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*)*(:[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}|@sha256:[a-fA-F0-9]{64}){0,1}$", + ) + default_url: str = Field( + "/lab", + description="The default path to open in a session", + examples=["/lab"], + max_length=200, + ) + resource_class_id: Optional[int] = Field( + None, description="The identifier of a resource class" + ) + disk_storage: Optional[int] = Field( + None, + description="The size of disk storage for the session, in gigabytes", + examples=[8], + ge=1, + ) class ProjectMemberPatchRequest(BaseAPISpec): @@ -124,7 +196,7 @@ class ProjectMemberPatchRequest(BaseAPISpec): id: str = Field( ..., description="Keycloak user ID", - example="f74a228b-1790-4276-af5f-25c2424e9b0c", + examples=["f74a228b-1790-4276-af5f-25c2424e9b0c"], pattern="^[A-Za-z0-9]{1}[A-Za-z0-9-]+$", ) role: Role @@ -137,28 +209,26 @@ class ProjectMemberResponse(BaseAPISpec): id: str = Field( ..., description="Keycloak user ID", - example="f74a228b-1790-4276-af5f-25c2424e9b0c", + examples=["f74a228b-1790-4276-af5f-25c2424e9b0c"], pattern="^[A-Za-z0-9]{1}[A-Za-z0-9-]+$", ) namespace: Optional[str] = Field( None, description="A command-line/url friendly name for a namespace", - example="a-slug-example", - max_length=99, + examples=["a-slug-example"], min_length=1, - pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-zA-Z0-9][a-zA-Z0-9\\-_.]*$", ) first_name: Optional[str] = Field( None, description="First or last name of the user", - example="John", + examples=["John"], max_length=256, min_length=1, ) last_name: Optional[str] = Field( None, description="First or last name of the user", - example="John", + examples=["John"], max_length=256, min_length=1, ) @@ -179,10 +249,148 @@ class ProjectGetQuery(PaginationRequest): ) +class ProjectMigrationList(RootModel[List[ProjectMigrationInfo]]): + root: List[ProjectMigrationInfo] = Field( + ..., description="A list of project migrations", min_length=0 + ) + + +class SessionSecretSlot(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + project_id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + name: str = Field( + ..., + description="The name of a secret slot", + examples=["API Token"], + max_length=99, + min_length=1, + ) + description: Optional[str] = Field( + None, description="A description for the resource", max_length=500 + ) + filename: str = Field( + ..., + description="The 
filename given to the corresponding secret in the session", + examples=["api_token"], + max_length=200, + min_length=1, + pattern="^[a-zA-Z0-9_\\-.]+$", + ) + etag: str = Field( + ..., description="Entity Tag", examples=["9EE498F9D565D0C41E511377425F32F3"] + ) + + +class SessionSecretSlotPost(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + project_id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + name: Optional[str] = Field( + None, + description="The name of a secret slot", + examples=["API Token"], + max_length=99, + min_length=1, + ) + description: Optional[str] = Field( + None, description="A description for the resource", max_length=500 + ) + filename: str = Field( + ..., + description="The filename given to the corresponding secret in the session", + examples=["api_token"], + max_length=200, + min_length=1, + pattern="^[a-zA-Z0-9_\\-.]+$", + ) + + +class SessionSecretSlotPatch(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + name: Optional[str] = Field( + None, + description="The name of a secret slot", + examples=["API Token"], + max_length=99, + min_length=1, + ) + description: Optional[str] = Field( + None, description="A description for the resource", max_length=500 + ) + filename: Optional[str] = Field( + None, + description="The filename given to the corresponding secret in the session", + examples=["api_token"], + max_length=200, + min_length=1, + pattern="^[a-zA-Z0-9_\\-.]+$", + ) + + +class SessionSecret(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + secret_slot: SessionSecretSlot + secret_id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + + +class SessionSecretPatch2(SessionSecretPatchExistingSecret, SessionSecretPatch1): + pass + + +class SessionSecretPatchSecretValue(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + value: Optional[str] = Field( + None, + description="Secret value that can be any text", + examples=["My secret value"], + max_length=5000, + min_length=1, + ) + + class ProjectsGetParametersQuery(BaseAPISpec): params: Optional[ProjectGetQuery] = None +class RenkuV1ProjectsMigrationsGetParametersQuery(BaseAPISpec): + """This class no longer includes any parameters.""" + pass + + class Project(BaseAPISpec): id: str = Field( ..., @@ -194,48 +402,46 @@ class Project(BaseAPISpec): name: str = Field( ..., description="Renku project name", - example="My Renku Project :)", + examples=["My Renku Project :)"], max_length=99, min_length=1, ) namespace: str = Field( ..., description="A command-line/url friendly name for a namespace", - example="a-slug-example", - max_length=99, + examples=["a-slug-example"], min_length=1, - pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-z0-9][a-z0-9\\-_.]*$", ) slug: str = Field( ..., description="A command-line/url friendly name for a namespace", - example="a-slug-example", - max_length=99, + examples=["a-slug-example"], min_length=1, - pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-zA-Z0-9][a-zA-Z0-9\\-_.]*$", ) creation_date: datetime = Field( ..., description="The date and time the resource was created (in UTC and ISO-8601 format)", - example="2023-11-01T17:32:28Z", + examples=["2023-11-01T17:32:28Z"], ) created_by: str = Field( ..., description="Keycloak user ID", - example="f74a228b-1790-4276-af5f-25c2424e9b0c", + examples=["f74a228b-1790-4276-af5f-25c2424e9b0c"], 
pattern="^[A-Za-z0-9]{1}[A-Za-z0-9-]+$", ) updated_at: Optional[datetime] = Field( None, description="The date and time the resource was updated (in UTC and ISO-8601 format)", - example="2023-11-01T17:32:28Z", + examples=["2023-11-01T17:32:28Z"], ) repositories: Optional[List[str]] = Field( None, description="A list of repositories", - example=[ - "https://github.com/SwissDataScienceCenter/project-1.git", - "git@github.com:SwissDataScienceCenter/project-2.git", + examples=[ + [ + "https://github.com/SwissDataScienceCenter/project-1.git", + "git@github.com:SwissDataScienceCenter/project-2.git", + ] ], min_length=0, ) @@ -244,18 +450,18 @@ class Project(BaseAPISpec): None, description="A description for the resource", max_length=500 ) etag: Optional[str] = Field( - None, description="Entity Tag", example="9EE498F9D565D0C41E511377425F32F3" + None, description="Entity Tag", examples=["9EE498F9D565D0C41E511377425F32F3"] ) keywords: Optional[List[Keyword]] = Field( None, description="Project keywords", - example=["project", "keywords"], + examples=[["project", "keywords"]], min_length=0, ) documentation: Optional[str] = Field( None, description="Renku project documentation", - example="My Renku Project Documentation :)", + examples=["My Renku Project Documentation :)"], max_length=5000, min_length=0, ) @@ -269,6 +475,12 @@ class Project(BaseAPISpec): is_template: bool = Field( False, description="Shows if a project is a template or not" ) + secrets_mount_directory: str = Field( + ..., + description="The location where the secrets will be provided inside sessions, if left unset it will default to `/secrets`.\nRelative locations are supported and will be mounted relative to the session environment's mount directory.\n", + examples=["/secrets"], + min_length=1, + ) class ProjectPost(BaseAPISpec): @@ -278,14 +490,14 @@ class ProjectPost(BaseAPISpec): name: str = Field( ..., description="Renku project name", - example="My Renku Project :)", + examples=["My Renku Project :)"], max_length=99, min_length=1, ) namespace: str = Field( ..., description="A command-line/url friendly name for a namespace", - example="a-slug-example", + examples=["a-slug-example"], max_length=99, min_length=1, pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-z0-9][a-z0-9\\-_.]*$", @@ -293,7 +505,7 @@ class ProjectPost(BaseAPISpec): slug: Optional[str] = Field( None, description="A command-line/url friendly name for a namespace", - example="a-slug-example", + examples=["a-slug-example"], max_length=99, min_length=1, pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-z0-9][a-z0-9\\-_.]*$", @@ -301,9 +513,11 @@ class ProjectPost(BaseAPISpec): repositories: Optional[List[str]] = Field( None, description="A list of repositories", - example=[ - "https://github.com/SwissDataScienceCenter/project-1.git", - "git@github.com:SwissDataScienceCenter/project-2.git", + examples=[ + [ + "https://github.com/SwissDataScienceCenter/project-1.git", + "git@github.com:SwissDataScienceCenter/project-2.git", + ] ], min_length=0, ) @@ -314,16 +528,22 @@ class ProjectPost(BaseAPISpec): keywords: Optional[List[Keyword]] = Field( None, description="Project keywords", - example=["project", "keywords"], + examples=[["project", "keywords"]], min_length=0, ) documentation: Optional[str] = Field( None, description="Renku project documentation", - example="My Renku Project Documentation :)", + examples=["My Renku Project Documentation :)"], max_length=5000, min_length=0, ) + secrets_mount_directory: Optional[str] = Field( + "/secrets", + 
description="The location where the secrets will be provided inside sessions, if left unset it will default to `/secrets`.\nRelative locations are supported and will be mounted relative to the session environment's mount directory.\n", + examples=["/secrets"], + min_length=1, + ) class ProjectPatch(BaseAPISpec): @@ -333,14 +553,22 @@ class ProjectPatch(BaseAPISpec): name: Optional[str] = Field( None, description="Renku project name", - example="My Renku Project :)", + examples=["My Renku Project :)"], max_length=99, min_length=1, ) namespace: Optional[str] = Field( None, description="A command-line/url friendly name for a namespace", - example="a-slug-example", + examples=["a-slug-example"], + max_length=99, + min_length=1, + pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-z0-9][a-z0-9\\-_.]*$", + ) + slug: Optional[str] = Field( + None, + description="A command-line/url friendly name for a namespace", + examples=["a-slug-example"], max_length=99, min_length=1, pattern="^(?!.*\\.git$|.*\\.atom$|.*[\\-._][\\-._].*)[a-z0-9][a-z0-9\\-_.]*$", @@ -348,9 +576,11 @@ class ProjectPatch(BaseAPISpec): repositories: Optional[List[str]] = Field( None, description="A list of repositories", - example=[ - "https://github.com/SwissDataScienceCenter/project-1.git", - "git@github.com:SwissDataScienceCenter/project-2.git", + examples=[ + [ + "https://github.com/SwissDataScienceCenter/project-1.git", + "git@github.com:SwissDataScienceCenter/project-2.git", + ] ], min_length=0, ) @@ -361,13 +591,13 @@ class ProjectPatch(BaseAPISpec): keywords: Optional[List[Keyword]] = Field( None, description="Project keywords", - example=["project", "keywords"], + examples=[["project", "keywords"]], min_length=0, ) documentation: Optional[str] = Field( None, description="Renku project documentation", - example="My Renku Project Documentation :)", + examples=["My Renku Project Documentation :)"], max_length=5000, min_length=0, ) @@ -380,15 +610,26 @@ class ProjectPatch(BaseAPISpec): is_template: Optional[bool] = Field( None, description="Shows if a project is a template or not" ) + secrets_mount_directory: Optional[str] = Field(None, examples=["/secrets"]) + + +class ProjectMigrationPost(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + project: ProjectPost + session_launcher: Optional[MigrationSessionLauncherPost] = None class ProjectMemberListPatchRequest(RootModel[List[ProjectMemberPatchRequest]]): root: List[ProjectMemberPatchRequest] = Field( ..., description="List of members and their access level to the project", - example=[ - {"id": "some-keycloak-user-id", "role": "owner"}, - {"id": "another-keycloak-user-id", "role": "viewer"}, + examples=[ + [ + {"id": "some-keycloak-user-id", "role": "owner"}, + {"id": "another-keycloak-user-id", "role": "viewer"}, + ] ], min_length=0, ) @@ -402,7 +643,29 @@ class ProjectMemberListResponse(RootModel[List[ProjectMemberResponse]]): ) +class SessionSecretSlotList(RootModel[List[SessionSecretSlot]]): + root: List[SessionSecretSlot] = Field( + ..., description="A list of session secret slots" + ) + + +class SessionSecretList(RootModel[List[SessionSecret]]): + root: List[SessionSecret] = Field( + ..., description="A list of session launcher secrets" + ) + + +class SessionSecretPatch3(SessionSecretPatchSecretValue, SessionSecretPatch1): + pass + + class ProjectsList(RootModel[List[Project]]): root: List[Project] = Field( ..., description="A list of Renku projects", min_length=0 ) + + +class SessionSecretPatchList( + RootModel[List[Union[SessionSecretPatch2, 
SessionSecretPatch3]]] +): + root: List[Union[SessionSecretPatch2, SessionSecretPatch3]] diff --git a/components/renku_data_services/project/apispec_base.py b/components/renku_data_services/project/apispec_base.py index f43da8ec0..e74d180cb 100644 --- a/components/renku_data_services/project/apispec_base.py +++ b/components/renku_data_services/project/apispec_base.py @@ -1,5 +1,7 @@ """Base models for API specifications.""" +from typing import Any + from pydantic import BaseModel, field_validator from ulid import ULID @@ -15,8 +17,10 @@ class Config: # this rust crate does not support lookahead regex syntax but we need it in this component regex_engine = "python-re" - @field_validator("id", "template_id", mode="before", check_fields=False) + @field_validator("*", mode="before", check_fields=False) @classmethod - def serialize_ulid_fields(cls, value: str | ULID | None) -> str | None: - """Custom serializer that can handle ULIDs.""" - return None if value is None else str(value) + def serialize_ulid(cls, value: Any) -> Any: + """Handle ULIDs.""" + if isinstance(value, ULID): + return str(value) + return value diff --git a/components/renku_data_services/project/blueprints.py b/components/renku_data_services/project/blueprints.py index 6c3d7f5fc..712df1512 100644 --- a/components/renku_data_services/project/blueprints.py +++ b/components/renku_data_services/project/blueprints.py @@ -9,7 +9,7 @@ from ulid import ULID import renku_data_services.base_models as base_models -from renku_data_services.authz.models import Member, Role, Visibility +from renku_data_services.authz.models import Change, Member, Role, Visibility from renku_data_services.base_api.auth import ( authenticate, only_authenticated, @@ -19,13 +19,27 @@ from renku_data_services.base_api.etag import extract_if_none_match, if_match_required from renku_data_services.base_api.misc import validate_body_root_model, validate_query from renku_data_services.base_api.pagination import PaginationRequest, paginate +from renku_data_services.base_models.core import Slug +from renku_data_services.base_models.metrics import MetricsService, ProjectCreationType from renku_data_services.base_models.validation import validate_and_dump, validated_json -from renku_data_services.data_connectors.db import DataConnectorProjectLinkRepository +from renku_data_services.data_connectors.db import DataConnectorRepository from renku_data_services.errors import errors from renku_data_services.project import apispec from renku_data_services.project import models as project_models -from renku_data_services.project.core import copy_project, validate_project_patch -from renku_data_services.project.db import ProjectMemberRepository, ProjectRepository +from renku_data_services.project.core import ( + copy_project, + validate_project_patch, + validate_session_secret_slot_patch, + validate_session_secrets_patch, + validate_unsaved_project, + validate_unsaved_session_secret_slot, +) +from renku_data_services.project.db import ( + ProjectMemberRepository, + ProjectMigrationRepository, + ProjectRepository, + ProjectSessionSecretRepository, +) from renku_data_services.session.db import SessionRepository from renku_data_services.users.db import UserRepo @@ -39,7 +53,9 @@ class ProjectsBP(CustomBlueprint): user_repo: UserRepo authenticator: base_models.Authenticator session_repo: SessionRepository - data_connector_to_project_link_repo: DataConnectorProjectLinkRepository + data_connector_repo: DataConnectorRepository + project_migration_repo: ProjectMigrationRepository + 
metrics: MetricsService def get_all(self) -> BlueprintFactoryResponse: """List all projects.""" @@ -64,24 +80,85 @@ def post(self) -> BlueprintFactoryResponse: @only_authenticated @validate(json=apispec.ProjectPost) async def _post(_: Request, user: base_models.APIUser, body: apispec.ProjectPost) -> JSONResponse: - keywords = [kw.root for kw in body.keywords] if body.keywords is not None else [] - visibility = Visibility.PRIVATE if body.visibility is None else Visibility(body.visibility.value) - project = project_models.UnsavedProject( - name=body.name, - namespace=body.namespace, - slug=body.slug or base_models.Slug.from_name(body.name).value, - description=body.description, - repositories=body.repositories or [], - created_by=user.id, # type: ignore[arg-type] - visibility=visibility, - keywords=keywords, - documentation=body.documentation, - ) - result = await self.project_repo.insert_project(user, project) + new_project = validate_unsaved_project(body, created_by=user.id or "") + result = await self.project_repo.insert_project(user, new_project) + await self.metrics.project_created(user, metadata={"project_creation_kind": ProjectCreationType.new.value}) + if len(result.repositories) > 0: + await self.metrics.code_repo_linked_to_project(user) return validated_json(apispec.Project, self._dump_project(result), status=201) return "/projects", ["POST"], _post
+ def get_all_migrations(self) -> BlueprintFactoryResponse: + """List all project migrations.""" + + @authenticate(self.authenticator) + @only_authenticated + async def _get_all_migrations(_: Request, user: base_models.APIUser) -> JSONResponse: + project_migrations = self.project_migration_repo.get_project_migrations(user=user) + + migrations_list = [] + async for migration in project_migrations: + migrations_list.append(self._dump_project_migration(migration)) + + return validated_json(apispec.ProjectMigrationList, migrations_list) + + return "/renku_v1_projects/migrations", ["GET"], _get_all_migrations
+ + def get_migration(self) -> BlueprintFactoryResponse: + """Get project migration by project v1 id.""" + + @authenticate(self.authenticator) + async def _get_migration(_: Request, user: base_models.APIUser, v1_id: int) -> JSONResponse: + project = await self.project_migration_repo.get_migration_by_v1_id(user, v1_id) + project_dump = self._dump_project(project) + return validated_json(apispec.Project, project_dump) + + return "/renku_v1_projects/<v1_id:int>/migrations", ["GET"], _get_migration
+ + def post_migration(self) -> BlueprintFactoryResponse: + """Migrate v1 project.""" + + @authenticate(self.authenticator) + @only_authenticated + @validate(json=apispec.ProjectMigrationPost) + async def _post_migration( + _: Request, user: base_models.APIUser, v1_id: int, body: apispec.ProjectMigrationPost + ) -> JSONResponse: + new_project = validate_unsaved_project(body.project, created_by=user.id or "") + + result = await self.project_migration_repo.migrate_v1_project( + user, project=new_project, project_v1_id=v1_id, session_launcher=body.session_launcher + ) + await self.metrics.project_created( + user, metadata={"project_creation_kind": ProjectCreationType.migrated.value} + ) + return validated_json(apispec.Project, self._dump_project(result), status=201) + + return "/renku_v1_projects/<v1_id:int>/migrations", ["POST"], _post_migration
+ + def get_project_migration_info(self) -> BlueprintFactoryResponse: + """Get project migration by project v2 id.""" + + @authenticate(self.authenticator) + @only_authenticated + async def _get_project_migration_info( + _: Request, user: base_models.APIUser, project_id: ULID + ) -> JSONResponse | HTTPResponse: + migration_info = await self.project_migration_repo.get_migration_by_project_id(user, project_id) + + if migration_info and isinstance(migration_info, project_models.ProjectMigrationInfo): + dump_migration_info = dict( + project_id=migration_info.project_id, + v1_id=migration_info.v1_id, + launcher_id=migration_info.launcher_id, + ) + return validated_json(apispec.ProjectMigrationInfo, dump_migration_info) + + return HTTPResponse(status=404) + + return "/projects/<project_id:ulid>/migration_info", ["GET"], _get_project_migration_info
+ def copy(self) -> BlueprintFactoryResponse: """Create a new project by copying it from a template project.""" @@ -92,7 +169,7 @@ async def _copy( _: Request, user: base_models.APIUser, project_id: ULID, body: apispec.ProjectPost ) -> JSONResponse: project = await copy_project( - project_id=project_id, + source_project_id=project_id, user=user, name=body.name, namespace=body.namespace, @@ -101,9 +178,13 @@ async def _copy( repositories=body.repositories, visibility=Visibility(body.visibility.value) if body.visibility is not None else None, keywords=[kw.root for kw in body.keywords] if body.keywords is not None else [], + secrets_mount_directory=body.secrets_mount_directory, project_repo=self.project_repo, session_repo=self.session_repo, - data_connector_to_project_link_repo=self.data_connector_to_project_link_repo, + data_connector_repo=self.data_connector_repo, + ) + await self.metrics.project_created( + user, metadata={"project_creation_kind": ProjectCreationType.copied.value} ) return validated_json(apispec.Project, self._dump_project(project), status=201)
@@ -167,7 +248,7 @@ async def _get_one_by_namespace_slug( _: Request, user: base_models.APIUser, namespace: str, - slug: str, + slug: Slug, etag: str | None, query: apispec.NamespacesNamespaceProjectsSlugGetParametersQuery, ) -> JSONResponse | HTTPResponse: @@ -219,6 +300,8 @@ async def _patch( message="Expected the result of a project update to be ProjectUpdate but instead " f"got {type(project_update)}" ) + if len(project_update.new.repositories) > len(project_update.old.repositories): + await self.metrics.code_repo_linked_to_project(user) updated_project = project_update.new return validated_json(apispec.Project, self._dump_project(updated_project))
@@ -243,7 +326,7 @@ async def _get_all_members(_: Request, user: base_models.APIUser, project_id: UL user_with_id = apispec.ProjectMemberResponse( id=user_id, - namespace=namespace_info.slug, + namespace=namespace_info.path.first.value, first_name=user_info.first_name, last_name=user_info.last_name, role=apispec.Role(member.role.value), @@ -263,7 +346,11 @@ async def _update_members( _: Request, user: base_models.APIUser, project_id: ULID, body: apispec.ProjectMemberListPatchRequest ) -> HTTPResponse: members = [Member(Role(i.role.value), i.id, project_id) for i in body.root] - await self.project_member_repo.update_members(user, project_id, members) + result = await self.project_member_repo.update_members(user, project_id, members) + + if any(c.change == Change.ADD for c in result): + await self.metrics.project_member_added(user) + return HTTPResponse(status=200) return "/projects/<project_id:ulid>/members", ["PATCH"], _update_members
@@ -297,7 +384,7 @@ def _dump_project(project: project_models.Project, with_documentation: bool = Fa result = dict( id=project.id, name=project.name, - namespace=project.namespace.slug, + namespace=project.namespace.path.serialize(), slug=project.slug, creation_date=project.creation_date.isoformat(), created_by=project.created_by, @@ -309,7 +396,146 @@ def _dump_project(project: project_models.Project, with_documentation: bool = Fa keywords=project.keywords or [], template_id=project.template_id, is_template=project.is_template, + secrets_mount_directory=str(project.secrets_mount_directory), ) if with_documentation: result = dict(result, documentation=project.documentation) return result
+ + @staticmethod + def _dump_project_migration(project_migration: project_models.ProjectMigrationInfo) -> dict[str, Any]: + """Dumps a project migration for API responses.""" + result = dict( + project_id=project_migration.project_id, + v1_id=project_migration.v1_id, + launcher_id=project_migration.launcher_id, + ) + return result
+ + +@dataclass(kw_only=True) +class ProjectSessionSecretBP(CustomBlueprint): + """Handlers for manipulating session secrets in a project.""" + + session_secret_repo: ProjectSessionSecretRepository + authenticator: base_models.Authenticator
+ + def get_session_secret_slots(self) -> BlueprintFactoryResponse: + """Get the session secret slots of a project.""" + + @authenticate(self.authenticator) + async def _get_session_secret_slots(_: Request, user: base_models.APIUser, project_id: ULID) -> JSONResponse: + secret_slots = await self.session_secret_repo.get_all_session_secret_slots_from_project( + user=user, project_id=project_id + ) + return validated_json(apispec.SessionSecretSlotList, secret_slots) + + return "/projects/<project_id:ulid>/session_secret_slots", ["GET"], _get_session_secret_slots
+ + def post_session_secret_slot(self) -> BlueprintFactoryResponse: + """Create a new session secret slot on a project.""" + + @authenticate(self.authenticator) + @only_authenticated + @validate(json=apispec.SessionSecretSlotPost) + async def _post_session_secret_slot( + _: Request, user: base_models.APIUser, body: apispec.SessionSecretSlotPost + ) -> JSONResponse: + unsaved_secret_slot = validate_unsaved_session_secret_slot(body) + secret_slot = await self.session_secret_repo.insert_session_secret_slot( + user=user, secret_slot=unsaved_secret_slot + ) + return validated_json(apispec.SessionSecretSlot, secret_slot, status=201) + + return "/session_secret_slots", ["POST"], _post_session_secret_slot
+ + def get_session_secret_slot(self) -> BlueprintFactoryResponse: + """Get the details of a session secret slot.""" + + @authenticate(self.authenticator) + @extract_if_none_match + async def _get_session_secret_slot( + _: Request, user: base_models.APIUser, slot_id: ULID, etag: str | None + ) -> HTTPResponse: + secret_slot = await self.session_secret_repo.get_session_secret_slot(user=user, slot_id=slot_id) + + if secret_slot.etag == etag: + return HTTPResponse(status=304) + + return validated_json(apispec.SessionSecretSlot, secret_slot) + + return "/session_secret_slots/<slot_id:ulid>", ["GET"], _get_session_secret_slot
+ + def patch_session_secret_slot(self) -> BlueprintFactoryResponse: + """Update specific fields of an existing session secret slot.""" + + @authenticate(self.authenticator) + @only_authenticated + @if_match_required + @validate(json=apispec.SessionSecretSlotPatch) + async def _patch_session_secret_slot( + _: Request, + user: base_models.APIUser, + slot_id: ULID, + body: apispec.SessionSecretSlotPatch, + etag: str, + ) -> JSONResponse: + secret_slot_patch = validate_session_secret_slot_patch(body) + secret_slot = await self.session_secret_repo.update_session_secret_slot( + user=user, slot_id=slot_id, patch=secret_slot_patch, etag=etag + ) + return validated_json(apispec.SessionSecretSlot, secret_slot) + + return "/session_secret_slots/<slot_id:ulid>", ["PATCH"], _patch_session_secret_slot
+ + def delete_session_secret_slot(self) -> BlueprintFactoryResponse: + """Remove a session secret slot.""" + + @authenticate(self.authenticator) + @only_authenticated + async def _delete_session_secret_slot(_: Request, user: base_models.APIUser, slot_id: ULID) -> HTTPResponse: + await self.session_secret_repo.delete_session_secret_slot(user=user, slot_id=slot_id) + return HTTPResponse(status=204) + + return "/session_secret_slots/<slot_id:ulid>", ["DELETE"], _delete_session_secret_slot
+ + def get_session_secrets(self) -> BlueprintFactoryResponse: + """Get the current user's secrets of a project.""" + + @authenticate(self.authenticator) + @only_authenticated + async def _get_session_secrets(_: Request, user: base_models.APIUser, project_id: ULID) -> JSONResponse: + secrets = await self.session_secret_repo.get_all_session_secrets_from_project( + user=user, project_id=project_id + ) + return validated_json(apispec.SessionSecretList, secrets) + + return "/projects/<project_id:ulid>/session_secrets", ["GET"], _get_session_secrets
+ + def patch_session_secrets(self) -> BlueprintFactoryResponse: + """Save user secrets for a project.""" + + @authenticate(self.authenticator) + @only_authenticated + @validate_body_root_model(json=apispec.SessionSecretPatchList) + async def _patch_session_secrets( + _: Request, user: base_models.APIUser, project_id: ULID, body: apispec.SessionSecretPatchList + ) -> JSONResponse: + secrets_patch = validate_session_secrets_patch(body) + secrets = await self.session_secret_repo.patch_session_secrets( + user=user, project_id=project_id, secrets=secrets_patch + ) + return validated_json(apispec.SessionSecretList, secrets) + + return "/projects/<project_id:ulid>/session_secrets", ["PATCH"], _patch_session_secrets
+ + def delete_session_secrets(self) -> BlueprintFactoryResponse: + """Remove all user secrets for a project.""" + + @authenticate(self.authenticator) + @only_authenticated + async def _delete_session_secrets(_: Request, user: base_models.APIUser, project_id: ULID) -> HTTPResponse: + await self.session_secret_repo.delete_session_secrets(user=user, project_id=project_id) + return HTTPResponse(status=204) + + return "/projects/<project_id:ulid>/session_secrets", ["DELETE"], _delete_session_secrets
diff --git a/components/renku_data_services/project/constants.py b/components/renku_data_services/project/constants.py new file mode 100644 index 000000000..8dddc1c51 --- /dev/null +++ b/components/renku_data_services/project/constants.py @@ -0,0 +1,41 @@ +"""Constant values used for projects.""" + +from pathlib import PurePosixPath +from typing import Final + +DEFAULT_SESSION_SECRETS_MOUNT_DIR_STR: Final[str] = "/secrets" +"""The default location where the secrets will be provided inside sessions, as a string.""" + +DEFAULT_SESSION_SECRETS_MOUNT_DIR: Final[PurePosixPath] = PurePosixPath(DEFAULT_SESSION_SECRETS_MOUNT_DIR_STR) +"""The default location where the secrets will be provided inside sessions.""" + +MIGRATION_ARGS: Final[list[str]] = [ + "jupyter server --ServerApp.ip=$RENKU_SESSION_IP " + "--ServerApp.port=$RENKU_SESSION_PORT " + "--ServerApp.allow_origin=* " + "--ServerApp.base_url=$RENKU_BASE_URL_PATH " + "--ServerApp.root_dir=$RENKU_WORKING_DIR " + "--ServerApp.allow_remote_access=True " + "--ContentsManager.allow_hidden=True " + '--ServerApp.token="" ' + '--ServerApp.password=""' +] +"""The command-line arguments for migrating a v1 project.""" + +MIGRATION_COMMAND: Final[list[str]] = ["sh",
"-c"] +"""The command to run for migrating the v1 project.""" + +MIGRATION_PORT: Final[int] = 8888 +"""The port to use for migrating the v1 project.""" + +MIGRATION_WORKING_DIRECTORY: Final[str] = "/home/jovyan/work" +"""The working directory for migrating the v1 project.""" + +MIGRATION_MOUNT_DIRECTORY: Final[str] = "/home/jovyan/work" +"""The mount directory for migrating the v1 project.""" + +MIGRATION_UID: Final[int] = 1000 +"""The UID for migrating the v1 project.""" + +MIGRATION_GID: Final[int] = 1000 +"""The GID for migrating the v1 project.""" diff --git a/components/renku_data_services/project/core.py b/components/renku_data_services/project/core.py index dc63bd0e0..504f71346 100644 --- a/components/renku_data_services/project/core.py +++ b/components/renku_data_services/project/core.py @@ -1,34 +1,68 @@ """Business logic for projects.""" +from pathlib import PurePosixPath +from urllib.parse import urlparse + from ulid import ULID +from renku_data_services import errors from renku_data_services.authz.models import Visibility -from renku_data_services.base_models import APIUser, Slug -from renku_data_services.data_connectors.db import DataConnectorProjectLinkRepository -from renku_data_services.errors import errors +from renku_data_services.base_models import RESET, APIUser, ResetType, Slug +from renku_data_services.data_connectors.db import DataConnectorRepository from renku_data_services.project import apispec, models from renku_data_services.project.db import ProjectRepository from renku_data_services.session.db import SessionRepository +def validate_unsaved_project(body: apispec.ProjectPost, created_by: str) -> models.UnsavedProject: + """Validate an unsaved project.""" + keywords = [kw.root for kw in body.keywords] if body.keywords is not None else [] + visibility = Visibility.PRIVATE if body.visibility is None else Visibility(body.visibility.value) + secrets_mount_directory = PurePosixPath(body.secrets_mount_directory) if body.secrets_mount_directory else None + repositories = _validate_repositories(body.repositories) + return models.UnsavedProject( + name=body.name, + namespace=body.namespace, + slug=body.slug or Slug.from_name(body.name).value, + description=body.description, + repositories=repositories or [], + created_by=created_by, + visibility=visibility, + keywords=keywords, + documentation=body.documentation, + secrets_mount_directory=secrets_mount_directory, + ) + + def validate_project_patch(patch: apispec.ProjectPatch) -> models.ProjectPatch: """Validate the update to a project.""" keywords = [kw.root for kw in patch.keywords] if patch.keywords is not None else None + secrets_mount_directory: PurePosixPath | ResetType | None + match patch.secrets_mount_directory: + case "": + secrets_mount_directory = RESET + case str(): + secrets_mount_directory = PurePosixPath(patch.secrets_mount_directory) + case _: + secrets_mount_directory = None + repositories = _validate_repositories(patch.repositories) return models.ProjectPatch( name=patch.name, namespace=patch.namespace, + slug=patch.slug, visibility=Visibility(patch.visibility.value) if patch.visibility is not None else None, - repositories=patch.repositories, + repositories=repositories, description=patch.description, keywords=keywords, documentation=patch.documentation, template_id=None if patch.template_id is None else "", is_template=patch.is_template, + secrets_mount_directory=secrets_mount_directory, ) async def copy_project( - project_id: ULID, + source_project_id: ULID, user: APIUser, name: str, namespace: str, 
@@ -37,42 +71,146 @@ async def copy_project(
     repositories: list[models.Repository] | None,
     visibility: Visibility | None,
     keywords: list[str],
+    secrets_mount_directory: str | None,
     project_repo: ProjectRepository,
     session_repo: SessionRepository,
-    data_connector_to_project_link_repo: DataConnectorProjectLinkRepository,
+    data_connector_repo: DataConnectorRepository,
 ) -> models.Project:
     """Create a copy of a given project."""
-    template = await project_repo.get_project(user=user, project_id=project_id)
+    template = await project_repo.get_project(user=user, project_id=source_project_id, with_documentation=True)
+    repositories_ = _validate_repositories(repositories)
     unsaved_project = models.UnsavedProject(
         name=name,
         namespace=namespace,
         slug=slug or Slug.from_name(name).value,
         description=description or template.description,
-        repositories=repositories or template.repositories,
+        repositories=repositories_ or template.repositories,
         created_by=user.id,  # type: ignore[arg-type]
         visibility=template.visibility if visibility is None else visibility,
         keywords=keywords or template.keywords,
         template_id=template.id,
+        secrets_mount_directory=PurePosixPath(secrets_mount_directory) if secrets_mount_directory else None,
+        documentation=template.documentation,
     )
     project = await project_repo.insert_project(user, unsaved_project)
 
     # NOTE: Copy session launchers
-    launchers = await session_repo.get_project_launchers(user=user, project_id=project_id)
+    launchers = await session_repo.get_project_launchers(user=user, project_id=source_project_id)
     for launcher in launchers:
         await session_repo.copy_launcher(user=user, project_id=project.id, launcher=launcher)
 
     # NOTE: Copy data connector links. If this operation fails due to lack of permission, still proceed to create the
     # copy but return an error code that reflects this
-    copy_error = False
-    dc_links = await data_connector_to_project_link_repo.get_links_to(user=user, project_id=project_id)
+    uncopied_dc_ids: list[ULID] = []
+    dc_links = await data_connector_repo.get_links_to(user=user, project_id=source_project_id)
     for dc_link in dc_links:
         try:
-            await data_connector_to_project_link_repo.copy_link(user=user, project_id=project.id, link=dc_link)
+            await data_connector_repo.copy_link(user=user, target_project_id=project.id, link=dc_link)
         except errors.MissingResourceError:
-            copy_error = True
+            uncopied_dc_ids.append(dc_link.data_connector_id)
 
-    if copy_error:
-        raise errors.CopyDataConnectorsError()
+    if uncopied_dc_ids:
+        data_connectors_names_ids = await data_connector_repo.get_data_connectors_names_and_ids(user, uncopied_dc_ids)
+        dc_str = ", ".join([f"{name} ({id})" for name, id in data_connectors_names_ids])
+        if len(data_connectors_names_ids) == 1:
+            message = (
+                f"The project was copied, but the data connector '{dc_str}' could not be linked to "
+                "your copy of this project due to insufficient permissions. To make a copy that includes the data "
+                "connector, ask its owner to make it public."
+            )
+        else:
+            message = (
+                f"The project was copied, but the data connectors '{dc_str}' could not be linked to "
+                "your copy of this project due to insufficient permissions. To make a copy that includes the data "
+                "connectors, ask their owners to make them public."
+ ) + raise errors.CopyDataConnectorsError(message=message) return project + + +def validate_unsaved_session_secret_slot( + body: apispec.SessionSecretSlotPost, +) -> models.UnsavedSessionSecretSlot: + """Validate an unsaved secret slot.""" + _validate_session_launcher_secret_slot_filename(body.filename) + return models.UnsavedSessionSecretSlot( + project_id=ULID.from_str(body.project_id), + name=body.name, + description=body.description, + filename=body.filename, + ) + + +def validate_session_secret_slot_patch( + body: apispec.SessionSecretSlotPatch, +) -> models.SessionSecretSlotPatch: + """Validate the update to a secret slot.""" + if body.filename is not None: + _validate_session_launcher_secret_slot_filename(body.filename) + return models.SessionSecretSlotPatch( + name=body.name, + description=body.description, + filename=body.filename, + ) + + +def validate_session_secrets_patch( + body: apispec.SessionSecretPatchList, +) -> list[models.SessionSecretPatchExistingSecret | models.SessionSecretPatchSecretValue]: + """Validate the update to a session launcher's secrets.""" + result: list[models.SessionSecretPatchExistingSecret | models.SessionSecretPatchSecretValue] = [] + seen_slot_ids: set[str] = set() + for item in body.root: + if item.secret_slot_id in seen_slot_ids: + raise errors.ValidationError( + message=f"Found duplicate secret_slot_id '{item.secret_slot_id}' in the list of secrets." + ) + seen_slot_ids.add(item.secret_slot_id) + + if isinstance(item, apispec.SessionSecretPatch2): + result.append( + models.SessionSecretPatchExistingSecret( + secret_slot_id=ULID.from_str(item.secret_slot_id), + secret_id=ULID.from_str(item.secret_id), + ) + ) + else: + result.append( + models.SessionSecretPatchSecretValue( + secret_slot_id=ULID.from_str(item.secret_slot_id), + value=item.value, + ) + ) + return result + + +def _validate_repositories(repositories: list[str] | None) -> list[str] | None: + """Validate a list of git repositories.""" + if repositories is None: + return None + seen: set[str] = set() + without_duplicates: list[str] = [] + for repo in repositories: + repo = _validate_repository(repo) + if repo not in seen: + without_duplicates.append(repo) + seen.add(repo) + return without_duplicates + + +def _validate_repository(repository: str) -> str: + """Validate a git repository.""" + stripped = repository.strip() + parsed = urlparse(stripped) + if parsed.scheme not in ["http", "https"]: + raise errors.ValidationError(message=f'The repository URL "{repository}" is not a valid HTTP or HTTPS URL.') + return stripped + + +def _validate_session_launcher_secret_slot_filename(filename: str) -> None: + """Validate the filename field of a secret slot.""" + filename_candidate = PurePosixPath(filename) + if filename_candidate.name != filename: + raise errors.ValidationError(message=f"Filename {filename} is not valid.") diff --git a/components/renku_data_services/project/db.py b/components/renku_data_services/project/db.py index e9550c4b6..e960de8f1 100644 --- a/components/renku_data_services/project/db.py +++ b/components/renku_data_services/project/db.py @@ -3,10 +3,14 @@ from __future__ import annotations import functools +import random +import string from collections.abc import AsyncGenerator, Awaitable, Callable from datetime import UTC, datetime +from pathlib import PurePosixPath from typing import Concatenate, ParamSpec, TypeVar +from cryptography.hazmat.primitives.asymmetric import rsa from sqlalchemy import Select, delete, func, select, update from sqlalchemy.ext.asyncio import 
AsyncSession from sqlalchemy.orm import undefer @@ -18,17 +22,25 @@ from renku_data_services.authz.authz import Authz, AuthzOperation, ResourceType from renku_data_services.authz.models import CheckPermissionItem, Member, MembershipChange, Scope from renku_data_services.base_api.pagination import PaginationRequest -from renku_data_services.message_queue import events -from renku_data_services.message_queue.avro_models.io.renku.events import v2 as avro_schema_v2 -from renku_data_services.message_queue.db import EventRepository -from renku_data_services.message_queue.interface import IMessageQueue -from renku_data_services.message_queue.redis_queue import dispatch_message +from renku_data_services.base_models import RESET +from renku_data_services.base_models.core import Slug from renku_data_services.namespace import orm as ns_schemas from renku_data_services.namespace.db import GroupRepository from renku_data_services.project import apispec as project_apispec -from renku_data_services.project import models +from renku_data_services.project import constants, models from renku_data_services.project import orm as schemas +from renku_data_services.search.db import SearchUpdatesRepo +from renku_data_services.search.decorators import update_search_document +from renku_data_services.secrets import orm as secrets_schemas +from renku_data_services.secrets.core import encrypt_user_secret +from renku_data_services.secrets.models import SecretKind +from renku_data_services.session import apispec as session_apispec +from renku_data_services.session.core import ( + validate_unsaved_session_launcher, +) +from renku_data_services.session.db import SessionRepository from renku_data_services.storage import orm as storage_schemas +from renku_data_services.users.db import UserRepo from renku_data_services.users.orm import UserORM from renku_data_services.utils.core import with_db_transaction @@ -39,15 +51,13 @@ class ProjectRepository: def __init__( self, session_maker: Callable[..., AsyncSession], - message_queue: IMessageQueue, - event_repo: EventRepository, group_repo: GroupRepository, + search_updates_repo: SearchUpdatesRepo, authz: Authz, ) -> None: self.session_maker = session_maker - self.message_queue: IMessageQueue = message_queue - self.event_repo: EventRepository = event_repo self.group_repo: GroupRepository = group_repo + self.search_updates_repo: SearchUpdatesRepo = search_updates_repo self.authz = authz async def get_projects( @@ -67,7 +77,7 @@ async def get_projects( stmt = select(schemas.ProjectORM) stmt = stmt.where(schemas.ProjectORM.id.in_(project_ids)) if namespace: - stmt = _filter_by_namespace_slug(stmt, namespace) + stmt = _filter_projects_by_namespace_slug(stmt, namespace) stmt = stmt.order_by(coalesce(schemas.ProjectORM.updated_at, schemas.ProjectORM.creation_date).desc()) @@ -77,7 +87,7 @@ async def get_projects( select(func.count()).select_from(schemas.ProjectORM).where(schemas.ProjectORM.id.in_(project_ids)) ) if namespace: - stmt_count = _filter_by_namespace_slug(stmt_count, namespace) + stmt_count = _filter_projects_by_namespace_slug(stmt_count, namespace) results = await session.scalars(stmt), await session.scalar(stmt_count) projects_orm = results[0].all() total_elements = results[1] or 0 @@ -138,17 +148,54 @@ async def get_all_copied_projects( return [p.dump() for p in project_orms] async def get_project_by_namespace_slug( - self, user: base_models.APIUser, namespace: str, slug: str, with_documentation: bool = False + self, user: base_models.APIUser, namespace: str, slug: Slug, 
with_documentation: bool = False ) -> models.Project: """Get one project from the database.""" async with self.session_maker() as session: stmt = select(schemas.ProjectORM) - stmt = _filter_by_namespace_slug(stmt, namespace) - stmt = stmt.where(schemas.ProjectORM.slug.has(ns_schemas.EntitySlugORM.slug == slug)) + stmt = _filter_projects_by_namespace_slug(stmt, namespace) + stmt = stmt.where(schemas.ProjectORM.slug.has(ns_schemas.EntitySlugORM.slug == slug.value)) if with_documentation: stmt = stmt.options(undefer(schemas.ProjectORM.documentation)) - result = await session.execute(stmt) - project_orm = result.scalars().first() + result = await session.scalars(stmt) + project_orm = result.first() + + if project_orm is None: + old_project_stmt_old_ns_current_slug = ( + select(schemas.ProjectORM.id) + .where(ns_schemas.NamespaceOldORM.slug == namespace.lower()) + .where(ns_schemas.NamespaceOldORM.latest_slug_id == ns_schemas.NamespaceORM.id) + .where(ns_schemas.EntitySlugORM.namespace_id == ns_schemas.NamespaceORM.id) + .where(schemas.ProjectORM.id == ns_schemas.EntitySlugORM.project_id) + .where(schemas.ProjectORM.slug.has(ns_schemas.EntitySlugORM.slug == slug.value)) + ) + old_project_stmt_current_ns_old_slug = ( + select(schemas.ProjectORM.id) + .where(ns_schemas.NamespaceORM.slug == namespace.lower()) + .where(ns_schemas.EntitySlugORM.namespace_id == ns_schemas.NamespaceORM.id) + .where(schemas.ProjectORM.id == ns_schemas.EntitySlugORM.project_id) + .where(ns_schemas.EntitySlugOldORM.slug == slug.value) + .where(ns_schemas.EntitySlugOldORM.latest_slug_id == ns_schemas.EntitySlugORM.id) + ) + old_project_stmt_old_ns_old_slug = ( + select(schemas.ProjectORM.id) + .where(ns_schemas.NamespaceOldORM.slug == namespace.lower()) + .where(ns_schemas.NamespaceOldORM.latest_slug_id == ns_schemas.NamespaceORM.id) + .where(ns_schemas.EntitySlugORM.namespace_id == ns_schemas.NamespaceORM.id) + .where(schemas.ProjectORM.id == ns_schemas.EntitySlugORM.project_id) + .where(ns_schemas.EntitySlugOldORM.slug == slug.value) + .where(ns_schemas.EntitySlugOldORM.latest_slug_id == ns_schemas.EntitySlugORM.id) + ) + old_project_stmt = old_project_stmt_old_ns_current_slug.union( + old_project_stmt_current_ns_old_slug, old_project_stmt_old_ns_old_slug + ) + result_old = await session.scalars(old_project_stmt) + result_old_id = result_old.first() + if result_old_id is not None: + stmt = select(schemas.ProjectORM).where(schemas.ProjectORM.id == result_old_id) + if with_documentation: + stmt = stmt.options(undefer(schemas.ProjectORM.documentation)) + project_orm = (await session.scalars(stmt)).first() not_found_msg = ( f"Project with identifier '{namespace}/{slug}' does not exist or you do not have access to it." 
@@ -170,7 +217,7 @@ async def get_project_by_namespace_slug( @with_db_transaction @Authz.authz_change(AuthzOperation.create, ResourceType.project) - @dispatch_message(avro_schema_v2.ProjectCreated) + @update_search_document async def insert_project( self, user: base_models.APIUser, @@ -209,6 +256,8 @@ async def insert_project( select(ns_schemas.EntitySlugORM) .where(ns_schemas.EntitySlugORM.namespace_id == ns.id) .where(ns_schemas.EntitySlugORM.slug == slug) + .where(ns_schemas.EntitySlugORM.data_connector_id.is_(None)) + .where(ns_schemas.EntitySlugORM.project_id.is_not(None)), ) if existing_slug is not None: raise errors.ConflictError(message=f"An entity with the slug '{ns.slug}/{slug}' already exists.") @@ -228,6 +277,7 @@ async def insert_project( keywords=project.keywords, documentation=project.documentation, template_id=project.template_id, + secrets_mount_directory=project.secrets_mount_directory or constants.DEFAULT_SESSION_SECRETS_MOUNT_DIR, ) project_slug = ns_schemas.EntitySlugORM.create_project_slug(slug, project_id=project_orm.id, namespace_id=ns.id) @@ -235,12 +285,11 @@ async def insert_project( session.add(project_slug) await session.flush() await session.refresh(project_orm) - return project_orm.dump() @with_db_transaction @Authz.authz_change(AuthzOperation.update, ResourceType.project) - @dispatch_message(avro_schema_v2.ProjectUpdated) + @update_search_document async def update_project( self, user: base_models.APIUser, @@ -263,22 +312,25 @@ async def update_project( if patch.visibility is not None and patch.visibility != old_project.visibility: # NOTE: changing the visibility requires the user to be owner which means they should have DELETE permission required_scope = Scope.DELETE - if patch.namespace is not None and patch.namespace != old_project.namespace.slug: + if patch.namespace is not None and patch.namespace != old_project.namespace.path.first.value: # NOTE: changing the namespace requires the user to be owner which means they should have DELETE permission required_scope = Scope.DELETE + if patch.slug is not None and patch.slug != old_project.slug: + # NOTE: changing the slug requires the user to be owner which means they should have DELETE permission + required_scope = Scope.DELETE authorized = await self.authz.has_permission(user, ResourceType.project, project_id, required_scope) if not authorized: raise errors.MissingResourceError( message=f"Project with id '{project_id}' does not exist or you do not have access to it." 
) - current_etag = project.dump().etag + current_etag = old_project.etag if etag is not None and current_etag != etag: raise errors.ConflictError(message=f"Current ETag is {current_etag}, not {etag}.") if patch.name is not None: project.name = patch.name - if patch.namespace is not None and patch.namespace != old_project.namespace.slug: + if patch.namespace is not None and patch.namespace != old_project.namespace.path.first.value: ns = await session.scalar( select(ns_schemas.NamespaceORM).where(ns_schemas.NamespaceORM.slug == patch.namespace.lower()) ) @@ -295,6 +347,27 @@ async def update_project( message=f"The project cannot be moved because you do not have sufficient permissions with the namespace {patch.namespace}" # noqa: E501 ) project.slug.namespace_id = ns.id + # Trigger update for ``updated_at`` column + await session.execute(update(schemas.ProjectORM).where(schemas.ProjectORM.id == project_id).values()) + if patch.slug is not None and patch.slug != old_project.slug: + namespace_id = project.slug.namespace_id + existing_entity = await session.scalar( + select(ns_schemas.EntitySlugORM) + .where(ns_schemas.EntitySlugORM.slug == patch.slug) + .where(ns_schemas.EntitySlugORM.namespace_id == namespace_id) + ) + if existing_entity is not None: + raise errors.ConflictError( + message=f"An entity with the slug '{project.slug.namespace.slug}/{patch.slug}' already exists." + ) + session.add( + ns_schemas.EntitySlugOldORM( + slug=old_project.slug, latest_slug_id=project.slug.id, project_id=project.id, data_connector_id=None + ) + ) + project.slug.slug = patch.slug + # Trigger update for ``updated_at`` column + await session.execute(update(schemas.ProjectORM).where(schemas.ProjectORM.id == project_id).values()) if patch.visibility is not None: visibility_orm = ( project_apispec.Visibility(patch.visibility) @@ -314,11 +387,14 @@ async def update_project( project.keywords = patch.keywords if patch.keywords else None if patch.documentation is not None: project.documentation = patch.documentation - if patch.template_id is not None: project.template_id = None if patch.is_template is not None: project.is_template = patch.is_template + if patch.secrets_mount_directory is not None and patch.secrets_mount_directory is RESET: + project.secrets_mount_directory = constants.DEFAULT_SESSION_SECRETS_MOUNT_DIR + elif patch.secrets_mount_directory is not None and isinstance(patch.secrets_mount_directory, PurePosixPath): + project.secrets_mount_directory = patch.secrets_mount_directory await session.flush() await session.refresh(project) @@ -330,7 +406,7 @@ async def update_project( @with_db_transaction @Authz.authz_change(AuthzOperation.delete, ResourceType.project) - @dispatch_message(avro_schema_v2.ProjectRemoved) + @update_search_document async def delete_project( self, user: base_models.APIUser, project_id: ULID, *, session: AsyncSession | None = None ) -> models.DeletedProject | None: @@ -386,12 +462,14 @@ async def get_project_permissions(self, user: base_models.APIUser, project_id: U _T = TypeVar("_T") -def _filter_by_namespace_slug(statement: Select[tuple[_T]], namespace: str) -> Select[tuple[_T]]: +def _filter_projects_by_namespace_slug(statement: Select[tuple[_T]], namespace: str) -> Select[tuple[_T]]: """Filters a select query on projects to a given namespace.""" - return ( - statement.where(ns_schemas.NamespaceORM.slug == namespace.lower()) - .where(ns_schemas.EntitySlugORM.namespace_id == ns_schemas.NamespaceORM.id) - .where(schemas.ProjectORM.id == ns_schemas.EntitySlugORM.project_id) + return 
statement.where( + schemas.ProjectORM.slug.has( + ns_schemas.EntitySlugORM.namespace.has( + ns_schemas.NamespaceORM.slug == namespace.lower(), + ) + ) ) @@ -431,14 +509,10 @@ class ProjectMemberRepository: def __init__( self, session_maker: Callable[..., AsyncSession], - event_repo: EventRepository, authz: Authz, - message_queue: IMessageQueue, ) -> None: self.session_maker = session_maker - self.event_repo = event_repo self.authz = authz - self.message_queue = message_queue @with_db_transaction @_project_exists @@ -452,7 +526,6 @@ async def get_members( @with_db_transaction @_project_exists - @dispatch_message(events.ProjectMembershipChanged) async def update_members( self, user: base_models.APIUser, @@ -483,7 +556,6 @@ async def update_members( @with_db_transaction @_project_exists - @dispatch_message(events.ProjectMembershipChanged) async def delete_members( self, user: base_models.APIUser, project_id: ULID, user_ids: list[str], *, session: AsyncSession | None = None ) -> list[MembershipChange]: @@ -493,3 +565,486 @@ async def delete_members( members = await self.authz.remove_project_members(user, ResourceType.project, project_id, user_ids) return members + + +class ProjectSessionSecretRepository: + """Repository for session secrets.""" + + def __init__( + self, + session_maker: Callable[..., AsyncSession], + authz: Authz, + user_repo: UserRepo, + secret_service_public_key: rsa.RSAPublicKey, + ) -> None: + self.session_maker = session_maker + self.authz = authz + self.user_repo = user_repo + self.secret_service_public_key = secret_service_public_key + + async def get_all_session_secret_slots_from_project( + self, + user: base_models.APIUser, + project_id: ULID, + ) -> list[models.SessionSecretSlot]: + """Get all session secret slots from a project.""" + # Check that the user is allowed to access the project + authorized = await self.authz.has_permission(user, ResourceType.project, project_id, Scope.READ) + if not authorized: + raise errors.MissingResourceError( + message=f"Project with id '{project_id}' does not exist or you do not have access to it." + ) + async with self.session_maker() as session: + result = await session.scalars( + select(schemas.SessionSecretSlotORM) + .where(schemas.SessionSecretSlotORM.project_id == project_id) + .order_by(schemas.SessionSecretSlotORM.id.desc()) + ) + secret_slots = result.all() + return [s.dump() for s in secret_slots] + + async def get_session_secret_slot( + self, + user: base_models.APIUser, + slot_id: ULID, + ) -> models.SessionSecretSlot: + """Get one session secret slot from the database.""" + async with self.session_maker() as session, session.begin(): + result = await session.scalars( + select(schemas.SessionSecretSlotORM).where(schemas.SessionSecretSlotORM.id == slot_id) + ) + secret_slot = result.one_or_none() + + authorized = ( + await self.authz.has_permission(user, ResourceType.project, secret_slot.project_id, Scope.READ) + if secret_slot is not None + else False + ) + if not authorized or secret_slot is None: + raise errors.MissingResourceError( + message=f"Session secret slot with id '{slot_id}' does not exist or you do not have access to it." 
+ ) + + return secret_slot.dump() + + async def insert_session_secret_slot( + self, user: base_models.APIUser, secret_slot: models.UnsavedSessionSecretSlot + ) -> models.SessionSecretSlot: + """Insert a new session secret slot entry.""" + if user.id is None: + raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + + # Check that the user is allowed to access the project + authorized = await self.authz.has_permission(user, ResourceType.project, secret_slot.project_id, Scope.WRITE) + if not authorized: + raise errors.MissingResourceError( + message=f"Project with id '{secret_slot.project_id}' does not exist or you do not have access to it." + ) + + async with self.session_maker() as session, session.begin(): + existing_secret_slot = await session.scalar( + select(schemas.SessionSecretSlotORM) + .where(schemas.SessionSecretSlotORM.project_id == secret_slot.project_id) + .where(schemas.SessionSecretSlotORM.filename == secret_slot.filename) + ) + if existing_secret_slot is not None: + raise errors.ConflictError( + message=f"A session secret slot with the filename '{secret_slot.filename}' already exists." + ) + + secret_slot_orm = schemas.SessionSecretSlotORM( + project_id=secret_slot.project_id, + name=secret_slot.name or secret_slot.filename, + description=secret_slot.description if secret_slot.description else None, + filename=secret_slot.filename, + created_by_id=user.id, + ) + + session.add(secret_slot_orm) + await session.flush() + await session.refresh(secret_slot_orm) + + return secret_slot_orm.dump() + + async def update_session_secret_slot( + self, user: base_models.APIUser, slot_id: ULID, patch: models.SessionSecretSlotPatch, etag: str + ) -> models.SessionSecretSlot: + """Update a session secret slot entry.""" + not_found_msg = f"Session secret slot with id '{slot_id}' does not exist or you do not have access to it." + + async with self.session_maker() as session, session.begin(): + result = await session.scalars( + select(schemas.SessionSecretSlotORM).where(schemas.SessionSecretSlotORM.id == slot_id) + ) + secret_slot = result.one_or_none() + if secret_slot is None: + raise errors.MissingResourceError(message=not_found_msg) + + authorized = await self.authz.has_permission( + user, ResourceType.project, secret_slot.project_id, Scope.WRITE + ) + if not authorized: + raise errors.MissingResourceError(message=not_found_msg) + + current_etag = secret_slot.dump().etag + if current_etag != etag: + raise errors.ConflictError(message=f"Current ETag is {current_etag}, not {etag}.") + + if patch.name is not None: + secret_slot.name = patch.name + if patch.description is not None: + secret_slot.description = patch.description if patch.description else None + if patch.filename is not None and patch.filename != secret_slot.filename: + existing_secret_slot = await session.scalar( + select(schemas.SessionSecretSlotORM) + .where(schemas.SessionSecretSlotORM.project_id == secret_slot.project_id) + .where(schemas.SessionSecretSlotORM.filename == patch.filename) + ) + if existing_secret_slot is not None: + raise errors.ConflictError( + message=f"A session secret slot with the filename '{patch.filename}' already exists." 
+ ) + secret_slot.filename = patch.filename + + await session.flush() + await session.refresh(secret_slot) + + return secret_slot.dump() + + async def delete_session_secret_slot( + self, + user: base_models.APIUser, + slot_id: ULID, + ) -> None: + """Delete a session secret slot.""" + async with self.session_maker() as session, session.begin(): + result = await session.scalars( + select(schemas.SessionSecretSlotORM).where(schemas.SessionSecretSlotORM.id == slot_id) + ) + secret_slot = result.one_or_none() + if secret_slot is None: + return None + + authorized = await self.authz.has_permission( + user, ResourceType.project, secret_slot.project_id, Scope.WRITE + ) + if not authorized: + raise errors.MissingResourceError( + message=f"Session secret slot with id '{slot_id}' does not exist or you do not have access to it." + ) + + await session.delete(secret_slot) + + async def get_all_session_secrets_from_project( + self, + user: base_models.APIUser, + project_id: ULID, + ) -> list[models.SessionSecret]: + """Get all session secrets from a project.""" + if user.id is None: + raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + + # Check that the user is allowed to access the project + authorized = await self.authz.has_permission(user, ResourceType.project, project_id, Scope.READ) + if not authorized: + raise errors.MissingResourceError( + message=f"Project with id '{project_id}' does not exist or you do not have access to it." + ) + + async with self.session_maker() as session: + result = await session.scalars( + select(schemas.SessionSecretORM) + .where(schemas.SessionSecretORM.user_id == user.id) + .where(schemas.SessionSecretORM.secret_slot_id == schemas.SessionSecretSlotORM.id) + .where(schemas.SessionSecretSlotORM.project_id == project_id) + .order_by(schemas.SessionSecretORM.id.desc()) + ) + secrets = result.all() + + return [s.dump() for s in secrets] + + async def patch_session_secrets( + self, + user: base_models.APIUser, + project_id: ULID, + secrets: list[models.SessionSecretPatchExistingSecret | models.SessionSecretPatchSecretValue], + ) -> list[models.SessionSecret]: + """Create, update or remove session secrets.""" + if user.id is None: + raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + + # Check that the user is allowed to access the project + authorized = await self.authz.has_permission(user, ResourceType.project, project_id, Scope.READ) + if not authorized: + raise errors.MissingResourceError( + message=f"Project with id '{project_id}' does not exist or you do not have access to it." 
+ ) + + secrets_as_dict = {s.secret_slot_id: s for s in secrets} + + async with self.session_maker() as session, session.begin(): + result = await session.scalars( + select(schemas.SessionSecretORM) + .where(schemas.SessionSecretORM.user_id == user.id) + .where(schemas.SessionSecretORM.secret_slot_id == schemas.SessionSecretSlotORM.id) + .where(schemas.SessionSecretSlotORM.project_id == project_id) + ) + existing_secrets = result.all() + existing_secrets_as_dict = {s.secret_slot_id: s for s in existing_secrets} + + result_slots = await session.scalars( + select(schemas.SessionSecretSlotORM).where(schemas.SessionSecretSlotORM.project_id == project_id) + ) + secret_slots = result_slots.all() + secret_slots_as_dict = {s.id: s for s in secret_slots} + + all_secrets = [] + + for slot_id, secret_update in secrets_as_dict.items(): + secret_slot = secret_slots_as_dict.get(slot_id) + if secret_slot is None: + raise errors.ValidationError( + message=f"Session secret slot with id '{slot_id}' does not exist or you do not have access to it." # noqa: E501 + ) + + if isinstance(secret_update, models.SessionSecretPatchExistingSecret): + # Update the secret_id + if session_launcher_secret_orm := existing_secrets_as_dict.get(slot_id): + session_launcher_secret_orm.secret_id = secret_update.secret_id + else: + session_launcher_secret_orm = schemas.SessionSecretORM( + secret_slot_id=secret_update.secret_slot_id, + secret_id=secret_update.secret_id, + user_id=user.id, + ) + session.add(session_launcher_secret_orm) + await session.flush() + await session.refresh(session_launcher_secret_orm) + all_secrets.append(session_launcher_secret_orm.dump()) + continue + + if secret_update.value is None: + # Remove the secret + session_launcher_secret_orm = existing_secrets_as_dict.get(slot_id) + if session_launcher_secret_orm is None: + continue + await session.delete(session_launcher_secret_orm) + del existing_secrets_as_dict[slot_id] + continue + + encrypted_value, encrypted_key = await encrypt_user_secret( + user_repo=self.user_repo, + requested_by=user, + secret_service_public_key=self.secret_service_public_key, + secret_value=secret_update.value, + ) + if session_launcher_secret_orm := existing_secrets_as_dict.get(slot_id): + session_launcher_secret_orm.secret.update( + encrypted_value=encrypted_value, encrypted_key=encrypted_key + ) + else: + name = secret_slot.name + suffix = "".join([random.choice(string.ascii_lowercase + string.digits) for _ in range(8)]) # nosec B311 + name_slug = base_models.Slug.from_name(name).value + default_filename = f"{name_slug[:200]}-{suffix}" + secret_orm = secrets_schemas.SecretORM( + name=name, + default_filename=default_filename, + user_id=user.id, + encrypted_value=encrypted_value, + encrypted_key=encrypted_key, + kind=SecretKind.general, + ) + session_launcher_secret_orm = schemas.SessionSecretORM( + secret_slot_id=secret_update.secret_slot_id, + secret_id=secret_orm.id, + user_id=user.id, + ) + session.add(secret_orm) + session.add(session_launcher_secret_orm) + await session.flush() + await session.refresh(session_launcher_secret_orm) + all_secrets.append(session_launcher_secret_orm.dump()) + + return all_secrets + + async def delete_session_secrets( + self, + user: base_models.APIUser, + project_id: ULID, + ) -> None: + """Delete all session secrets associated with a project.""" + if user.id is None: + raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + + async with self.session_maker() as session, session.begin(): + result = 
await session.scalars( + select(schemas.SessionSecretORM) + .where(schemas.SessionSecretORM.user_id == user.id) + .where(schemas.SessionSecretORM.secret_slot_id == schemas.SessionSecretSlotORM.id) + .where(schemas.SessionSecretSlotORM.project_id == project_id) + ) + for secret in result: + await session.delete(secret) + + +class ProjectMigrationRepository: + """Repository for project migrations.""" + + def __init__( + self, + session_maker: Callable[..., AsyncSession], + authz: Authz, + project_repo: ProjectRepository, + session_repo: SessionRepository, + ) -> None: + self.session_maker = session_maker + self.authz = authz + self.project_repo = project_repo + self.session_repo = session_repo + + async def get_project_migrations( + self, + user: base_models.APIUser, + ) -> AsyncGenerator[models.ProjectMigrationInfo, None]: + """Get all project migrations from the database.""" + project_ids = await self.authz.resources_with_permission(user, user.id, ResourceType.project, Scope.READ) + + async with self.session_maker() as session: + stmt = select(schemas.ProjectMigrationsORM).where(schemas.ProjectMigrationsORM.project_id.in_(project_ids)) + result = await session.stream_scalars(stmt) + async for migration in result: + yield migration.dump() + + @with_db_transaction + @Authz.authz_change(AuthzOperation.create, ResourceType.project) + async def migrate_v1_project( + self, + user: base_models.APIUser, + project: models.UnsavedProject, + project_v1_id: int, + session_launcher: project_apispec.MigrationSessionLauncherPost | None = None, + session: AsyncSession | None = None, + ) -> models.Project: + """Migrate a v1 project by creating a new project and tracking the migration.""" + if not session: + raise errors.ProgrammingError(message="A database session is required") + + result = await session.scalars( + select(schemas.ProjectMigrationsORM).where(schemas.ProjectMigrationsORM.project_v1_id == project_v1_id) + ) + project_migration = result.one_or_none() + if project_migration is not None: + raise errors.ValidationError(message=f"Project V1 with id '{project_v1_id}' already exists.") + created_project = await self.project_repo.insert_project(user, project) + if not created_project: + raise errors.ValidationError( + message=f"Failed to create a project for migration from v1 (project_v1_id={project_v1_id})." 
+            )
+
+        result_launcher = None
+        if session_launcher is not None:
+            unsaved_session_launcher = session_apispec.SessionLauncherPost(
+                name=session_launcher.name,
+                project_id=str(created_project.id),
+                description=None,
+                resource_class_id=session_launcher.resource_class_id,
+                disk_storage=session_launcher.disk_storage,
+                environment=session_apispec.EnvironmentPostInLauncherHelper(
+                    environment_kind=session_apispec.EnvironmentKind.CUSTOM,
+                    name=session_launcher.name,
+                    description=None,
+                    container_image=session_launcher.container_image,
+                    default_url=session_launcher.default_url,
+                    uid=constants.MIGRATION_UID,
+                    gid=constants.MIGRATION_GID,
+                    working_directory=constants.MIGRATION_WORKING_DIRECTORY,
+                    mount_directory=constants.MIGRATION_MOUNT_DIRECTORY,
+                    port=constants.MIGRATION_PORT,
+                    command=constants.MIGRATION_COMMAND,
+                    args=constants.MIGRATION_ARGS,
+                    is_archived=False,
+                    environment_image_source=session_apispec.EnvironmentImageSourceImage.image,
+                ),
+                env_variables=None,
+            )
+
+            new_launcher = validate_unsaved_session_launcher(
+                unsaved_session_launcher, builds_config=self.session_repo.builds_config
+            )
+            result_launcher = await self.session_repo.insert_launcher(user=user, launcher=new_launcher)
+
+        migration_orm = schemas.ProjectMigrationsORM(
+            project_id=created_project.id,
+            project_v1_id=project_v1_id,
+            launcher_id=result_launcher.id if result_launcher else None,
+        )
+
+        if migration_orm.project_id is None:
+            raise errors.ValidationError(message="Project ID cannot be None for the migration entry.")
+
+        session.add(migration_orm)
+        await session.flush()
+        await session.refresh(migration_orm)
+
+        return created_project
+
+    async def get_migration_by_v1_id(self, user: base_models.APIUser, v1_id: int) -> models.Project:
+        """Retrieve the migrated project for a given project v1 ID."""
+        async with self.session_maker() as session:
+            stmt = select(schemas.ProjectMigrationsORM).where(schemas.ProjectMigrationsORM.project_v1_id == v1_id)
+            result = await session.execute(stmt)
+            project_ids = result.scalars().first()
+
+            if not project_ids:
+                raise errors.MissingResourceError(message=f"Migration for project v1 with id '{v1_id}' does not exist.")
+
+            # NOTE: Show only those projects that the user has access to
+            allowed_projects = await self.authz.resources_with_permission(
+                user, user.id, ResourceType.project, Scope.READ
+            )
+            project_id_list = [project_ids.project_id]
+            stmt = select(schemas.ProjectORM)
+            stmt = stmt.where(schemas.ProjectORM.id.in_(project_id_list))
+            stmt = stmt.where(schemas.ProjectORM.id.in_(allowed_projects))
+            result = await session.execute(stmt)
+            project_orm = result.scalars().first()
+
+            if project_orm is None:
+                raise errors.MissingResourceError(
+                    message="The migrated project does not exist or you do not have permission to open it."
+ ) + + return project_orm.dump() + + async def get_migration_by_project_id( + self, user: base_models.APIUser, project_id: ULID + ) -> models.ProjectMigrationInfo | None: + """Retrieve migration info for a given project v2 ID.""" + + if user.id is None: + raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + + project_ids = await self.authz.resources_with_permission(user, user.id, ResourceType.project, Scope.WRITE) + + async with self.session_maker() as session: + stmt_project = select(schemas.ProjectORM.id).where(schemas.ProjectORM.id == project_id) + stmt_project = stmt_project.where(schemas.ProjectORM.id.in_(project_ids)) + res_project = await session.scalar(stmt_project) + if not res_project: + raise errors.MissingResourceError( + message=f"Project with ID {project_id} does not exist or you do not have access to it." + ) + + stmt = select(schemas.ProjectMigrationsORM).where(schemas.ProjectMigrationsORM.project_id == project_id) + result = await session.execute(stmt) + project_migration_orm = result.scalars().first() + + if project_migration_orm: + return models.ProjectMigrationInfo( + project_id=project_id, + v1_id=project_migration_orm.project_v1_id, + launcher_id=project_migration_orm.launcher_id, + ) + + return None diff --git a/components/renku_data_services/project/models.py b/components/renku_data_services/project/models.py index c847ab639..da2e24a21 100644 --- a/components/renku_data_services/project/models.py +++ b/components/renku_data_services/project/models.py @@ -2,13 +2,16 @@ from dataclasses import dataclass, field from datetime import UTC, datetime -from typing import Literal, Optional +from pathlib import PurePosixPath +from typing import Literal from ulid import ULID from renku_data_services.authz.models import Visibility -from renku_data_services.namespace.models import Namespace -from renku_data_services.utils.etag import compute_etag_from_timestamp +from renku_data_services.base_models import ResetType +from renku_data_services.base_models.core import ProjectPath, ProjectSlug +from renku_data_services.namespace.models import GroupNamespace, UserNamespace +from renku_data_services.utils.etag import compute_etag_from_fields, compute_etag_from_timestamp Repository = str @@ -24,26 +27,33 @@ class BaseProject: creation_date: datetime = field(default_factory=lambda: datetime.now(UTC).replace(microsecond=0)) updated_at: datetime | None = field(default=None) repositories: list[Repository] = field(default_factory=list) - description: Optional[str] = None - keywords: Optional[list[str]] = None - documentation: Optional[str] = None - template_id: Optional[ULID] = None + description: str | None = None + keywords: list[str] | None = None + documentation: str | None = None + template_id: ULID | None = None is_template: bool = False + secrets_mount_directory: PurePosixPath | None = None + + +@dataclass(frozen=True, eq=True, kw_only=True) +class Project(BaseProject): + """Model for a project which has been persisted in the database.""" + + id: ULID + namespace: UserNamespace | GroupNamespace + secrets_mount_directory: PurePosixPath @property def etag(self) -> str | None: """Entity tag value for this project object.""" if self.updated_at is None: return None - return compute_etag_from_timestamp(self.updated_at) - + return compute_etag_from_fields(self.updated_at, self.path.serialize()) -@dataclass(frozen=True, eq=True, kw_only=True) -class Project(BaseProject): - """Base Project model.""" - - id: ULID - namespace: Namespace + 
@property + def path(self) -> ProjectPath: + """Get the entity slug path for the project.""" + return self.namespace.path / ProjectSlug(self.slug) @dataclass(frozen=True, eq=True, kw_only=True) @@ -52,6 +62,11 @@ class UnsavedProject(BaseProject): namespace: str + @property + def path(self) -> ProjectPath: + """Get the entity slug path for the project.""" + return ProjectPath.from_strings(self.namespace, self.slug) + @dataclass(frozen=True, eq=True, kw_only=True) class ProjectPatch: @@ -59,6 +74,7 @@ class ProjectPatch: name: str | None namespace: str | None + slug: str | None visibility: Visibility | None repositories: list[Repository] | None description: str | None @@ -66,6 +82,7 @@ class ProjectPatch: documentation: str | None template_id: Literal[""] | None is_template: bool | None + secrets_mount_directory: PurePosixPath | ResetType | None @dataclass @@ -90,3 +107,91 @@ class ProjectPermissions: write: bool delete: bool change_membership: bool + + +@dataclass(frozen=True, eq=True, kw_only=True) +class UnsavedSessionSecretSlot: + """Session secret slot model that has not been persisted.""" + + project_id: ULID + name: str | None + description: str | None + filename: str + + +@dataclass(frozen=True, eq=True, kw_only=True) +class SessionSecretSlot(UnsavedSessionSecretSlot): + """Session secret slot model that has been persisted.""" + + id: ULID + created_by_id: str + creation_date: datetime + updated_at: datetime + + @property + def etag(self) -> str: + """Entity tag value for this session secret slot object.""" + return compute_etag_from_timestamp(self.updated_at) + + +@dataclass(frozen=True, eq=True, kw_only=True) +class SessionSecretSlotPatch: + """Model for changes requested on a session secret slot.""" + + name: str | None + description: str | None + filename: str | None + + +@dataclass(frozen=True, eq=True, kw_only=True) +class SessionSecret: + """Session secret model that has been persisted.""" + + secret_slot: SessionSecretSlot + secret_id: ULID + + +@dataclass(frozen=True, eq=True, kw_only=True) +class SessionSecretPatchExistingSecret: + """Model for changes requested on a session secret.""" + + secret_slot_id: ULID + secret_id: ULID + + +@dataclass(frozen=True, eq=True, kw_only=True) +class SessionSecretPatchSecretValue: + """Model for changes requested on a session secret.""" + + secret_slot_id: ULID + value: str | None + + +@dataclass(frozen=True, eq=True, kw_only=True) +class UnsavedProjectMigration: + """Model representing a migration from an old project version that has not been persisted.""" + + project_id: ULID + project_v1_id: int + + +@dataclass(frozen=True, eq=True, kw_only=True) +class ProjectMigration(UnsavedProjectMigration): + """Model representing a migration from an old project version.""" + + id: ULID + migrated_at: datetime = field(default_factory=lambda: datetime.now(UTC).replace(microsecond=0)) + + @property + def etag(self) -> str: + """Entity tag value for this project migration object.""" + return compute_etag_from_fields(self.migrated_at, self.project_v1_id) + + +@dataclass(frozen=True, eq=True, kw_only=True) +class ProjectMigrationInfo: + """Model representing a migration from an old project version.""" + + project_id: ULID + v1_id: int | None + launcher_id: ULID | None diff --git a/components/renku_data_services/project/orm.py b/components/renku_data_services/project/orm.py index fa012019e..e55e8b279 100644 --- a/components/renku_data_services/project/orm.py +++ b/components/renku_data_services/project/orm.py @@ -1,22 +1,27 @@ """SQLAlchemy's schemas 
for the projects database."""
 
 from datetime import datetime
+from pathlib import PurePosixPath
 from typing import TYPE_CHECKING, Optional
 
 from sqlalchemy import Boolean, DateTime, Identity, Index, Integer, MetaData, String, false, func
 from sqlalchemy.dialects.postgresql import ARRAY
 from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column, relationship
-from sqlalchemy.schema import ForeignKey
+from sqlalchemy.schema import ForeignKey, UniqueConstraint
 from ulid import ULID
 
 from renku_data_services.authz import models as authz_models
+from renku_data_services.base_models.core import ProjectPath
 from renku_data_services.base_orm.registry import COMMON_ORM_REGISTRY
-from renku_data_services.project import models
+from renku_data_services.namespace.models import ProjectNamespace
+from renku_data_services.project import constants, models
 from renku_data_services.project.apispec import Visibility
-from renku_data_services.utils.sqlalchemy import ULIDType
+from renku_data_services.secrets.orm import SecretORM
+from renku_data_services.users.orm import UserORM
+from renku_data_services.utils.sqlalchemy import PurePosixPathType, ULIDType
 
 if TYPE_CHECKING:
-    from renku_data_services.namespace.orm import EntitySlugORM
+    from renku_data_services.namespace.orm import EntitySlugOldORM, EntitySlugORM
 
 
 class BaseORM(MappedAsDataclass, DeclarativeBase):
@@ -38,10 +43,20 @@ class ProjectORM(BaseORM):
     description: Mapped[str | None] = mapped_column("description", String(500))
     keywords: Mapped[Optional[list[str]]] = mapped_column("keywords", ARRAY(String(99)), nullable=True)
     documentation: Mapped[str | None] = mapped_column("documentation", String(), nullable=True, deferred=True)
+    secrets_mount_directory: Mapped[PurePosixPath] = mapped_column("secrets_mount_directory", PurePosixPathType)
+    """Location where secrets are mounted in this project's sessions."""
     # NOTE: The project slugs table has a foreign key from the projects table, but there is a stored procedure
     # triggered by the deletion of slugs to remove the project used by the slug. See migration 89aa4573cfa9.
     slug: Mapped["EntitySlugORM"] = relationship(
-        lazy="joined", init=False, repr=False, viewonly=True, back_populates="project"
+        lazy="joined",
+        init=False,
+        repr=False,
+        viewonly=True,
+        back_populates="project",
+        # NOTE: If the data_connector ID is not null below, then multiple joins are possible here,
+        # since an entity slug for a data connector owned by a project and an entity slug for the
+        # project itself will be in the same table.
+ primaryjoin="and_(EntitySlugORM.project_id == ProjectORM.id, EntitySlugORM.data_connector_id.is_(None))", ) repositories: Mapped[list["ProjectRepositoryORM"]] = relationship( back_populates="project", @@ -62,6 +77,16 @@ class ProjectORM(BaseORM): ) """Indicates whether a project is a template project or not.""" + old_slugs: Mapped[list["EntitySlugOldORM"]] = relationship( + back_populates="project", + default_factory=list, + repr=False, + init=False, + viewonly=True, + lazy="selectin", + primaryjoin="and_(EntitySlugOldORM.project_id == ProjectORM.id, EntitySlugOldORM.data_connector_id.is_(None))", + ) + def dump(self, with_documentation: bool = False) -> models.Project: """Create a project model from the ProjectORM.""" return models.Project( @@ -81,6 +106,19 @@ def dump(self, with_documentation: bool = False) -> models.Project: documentation=self.documentation if with_documentation else None, template_id=self.template_id, is_template=self.is_template, + secrets_mount_directory=self.secrets_mount_directory or constants.DEFAULT_SESSION_SECRETS_MOUNT_DIR, + ) + + def dump_as_namespace(self) -> ProjectNamespace: + """Get the namespace representation of the project.""" + return ProjectNamespace( + id=self.slug.namespace.id, + created_by=self.created_by_id, + underlying_resource_id=self.id, + latest_slug=self.slug.slug, + name=self.name, + creation_date=self.creation_date, + path=ProjectPath.from_strings(self.slug.namespace.slug, self.slug.slug), ) @@ -95,3 +133,122 @@ class ProjectRepositoryORM(BaseORM): ForeignKey("projects.id", ondelete="CASCADE"), default=None, index=True ) project: Mapped[Optional[ProjectORM]] = relationship(back_populates="repositories", default=None, repr=False) + + +class SessionSecretSlotORM(BaseORM): + """A slot for a secret in a session.""" + + __tablename__ = "session_secret_slots" + __table_args__ = ( + UniqueConstraint( + "project_id", + "filename", + name="_unique_project_id_filename", + ), + ) + + id: Mapped[ULID] = mapped_column("id", ULIDType, primary_key=True, default_factory=lambda: str(ULID()), init=False) + """ID of this session secret slot.""" + + project_id: Mapped[ULID] = mapped_column(ForeignKey(ProjectORM.id, ondelete="CASCADE"), index=True, nullable=False) + """ID of the project.""" + project: Mapped[ProjectORM] = relationship(init=False, repr=False, lazy="selectin") + + name: Mapped[str] = mapped_column("name", String(99)) + """Name of the session secret slot.""" + + description: Mapped[str | None] = mapped_column("description", String(500)) + """Human-readable description of the session secret slot.""" + + filename: Mapped[str] = mapped_column("filename", String(200)) + """The filename given to the corresponding secret when mounted in the session.""" + + created_by_id: Mapped[str] = mapped_column(ForeignKey(UserORM.keycloak_id), index=True, nullable=False) + """User ID of the creator of the session secret slot.""" + + creation_date: Mapped[datetime] = mapped_column( + "creation_date", DateTime(timezone=True), default=func.now(), nullable=False + ) + updated_at: Mapped[datetime] = mapped_column( + "updated_at", + DateTime(timezone=True), + default=None, + server_default=func.now(), + onupdate=func.now(), + nullable=False, + ) + + def dump(self) -> models.SessionSecretSlot: + """Create a session secret slot model from the SessionSecretSlotORM.""" + return models.SessionSecretSlot( + id=self.id, + project_id=self.project_id, + name=self.name, + description=self.description, + filename=self.filename, + created_by_id=self.created_by_id, + 
creation_date=self.creation_date, + updated_at=self.updated_at, + ) + + +class SessionSecretORM(BaseORM): + """Secrets for a project's sessions.""" + + __tablename__ = "session_secrets" + __table_args__ = ( + UniqueConstraint( + "secret_slot_id", + "user_id", + name="_unique_secret_slot_id_user_id", + ), + ) + + id: Mapped[ULID] = mapped_column("id", ULIDType, primary_key=True, default_factory=lambda: str(ULID()), init=False) + """ID of this session secret.""" + + user_id: Mapped[str] = mapped_column( + ForeignKey(UserORM.keycloak_id, ondelete="CASCADE"), index=True, nullable=False + ) + + secret_slot_id: Mapped[ULID] = mapped_column( + "secret_slot_id", ForeignKey(SessionSecretSlotORM.id, ondelete="CASCADE") + ) + secret_slot: Mapped[SessionSecretSlotORM] = relationship(init=False, repr=False, lazy="selectin") + + secret_id: Mapped[ULID] = mapped_column("secret_id", ForeignKey(SecretORM.id, ondelete="CASCADE")) + secret: Mapped[SecretORM] = relationship(init=False, repr=False, back_populates="session_secrets", lazy="selectin") + + def dump(self) -> models.SessionSecret: + """Create a session secret model from the SessionSecretORM.""" + return models.SessionSecret( + secret_slot=self.secret_slot.dump(), + secret_id=self.secret_id, + ) + + +class ProjectMigrationsORM(BaseORM): + """Tracks project migrations from an old project (project_v1_id) to a new project (project_id).""" + + __tablename__ = "project_migrations" + __table_args__ = (UniqueConstraint("project_v1_id", name="uq_project_v1_id"),) + + id: Mapped[ULID] = mapped_column("id", ULIDType, primary_key=True, default_factory=lambda: str(ULID()), init=False) + + project_v1_id: Mapped[int] = mapped_column("project_v1_id", Integer, nullable=False, unique=True) + """The old project being migrated. 
Must be unique.""" + + project_id: Mapped[ULID] = mapped_column(ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True) + """The new project of the migration of the v1.""" + + project: Mapped[ProjectORM] = relationship(init=False, repr=False, lazy="selectin") + """Relationship to the new project.""" + + launcher_id: Mapped[Optional[ULID]] = mapped_column(ULIDType, nullable=True, default=None) + """Stores the launcher ID without enforcing a foreign key.""" + + def dump(self) -> models.ProjectMigrationInfo: + """Create a project model from the ProjectMigrationInfoORM.""" + return models.ProjectMigrationInfo( + project_id=self.project_id, v1_id=self.project_v1_id, launcher_id=self.launcher_id + ) diff --git a/components/renku_data_services/repositories/apispec.py b/components/renku_data_services/repositories/apispec.py index d0007b9c9..6c64385e4 100644 --- a/components/renku_data_services/repositories/apispec.py +++ b/components/renku_data_services/repositories/apispec.py @@ -1,12 +1,12 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2024-10-18T11:06:23+00:00 +# timestamp: 2025-03-19T10:21:12+00:00 from __future__ import annotations from typing import Optional -from pydantic import ConfigDict, Field +from pydantic import ConfigDict, Field, RootModel from renku_data_services.repositories.apispec_base import BaseAPISpec @@ -19,11 +19,13 @@ class RepositoryPermissions(BaseAPISpec): class Error(BaseAPISpec): - code: int = Field(..., example=1404, gt=0) + code: int = Field(..., examples=[1404], gt=0) detail: Optional[str] = Field( - None, example="A more detailed optional message showing what the problem was" + None, examples=["A more detailed optional message showing what the problem was"] + ) + message: str = Field( + ..., examples=["Something went wrong - please try again later"] ) - message: str = Field(..., example="Something went wrong - please try again later") class ErrorResponse(BaseAPISpec): @@ -37,12 +39,12 @@ class RepositoryMetadata(BaseAPISpec): git_http_url: str = Field( ..., description="A URL which can be opened in a browser, i.e. a web page.", - example="https://example.org", + examples=["https://example.org"], ) web_url: str = Field( ..., description="A URL which can be opened in a browser, i.e. a web page.", - example="https://example.org", + examples=["https://example.org"], ) permissions: RepositoryPermissions @@ -54,7 +56,7 @@ class RepositoryProviderMatch(BaseAPISpec): provider_id: str = Field( ..., description='ID of a OAuth2 provider, e.g. 
"gitlab.com".', - example="some-id", + examples=["some-id"], ) connection_id: Optional[str] = Field( None, diff --git a/components/renku_data_services/repositories/db.py b/components/renku_data_services/repositories/db.py index 6c06fdf66..f48607e13 100644 --- a/components/renku_data_services/repositories/db.py +++ b/components/renku_data_services/repositories/db.py @@ -4,6 +4,7 @@ from typing import Literal from urllib.parse import urlparse +from authlib.integrations.httpx_client import OAuthError from httpx import AsyncClient as HttpClient from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession @@ -120,7 +121,15 @@ async def _get_repository_authenticated( headers = adapter.api_common_headers or dict() if etag: headers["If-None-Match"] = etag - response = await oauth2_client.get(request_url, headers=headers) + try: + response = await oauth2_client.get(request_url, headers=headers) + except OAuthError as err: + if err.error == "bad_refresh_token": + raise errors.InvalidTokenError( + message="The refresh token for the repository has expired or is invalid.", + detail=f"Please reconnect your integration for {repository_url} and try again.", + ) from err + raise if response.status_code == 304: return "304" diff --git a/components/renku_data_services/repositories/provider_adapters.py b/components/renku_data_services/repositories/provider_adapters.py index 5d18a5eb5..558ea3caf 100644 --- a/components/renku_data_services/repositories/provider_adapters.py +++ b/components/renku_data_services/repositories/provider_adapters.py @@ -4,13 +4,15 @@ from urllib.parse import quote, urljoin, urlparse, urlunparse from httpx import Response -from sanic.log import logger from renku_data_services import errors +from renku_data_services.app_config import logging from renku_data_services.connected_services import orm as connected_services_schemas from renku_data_services.connected_services.apispec import ProviderKind from renku_data_services.repositories import external_models, models +logger = logging.getLogger(__name__) + class GitProviderAdapter(ABC): """Defines the functionality of git providers adapters.""" diff --git a/components/renku_data_services/repositories/utils.py b/components/renku_data_services/repositories/utils.py index 69d42ac5a..c2c9dcd14 100644 --- a/components/renku_data_services/repositories/utils.py +++ b/components/renku_data_services/repositories/utils.py @@ -7,7 +7,10 @@ async def probe_repository(repository_url: str) -> bool: """Probe a repository to check if it is publicly available.""" async with httpx.AsyncClient(timeout=5) as client: url = f"{repository_url}/info/refs?service=git-upload-pack" - res = await client.get(url=url, follow_redirects=True) - if res.status_code != 200: + try: + res = await client.get(url=url, follow_redirects=True) + if res.status_code != 200: + return False + return bool(res.headers.get("Content-Type") == "application/x-git-upload-pack-advertisement") + except httpx.HTTPError: return False - return bool(res.headers.get("Content-Type") == "application/x-git-upload-pack-advertisement") diff --git a/components/renku_data_services/search/__init__.py b/components/renku_data_services/search/__init__.py new file mode 100644 index 000000000..80bf4d487 --- /dev/null +++ b/components/renku_data_services/search/__init__.py @@ -0,0 +1 @@ +"""Integration with SOLR and search.""" diff --git a/components/renku_data_services/search/api.spec.yaml b/components/renku_data_services/search/api.spec.yaml new file mode 100644 index 000000000..78b5550b6 --- 
--- /dev/null
+++ b/components/renku_data_services/search/api.spec.yaml
@@ -0,0 +1,535 @@
+openapi: 3.0.2
+info:
+  title: Renku Data Services API
+  description: |
+    This service is the main backend for Renku. It provides information about users, projects,
+    cloud storage, access to compute resources and many other things.
+  version: v1
+servers:
+  - url: /api/data
+paths:
+  /search/query:
+    get:
+      summary: Run a search query.
+      tags:
+        - search
+      description: |
+        Please note: this description is replaced at runtime with the
+        contents of the query manual markdown file. Everything added
+        in this file will be replaced.
+      parameters:
+        - in: query
+          description: query parameters
+          name: params
+          style: form
+          explode: true
+          schema:
+            $ref: "#/components/schemas/SearchQuery"
+      responses:
+        "422":
+          description: Failed to validate the query parameters
+        "500":
+          description: Internal server error.
+        "503":
+          description: Temporary internal error.
+        "200":
+          description: Search results according to the query.
+          #NOTE: This is not the standard way we do pagination, but to
+          #be compatible with the current search API we make an
+          #exception for the search here
+          headers:
+            x-page:
+              required: true
+              schema:
+                type: integer
+                format: int32
+            x-per-page:
+              required: true
+              schema:
+                type: integer
+                format: int32
+            x-total:
+              required: true
+              schema:
+                type: integer
+                format: int64
+            x-total-pages:
+              required: true
+              schema:
+                type: integer
+                format: int32
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/SearchResult"
+  /search/reprovision:
+    post:
+      summary: Start a new reprovisioning
+      description: Only a single reprovisioning is active at any time
+      responses:
+        "201":
+          description: The reprovisioning is/will be started
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/Reprovisioning"
+        "409":
+          description: A reprovisioning is already started
+        default:
+          $ref: "#/components/responses/Error"
+      tags:
+        - search
+    get:
+      summary: Return status of reprovisioning
+      responses:
+        "200":
+          description: Status of reprovisioning if there's one in progress
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/ReprovisioningStatus"
+        "404":
+          description: There's no active reprovisioning
+          content:
+            application/json:
+              schema:
+                $ref: "#/components/schemas/ErrorResponse"
+        default:
+          $ref: "#/components/responses/Error"
+      tags:
+        - search
+    delete:
+      summary: Stop an active reprovisioning
+      responses:
+        "204":
+          description: The reprovisioning was stopped or there was no one in progress
+        default:
+          $ref: "#/components/responses/Error"
+      tags:
+        - search
+
+components:
+  schemas:
+    SearchQuery:
+      description: Query params for the search request
+      allOf:
+        - $ref: "#/components/schemas/PaginationRequest"
+        - properties:
+            q:
+              description: The search query.
+              type: string
+              default: ""
+    PaginationRequest:
+      type: object
+      additionalProperties: false
+      properties:
+        page:
+          description: Result's page number starting from 1
+          type: integer
+          minimum: 1
+          default: 1
+        per_page:
+          description: The number of results per page
+          type: integer
+          minimum: 1
+          maximum: 100
+          default: 20
+    FacetData:
+      title: FacetData
+      examples:
+        - entityType:
+            Project: 15
+            User: 3
+      type: object
+      required:
+        - entityType
+        - keywords
+      properties:
+        entityType:
+          $ref: '#/components/schemas/Map_EntityType_Int'
+        keywords:
+          $ref: '#/components/schemas/Map_EntityType_Int'
+    Group:
+      title: Group
+      examples:
+        - type: Group
+          id: 2CAF4C73F50D4514A041C9EDDB025A36
+          name: SDSC
+          namespace: SDSC
+          path: sdsc
+          slug: sdsc
+          description: SDSC group
+          score: 1.1
+      type: object
+      required:
+        - id
+        - name
+        - path
+        - slug
+        - type
+      properties:
+        id:
+          type: string
+        name:
+          type: string
+        path:
+          type: string
+        slug:
+          type: string
+        description:
+          type: string
+        score:
+          type: number
+          format: double
+        type:
+          type: string
+          const: Group
+    Map_EntityType_Int:
+      title: Map_EntityType_Int
+      type: object
+      additionalProperties:
+        type: integer
+        format: int32
+    PageDef:
+      title: PageDef
+      type: object
+      required:
+        - limit
+        - offset
+      properties:
+        limit:
+          type: integer
+          format: int32
+        offset:
+          type: integer
+          format: int32
+    PageWithTotals:
+      title: PageWithTotals
+      type: object
+      required:
+        - page
+        - totalResult
+        - totalPages
+      properties:
+        page:
+          $ref: '#/components/schemas/PageDef'
+        totalResult:
+          type: integer
+          format: int64
+        totalPages:
+          type: integer
+          format: int32
+        prevPage:
+          type: integer
+          format: int32
+        nextPage:
+          type: integer
+          format: int32
+    SearchProject:
+      title: Project
+      examples:
+        - type: Project
+          id: 01HRA7AZ2Q234CDQWGA052F8MK
+          name: renku
+          slug: renku
+          path: user/renku
+          namespace:
+            type: Group
+            id: 2CAF4C73F50D4514A041C9EDDB025A36
+            name: SDSC
+            path: sdsc
+            slug: sdsc
+            description: SDSC group
+            score: 1.1
+          repositories:
+            - https://github.com/renku
+          visibility: public
+          description: Renku project
+          createdBy:
+            type: User
+            id: 1CAF4C73F50D4514A041C9EDDB025A36
+            slug: albein
+            path: albein
+            firstName: Albert
+            lastName: Einstein
+            score: 2.1
+          creationDate: '2025-03-06T15:05:42.058323392Z'
+          keywords:
+            - data
+            - science
+          score: 1
+      type: object
+      required:
+        - id
+        - name
+        - slug
+        - path
+        - visibility
+        - creationDate
+        - type
+      properties:
+        id:
+          type: string
+        name:
+          type: string
+        slug:
+          type: string
+        path:
+          type: string
+        namespace:
+          $ref: '#/components/schemas/UserOrGroup'
+        repositories:
+          type: array
+          items:
+            type: string
+        visibility:
+          $ref: '#/components/schemas/Visibility'
+        description:
+          type: string
+        createdBy:
+          $ref: '#/components/schemas/User'
+        creationDate:
+          type: string
+          format: date-time
+        keywords:
+          type: array
+          items:
+            type: string
+        score:
+          type: number
+          format: double
+        type:
+          type: string
+          const: Project
+    SearchDataConnector:
+      title: DataConnector
+      examples:
+        - type: DataConnector
+          id: 01HRA7AZ2Q234CDQWGA052F8MK
+          name: renku
+          slug: renku
+          namespace:
+            type: Group
+            id: 2CAF4C73F50D4514A041C9EDDB025A36
+            name: SDSC
+            path: sdsc
+            slug: sdsc
+            description: SDSC group
+            score: 1.1
+          visibility: public
+          description: Renku project
+          createdBy:
+            type: User
+            id: 1CAF4C73F50D4514A041C9EDDB025A36
+            path: albein
+            slug: albein
+            firstName: Albert
+            lastName: Einstein
+            score: 2.1
+          creationDate: '2025-03-06T15:05:42.058323392Z'
+          keywords:
+            - data
+            - science
+          score: 1
+      type: object
+      required:
+        - id
+        - name
+        - slug
+        - path
+        - visibility
+        - creationDate
+        - type
+        - storageType
+        - readonly
+      properties:
+        id:
+          type: string
+        storageType:
+          type: string
+        readonly:
+          type: boolean
+        name:
+          type: string
+        slug:
+          type: string
+        path:
+          type: string
+        namespace:
+          $ref: '#/components/schemas/UserOrGroupOrProject'
+        visibility:
+          $ref: '#/components/schemas/Visibility'
+        description:
+          type: string
+        createdBy:
+          $ref: '#/components/schemas/User'
+        creationDate:
+          type: string
+          format: date-time
+        keywords:
+          type: array
+          items:
+            type: string
+        score:
+          type: number
+          format: double
+        type:
+          type: string
+          const: DataConnector
+    SearchEntity:
+      title: SearchEntity
+      oneOf:
+        - $ref: '#/components/schemas/Group'
+        - $ref: '#/components/schemas/SearchProject'
+        - $ref: '#/components/schemas/User'
+        - $ref: '#/components/schemas/SearchDataConnector'
+      discriminator:
+        propertyName: type
+        mapping:
+          Group: '#/components/schemas/Group'
+          Project: '#/components/schemas/SearchProject'
+          User: '#/components/schemas/User'
+          DataConnector: '#/components/schemas/SearchDataConnector'
+    SearchResult:
+      title: SearchResult
+      type: object
+      required:
+        - facets
+        - pagingInfo
+      properties:
+        items:
+          type: array
+          items:
+            $ref: '#/components/schemas/SearchEntity'
+        facets:
+          $ref: '#/components/schemas/FacetData'
+        pagingInfo:
+          $ref: '#/components/schemas/PageWithTotals'
+    User:
+      title: User
+      examples:
+        - type: User
+          id: 1CAF4C73F50D4514A041C9EDDB025A36
+          path: albein
+          slug: albein
+          firstName: Albert
+          lastName: Einstein
+          score: 2.1
+      type: object
+      required:
+        - id
+        - type
+        - path
+        - slug
+      properties:
+        id:
+          type: string
+        path:
+          type: string
+        slug:
+          type: string
+        firstName:
+          type: string
+        lastName:
+          type: string
+        score:
+          type: number
+          format: double
+        type:
+          type: string
+          const: User
+    UserOrGroup:
+      title: UserOrGroup
+      examples:
+        - type: Group
+          id: 2CAF4C73F50D4514A041C9EDDB025A36
+          name: SDSC
+          namespace: SDSC
+          description: SDSC group
+          score: 1.1
+      oneOf:
+        - $ref: '#/components/schemas/Group'
+        - $ref: '#/components/schemas/User'
+      discriminator:
+        propertyName: type
+        mapping:
+          Group: '#/components/schemas/Group'
+          User: '#/components/schemas/User'
+    UserOrGroupOrProject:
+      title: UserOrGroupOrProject
+      examples:
+        - type: Group
+          id: 2CAF4C73F50D4514A041C9EDDB025A36
+          name: SDSC
+          namespace: SDSC
+          description: SDSC group
+          score: 1.1
+      oneOf:
+        - $ref: '#/components/schemas/Group'
+        - $ref: '#/components/schemas/User'
+        - $ref: '#/components/schemas/SearchProject'
+      discriminator:
+        propertyName: type
+        mapping:
+          Group: '#/components/schemas/Group'
+          User: '#/components/schemas/User'
+          Project: '#/components/schemas/SearchProject'
+    Visibility:
+      description: Project's visibility levels
+      type: string
+      enum:
+        - private
+        - public
+    Reprovisioning:
+      description: A reprovisioning
+      type: object
+      properties:
+        id:
+          $ref: "#/components/schemas/Ulid"
+        start_date:
+          description: The date and time the reprovisioning was started (in UTC and ISO-8601 format)
+          type: string
+          format: date-time
+          example: "2023-11-01T17:32:28Z"
+      required:
+        - id
+        - start_date
+    ReprovisioningStatus:
+      description: Status of a reprovisioning
+      allOf:
+        - $ref: "#/components/schemas/Reprovisioning"
+    Ulid:
+      description: ULID identifier
+      type: string
+      minLength: 26
+      maxLength: 26
+      pattern: "^[0-7][0-9A-HJKMNP-TV-Z]{25}$"  # This is case-insensitive
+    ErrorResponse:
+      type: object
+      properties:
+        error:
+          type: object
+          properties:
+            code:
+              type: integer
+              minimum: 0
+              exclusiveMinimum: true
+              example: 1404
+            detail:
+              type: string
+              example: A more detailed optional message showing what the problem was
+            message:
+              type: string
+              example: Something went wrong - please try again later
+          required:
+            - code
+            - message
+      required:
+        - error
+  responses:
+    Error:
+      description: The schema for all 4xx and 5xx responses
+      content:
+        application/json:
+          schema:
+            $ref: "#/components/schemas/ErrorResponse"
diff --git a/components/renku_data_services/search/apispec.py b/components/renku_data_services/search/apispec.py
new file mode 100644
index 000000000..326b6472f
--- /dev/null
+++ b/components/renku_data_services/search/apispec.py
@@ -0,0 +1,195 @@
+# generated by datamodel-codegen:
+#   filename:  api.spec.yaml
+#   timestamp: 2025-06-12T09:56:04+00:00
+
+from __future__ import annotations
+
+from datetime import datetime
+from enum import Enum
+from typing import Dict, List, Literal, Optional, Union
+
+from pydantic import ConfigDict, Field, RootModel
+from renku_data_services.search.apispec_base import BaseAPISpec
+
+
+class PaginationRequest(BaseAPISpec):
+    model_config = ConfigDict(
+        extra="forbid",
+    )
+    page: int = Field(1, description="Result's page number starting from 1", ge=1)
+    per_page: int = Field(
+        20, description="The number of results per page", ge=1, le=100
+    )
+
+
+class Group(BaseAPISpec):
+    id: str
+    name: str
+    path: str
+    slug: str
+    description: Optional[str] = None
+    score: Optional[float] = None
+    type: Literal["Group"] = "Group"
+
+
+class MapEntityTypeInt(RootModel[Optional[Dict[str, int]]]):
+    root: Optional[Dict[str, int]] = None
+
+
+class PageDef(BaseAPISpec):
+    limit: int
+    offset: int
+
+
+class PageWithTotals(BaseAPISpec):
+    page: PageDef
+    totalResult: int
+    totalPages: int
+    prevPage: Optional[int] = None
+    nextPage: Optional[int] = None
+
+
+class User(BaseAPISpec):
+    id: str
+    path: str
+    slug: str
+    firstName: Optional[str] = None
+    lastName: Optional[str] = None
+    score: Optional[float] = None
+    type: Literal["User"] = "User"
+
+
+class UserOrGroup(RootModel[Union[Group, User]]):
+    root: Union[Group, User] = Field(
+        ...,
+        discriminator="type",
+        examples=[
+            {
+                "type": "Group",
+                "id": "2CAF4C73F50D4514A041C9EDDB025A36",
+                "name": "SDSC",
+                "namespace": "SDSC",
+                "description": "SDSC group",
+                "score": 1.1,
+            }
+        ],
+        title="UserOrGroup",
+    )
+
+
+class Visibility(Enum):
+    private = "private"
+    public = "public"
+
+
+class Ulid(RootModel[str]):
+    root: str = Field(
+        ...,
+        description="ULID identifier",
+        max_length=26,
+        min_length=26,
+        pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$",
+    )
+
+
+class Error(BaseAPISpec):
+    code: int = Field(..., examples=[1404], gt=0)
+    detail: Optional[str] = Field(
+        None, examples=["A more detailed optional message showing what the problem was"]
+    )
+    message: str = Field(
+        ..., examples=["Something went wrong - please try again later"]
+    )
+
+
+class ErrorResponse(BaseAPISpec):
+    error: Error
+
+
+class SearchQuery(PaginationRequest):
+    q: str = Field("", description="The search query.")
+
+
+class FacetData(BaseAPISpec):
+    entityType: MapEntityTypeInt
+    keywords: MapEntityTypeInt
+
+
+class SearchProject(BaseAPISpec):
+    id: str
+    name: str
+    slug: str
+    path: str
+    namespace: Optional[UserOrGroup] = None
+    repositories: Optional[List[str]] = None
+    visibility: Visibility
+    description: Optional[str] = None
+    createdBy: Optional[User] = None
+    creationDate: datetime
+    keywords: Optional[List[str]] = None
+    score: Optional[float] = None
+    type: Literal["Project"] = "Project"
+
+
+class UserOrGroupOrProject(RootModel[Union[Group, User, SearchProject]]):
+    root: Union[Group, User, SearchProject] = Field(
+        ...,
+        discriminator="type",
+        examples=[
+            {
+                "type": "Group",
+                "id": "2CAF4C73F50D4514A041C9EDDB025A36",
+                "name": "SDSC",
+                "namespace": "SDSC",
+                "description": "SDSC group",
+                "score": 1.1,
+            }
+        ],
+        title="UserOrGroupOrProject",
+    )
+
+
+class Reprovisioning(BaseAPISpec):
+    id: Ulid
+    start_date: datetime = Field(
+        ...,
+        description="The date and time the reprovisioning was started (in UTC and ISO-8601 format)",
+        examples=["2023-11-01T17:32:28Z"],
+    )
+
+
+class ReprovisioningStatus(Reprovisioning):
+    pass
+
+
+class SearchQueryGetParametersQuery(BaseAPISpec):
+    params: Optional[SearchQuery] = None
+
+
+class SearchDataConnector(BaseAPISpec):
+    id: str
+    storageType: str
+    readonly: bool
+    name: str
+    slug: str
+    path: str
+    namespace: Optional[UserOrGroupOrProject] = None
+    visibility: Visibility
+    description: Optional[str] = None
+    createdBy: Optional[User] = None
+    creationDate: datetime
+    keywords: Optional[List[str]] = None
+    score: Optional[float] = None
+    type: Literal["DataConnector"] = "DataConnector"
+
+
+class SearchEntity(RootModel[Union[Group, SearchProject, User, SearchDataConnector]]):
+    root: Union[Group, SearchProject, User, SearchDataConnector] = Field(
+        ..., discriminator="type", title="SearchEntity"
+    )
+
+
+class SearchResult(BaseAPISpec):
+    items: Optional[List[SearchEntity]] = None
+    facets: FacetData
+    pagingInfo: PageWithTotals
diff --git a/components/renku_data_services/message_queue/apispec_base.py b/components/renku_data_services/search/apispec_base.py
similarity index 100%
rename from components/renku_data_services/message_queue/apispec_base.py
rename to components/renku_data_services/search/apispec_base.py
diff --git a/components/renku_data_services/search/authz.py b/components/renku_data_services/search/authz.py
new file mode 100644
index 000000000..5bc66547e
--- /dev/null
+++ b/components/renku_data_services/search/authz.py
@@ -0,0 +1,96 @@
+"""Utility functions for integrating authzed into search."""
+
+import re
+from collections.abc import Iterable
+
+from authzed.api.v1 import AsyncClient as AuthzClient
+from authzed.api.v1 import Consistency, LookupResourcesRequest, ObjectReference, SubjectReference
+from authzed.api.v1.permission_service_pb2 import LOOKUP_PERMISSIONSHIP_HAS_PERMISSION
+
+from renku_data_services.app_config import logging
+from renku_data_services.authz.models import Role, Scope
+from renku_data_services.base_models.core import ResourceType
+from renku_data_services.base_models.nel import Nel
+from renku_data_services.solr.entity_documents import EntityType
+
+logger = logging.getLogger(__name__)
+
+__object_id_regex = re.compile("^[a-zA-Z0-9/_|\\-=+]{1,}$")
+
+
+def __check_authz_object_id(id: str) -> bool:
+    """Checks whether the given string is a valid authz object id.
+
+    Unfortunately, I couldn't find anything in the authz python
+    package that would do it. You can safely create invalid
+    ObjectReferences and send them to authz; only the server will give
+    a 400 error back.
+
+    The regex is copied from the error response when sending a request
+    with bad data.
+
+    Since wildcards are not supported for lookup resources, this part
+    is removed from the regex and disallowed here.
+
+    """
+    return __object_id_regex.fullmatch(id) is not None
+
+
+async def __resources_with_permission(
+    client: AuthzClient, user_id: str, entity_types: Iterable[EntityType], permission_name: str
+) -> list[str]:
+    """Get all the resource IDs that a specific user has the given permission/role."""
+    result: list[str] = []
+
+    if not __check_authz_object_id(user_id):
+        logger.debug(f"The user-id passed is not a valid spicedb/authz id: {user_id}")
+        return result
+
+    user_ref = SubjectReference(object=ObjectReference(object_type=ResourceType.user.value, object_id=user_id))
+
+    for et in entity_types:
+        req = LookupResourcesRequest(
+            consistency=Consistency(fully_consistent=True),
+            resource_object_type=et.to_resource_type.value,
+            permission=permission_name,
+            subject=user_ref,
+        )
+        response = client.LookupResources(req)
+        async for o in response:
+            if o.permissionship == LOOKUP_PERMISSIONSHIP_HAS_PERMISSION:
+                result.append(o.resource_object_id)
+
+    logger.debug(f"Found ids for user:{user_id} perm={permission_name} ets={entity_types}: {result}")
+    return result
+
+
+async def get_non_public_read(client: AuthzClient, user_id: str, ets: Iterable[EntityType]) -> list[str]:
+    """Return all resource ids the given user has access to that are not public."""
+    ets = list(ets)
+    if EntityType.user in ets:
+        ets.remove(EntityType.user)  # users don't have this relation
+    return await __resources_with_permission(client, user_id, ets, Scope.NON_PUBLIC_READ.value)
+
+
+async def get_ids_for_roles(
+    client: AuthzClient, user_id: str, roles: Nel[Role], ets: Iterable[EntityType], direct_membership: bool
+) -> list[str]:
+    """Return all resource ids for which the given user has one of the given roles."""
+    ets = list(ets)
+    if EntityType.user in ets:
+        ets.remove(EntityType.user)  # users don't have this relation
+    result: set[str] = set()
+
+    for role in roles:
+        match role:
+            case Role.VIEWER:
+                permission = Scope.DIRECT_MEMBER.value if direct_membership else Scope.EXCLUSIVE_MEMBER.value
+            case Role.EDITOR:
+                permission = role.value if direct_membership else Scope.EXCLUSIVE_EDITOR.value
+            case Role.OWNER:
+                permission = role.value if direct_membership else Scope.EXCLUSIVE_OWNER.value
+
+        r = await __resources_with_permission(client, user_id, ets, permission)
+        result.update(r)
+
+    return list(result)
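For orientation, a hypothetical usage sketch of the helpers above (not part of the patch; wiring names such as `authz_client` and `user_id` are assumed to come from the request context, and the entity type members are those defined in `solr.entity_documents.EntityType`):

```python
from renku_data_services.solr.entity_documents import EntityType


async def readable_ids(authz_client, user_id: str) -> list[str]:
    # Resolve the non-public entities a signed-in user may read; the search
    # core combines this list with a public-only filter (see core.py below).
    return await get_non_public_read(authz_client, user_id, [EntityType.project, EntityType.group])
```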
diff --git a/components/renku_data_services/search/blueprints.py b/components/renku_data_services/search/blueprints.py
new file mode 100644
index 000000000..9599b00b9
--- /dev/null
+++ b/components/renku_data_services/search/blueprints.py
@@ -0,0 +1,101 @@
+"""Search/reprovisioning blueprint."""
+
+from dataclasses import dataclass
+
+from sanic import HTTPResponse, Request, json
+from sanic.response import JSONResponse
+
+import renku_data_services.base_models as base_models
+import renku_data_services.search.core as core
+from renku_data_services.app_config import logging
+from renku_data_services.authz.authz import Authz
+from renku_data_services.base_api.auth import authenticate, only_admins
+from renku_data_services.base_api.blueprint import BlueprintFactoryResponse, CustomBlueprint
+from renku_data_services.base_api.misc import validate_query
+from renku_data_services.base_models.metrics import MetricsService
+from renku_data_services.search.apispec import SearchQuery
+from renku_data_services.search.reprovision import SearchReprovision
+from renku_data_services.search.solr_user_query import UsernameResolve
+from renku_data_services.search.user_query_parser import QueryParser
+from renku_data_services.solr.solr_client import SolrClientConfig
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass(kw_only=True)
+class SearchBP(CustomBlueprint):
+    """Handlers for search."""
+
+    authenticator: base_models.Authenticator
+    solr_config: SolrClientConfig
+    search_reprovision: SearchReprovision
+    authz: Authz
+    username_resolve: UsernameResolve
+    metrics: MetricsService
+
+    def post(self) -> BlueprintFactoryResponse:
+        """Start a new reprovisioning."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        async def _post(request: Request, user: base_models.APIUser) -> HTTPResponse | JSONResponse:
+            reprovisioning = await self.search_reprovision.acquire_reprovision()
+
+            request.app.add_task(
+                self.search_reprovision.init_reprovision(user, reprovisioning=reprovisioning),
+                name=f"reprovisioning-{reprovisioning.id}",
+            )
+
+            return json({"id": str(reprovisioning.id), "start_date": reprovisioning.start_date.isoformat()}, 201)
+
+        return "/search/reprovision", ["POST"], _post
+
+    def get_status(self) -> BlueprintFactoryResponse:
+        """Get reprovisioning status."""
+
+        @authenticate(self.authenticator)
+        async def _get_status(_: Request, __: base_models.APIUser) -> JSONResponse | HTTPResponse:
+            reprovisioning = await self.search_reprovision.get_current_reprovision()
+            if not reprovisioning:
+                return HTTPResponse(status=404)
+            return json({"id": str(reprovisioning.id), "start_date": reprovisioning.start_date.isoformat()})
+
+        return "/search/reprovision", ["GET"], _get_status
+
+    def delete(self) -> BlueprintFactoryResponse:
+        """Stop reprovisioning (if any)."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        async def _delete(_: Request, __: base_models.APIUser) -> HTTPResponse:
+            await self.search_reprovision.kill_reprovision_lock()
+            return HTTPResponse(status=204)
+
+        return "/search/reprovision", ["DELETE"], _delete
+
+    def query(self) -> BlueprintFactoryResponse:
+        """Run a query."""
+
+        @authenticate(self.authenticator)
+        @validate_query(query=SearchQuery)
+        async def _query(_: Request, user: base_models.APIUser, query: SearchQuery) -> HTTPResponse | JSONResponse:
+            per_page = query.per_page
+            offset = (query.page - 1) * per_page
+            uq = await QueryParser.parse(query.q)
+            logger.debug(f"Running search query: {query}")
+
+            result = await core.query(
+                self.authz.client, self.username_resolve, self.solr_config, uq, user, per_page, offset
+            )
+            await self.metrics.search_queried(user)
+            return json(
+                result.model_dump(by_alias=True, exclude_none=True, mode="json"),
+                headers={
+                    "x-page": f"{query.page}",
+                    "x-per-page": f"{per_page}",
+                    "x-total": f"{result.pagingInfo.totalResult}",
+                    "x-total-pages": f"{result.pagingInfo.totalPages}",
+                },
+            )
+
+        return "/search/query", ["GET"], _query
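For reference, a request against the `/search/query` route registered above could look like this (illustrative only; the host is a placeholder, while the `q`, `page`, and `per_page` parameters and the `x-total` header come from the API spec earlier in this patch):

```python
import httpx

# Search public entities matching "flight", second page, 10 results per page.
response = httpx.get(
    "https://renku.example.org/api/data/search/query",
    params={"q": "flight visibility:public", "page": 2, "per_page": 10},
)
print(response.headers.get("x-total"), response.json().get("items"))
```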
diff --git a/components/renku_data_services/search/converters.py b/components/renku_data_services/search/converters.py
new file mode 100644
index 000000000..b4294b018
--- /dev/null
+++ b/components/renku_data_services/search/converters.py
@@ -0,0 +1,148 @@
+"""Conversion functions."""
+
+from typing import cast
+
+from renku_data_services.authz.models import Visibility
+from renku_data_services.search.apispec import (
+    Group as GroupApi,
+)
+from renku_data_services.search.apispec import (
+    SearchDataConnector as DataConnectorApi,
+)
+from renku_data_services.search.apispec import (
+    SearchEntity,
+    UserOrGroup,
+    UserOrGroupOrProject,
+)
+from renku_data_services.search.apispec import (
+    SearchProject as ProjectApi,
+)
+from renku_data_services.search.apispec import (
+    User as UserApi,
+)
+from renku_data_services.search.apispec import (
+    Visibility as VisibilityApi,
+)
+from renku_data_services.solr.entity_documents import (
+    DataConnector as DataConnectorDocument,
+)
+from renku_data_services.solr.entity_documents import (
+    EntityDocReader,
+)
+from renku_data_services.solr.entity_documents import (
+    Group as GroupDocument,
+)
+from renku_data_services.solr.entity_documents import (
+    Project as ProjectDocument,
+)
+from renku_data_services.solr.entity_documents import (
+    User as UserDocument,
+)
+
+
+def from_visibility(v: Visibility) -> VisibilityApi:
+    """Creates an apispec visibility."""
+    match v:
+        case Visibility.PUBLIC:
+            return VisibilityApi.public
+        case Visibility.PRIVATE:
+            return VisibilityApi.private
+
+
+def from_user(user: UserDocument) -> UserApi:
+    """Creates an apispec user from a solr user document."""
+    return UserApi(
+        id=user.id,
+        slug=user.slug.value,
+        path=user.path,
+        firstName=user.firstName,
+        lastName=user.lastName,
+        score=user.score,
+    )
+
+
+def from_group(group: GroupDocument) -> GroupApi:
+    """Creates an apispec group from a solr group document."""
+    return GroupApi(
+        id=str(group.id),
+        name=group.name,
+        slug=group.slug.value,
+        path=group.path,
+        description=group.description,
+        score=group.score,
+    )
+
+
+def __creator_details(e: ProjectDocument | DataConnectorDocument) -> UserApi | None:
+    if e.creatorDetails is not None and e.creatorDetails.docs != []:
+        return from_user(UserDocument.from_dict(e.creatorDetails.docs[0]))
+    else:
+        return None
+
+
+def __namespace_details(d: ProjectDocument) -> UserOrGroup | None:
+    if d.namespaceDetails is not None and d.namespaceDetails.docs != []:
+        e = EntityDocReader.from_dict(d.namespaceDetails.docs[0])
+        if e is not None:
+            return UserOrGroup(cast(UserApi | GroupApi, from_entity(e).root))
+    return None
+
+
+def __namespace_details_dc(d: DataConnectorDocument) -> UserOrGroupOrProject | None:
+    if d.namespaceDetails is not None and d.namespaceDetails.docs != []:
+        e = EntityDocReader.from_dict(d.namespaceDetails.docs[0])
+        if e is not None:
+            return UserOrGroupOrProject(cast(UserApi | GroupApi | ProjectApi, from_entity(e).root))
+    return None
+
+
+def from_project(project: ProjectDocument) -> ProjectApi:
+    """Creates an apispec project from a solr project document."""
+    return ProjectApi(
+        id=str(project.id),
+        name=project.name,
+        slug=project.slug.value,
+        path=project.path,
+        namespace=__namespace_details(project),
+        repositories=project.repositories,
+        visibility=from_visibility(project.visibility),
+        description=project.description,
+        createdBy=__creator_details(project),
+        creationDate=project.creationDate,
+        keywords=project.keywords,
+        score=project.score,
+    )
+
+
+def from_data_connector(dc: DataConnectorDocument) -> DataConnectorApi:
+    """Creates an apispec data connector from a solr data connector document."""
+    return DataConnectorApi(
+        id=str(dc.id),
+        name=dc.name,
+        slug=dc.slug.value,
+        path=dc.path,
+        namespace=__namespace_details_dc(dc),
+        visibility=from_visibility(dc.visibility),
+        description=dc.description,
+        createdBy=__creator_details(dc),
+        creationDate=dc.creationDate,
+        keywords=dc.keywords,
+        storageType=dc.storageType,
+        readonly=dc.readonly,
+        score=dc.score,
+    )
+
+
+def from_entity(
+    entity: GroupDocument | ProjectDocument | UserDocument | DataConnectorDocument,
+) -> SearchEntity:
+    """Creates an apispec entity from a solr entity document."""
+    match entity:
+        case UserDocument() as d:
+            return SearchEntity(from_user(d))
+        case GroupDocument() as d:
+            return SearchEntity(from_group(d))
+        case ProjectDocument() as d:
+            return SearchEntity(from_project(d))
+        case DataConnectorDocument() as d:
+            return SearchEntity(from_data_connector(d))
diff --git a/components/renku_data_services/search/core.py b/components/renku_data_services/search/core.py
new file mode 100644
index 000000000..2d776a693
--- /dev/null
+++ b/components/renku_data_services/search/core.py
@@ -0,0 +1,173 @@
+"""Business logic for searching."""
+
+import asyncio
+from collections.abc import Iterable
+from datetime import UTC, datetime
+
+from authzed.api.v1 import AsyncClient as AuthzClient
+
+import renku_data_services.search.apispec as apispec
+import renku_data_services.search.solr_token as st
+from renku_data_services.app_config import logging
+from renku_data_services.authz.models import Role
+from renku_data_services.base_models import APIUser
+from renku_data_services.base_models.nel import Nel
+from renku_data_services.search import authz, converters
+from renku_data_services.search.db import SearchUpdatesRepo
+from renku_data_services.search.models import DeleteDoc
+from renku_data_services.search.solr_user_query import (
+    AdminRole,
+    AuthAccess,
+    Context,
+    QueryInterpreter,
+    SolrUserQuery,
+    UsernameResolve,
+    UserRole,
+)
+from renku_data_services.search.user_query import UserQuery
+from renku_data_services.solr.entity_documents import DataConnector, EntityDocReader, EntityType, Group, Project, User
+from renku_data_services.solr.entity_schema import Fields
+from renku_data_services.solr.solr_client import (
+    DefaultSolrClient,
+    FacetTerms,
+    RawDocument,
+    SolrClient,
+    SolrClientConfig,
+    SolrDocument,
+    SolrQuery,
+    SubQuery,
+)
+
+logger = logging.getLogger(__name__)
+
+
+async def update_solr(search_updates_repo: SearchUpdatesRepo, solr_client: SolrClient, batch_size: int) -> None:
+    """Selects entries from the search staging table and updates SOLR."""
+    counter = 0
+    while True:
+        entries = await search_updates_repo.select_next(batch_size)
+        if entries == []:
+            break
+
+        ids = [e.id for e in entries]
+        try:
+            docs: list[SolrDocument] = [RawDocument(e.payload) for e in entries]
+            result = await solr_client.upsert(docs)
+            if result == "VersionConflict":
+                logger.error(f"There was a version conflict updating search entities: {docs}")
+                await search_updates_repo.mark_reset(ids)
+                await asyncio.sleep(1)
+            else:
+                counter = counter + len(entries)
+                await search_updates_repo.mark_processed(ids)
+
+                try:
+                    # In the above upsert, documents could get
+                    # "soft-deleted". This would finally remove them. As
+                    # the success of this is not production critical,
+                    # errors are only logged.
+                    await solr_client.delete(DeleteDoc.solr_query())
+                except Exception as de:
+                    logger.error("Error when removing soft-deleted documents", exc_info=de)
+
+        except Exception as e:
+            logger.error(f"Error while updating solr with entities {ids}", exc_info=e)
+            try:
+                await search_updates_repo.mark_failed(ids)
+            except Exception as e2:
+                logger.error("Error while setting search entities to failed", exc_info=e2)
+
+    if counter > 0:
+        logger.info(f"Updated {counter} entries in SOLR")
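`update_solr` is written to be driven by an outer loop. A minimal sketch of such a driver (hypothetical; the actual scheduling lives elsewhere in the service, and the polling interval is an assumption):

```python
async def run_update_loop(search_updates_repo: SearchUpdatesRepo, solr_client: SolrClient) -> None:
    # Drain the staging table in batches until the task is cancelled.
    while True:
        await update_solr(search_updates_repo, solr_client, batch_size=100)
        await asyncio.sleep(5)
```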
+
+
+async def _renku_query(
+    authz_client: AuthzClient, ctx: Context, uq: SolrUserQuery, limit: int, offset: int
+) -> SolrQuery:
+    """Create the final solr query embedding the given user query."""
+    logger.debug(f"Searching as user: {ctx.role or 'anonymous'}")
+    role_constraint: list[str] = [st.public_only()]
+    match ctx.role:
+        case AdminRole():
+            role_constraint = []
+        case UserRole() as u:
+            ids = await authz.get_non_public_read(authz_client, u.id, ctx.get_entity_types())
+            role_constraint = [st.public_or_ids(ids)]
+
+    return (
+        SolrQuery.query_all_fields(uq.query_str(), limit, offset)
+        .with_sort(uq.sort)
+        .add_filter(
+            st.created_by_exists(),
+        )
+        .add_filter(*role_constraint)
+        .with_facet(FacetTerms(name=Fields.entity_type, field=Fields.entity_type))
+        .with_facet(FacetTerms(name=Fields.keywords, field=Fields.keywords))
+        .add_sub_query(
+            Fields.creator_details,
+            SubQuery(
+                query="{!terms f=id v=$row.createdBy}", filter="{!terms f=_kind v=fullentity}", limit=1
+            ).with_all_fields(),
+        )
+        .add_sub_query(
+            Fields.namespace_details,
+            SubQuery(
+                query="{!terms f=path v=$row.namespacePath}",
+                filter="(isNamespace:true AND _kind:fullentity)",
+                limit=1,
+            ).with_all_fields(),
+        )
+    )
+
+
+async def query(
+    authz_client: AuthzClient,
+    username_resolve: UsernameResolve,
+    solr_config: SolrClientConfig,
+    query: UserQuery,
+    user: APIUser,
+    limit: int,
+    offset: int,
+) -> apispec.SearchResult:
+    """Run the given user query against solr and return the result."""
+
+    logger.debug(f"User search query: {query.render()}")
+
+    class RoleAuthAccess(AuthAccess):
+        async def get_ids_for_role(
+            self, user_id: str, roles: Nel[Role], ets: Iterable[EntityType], direct_membership: bool
+        ) -> list[str]:
+            return await authz.get_ids_for_roles(authz_client, user_id, roles, ets, direct_membership)
+
+    ctx = (
+        await Context.for_api_user(datetime.now(), UTC, user)
+        .with_auth_access(RoleAuthAccess())
+        .with_username_resolve(username_resolve)
+        .with_requested_entity_types(query)
+    )
+
+    suq = await QueryInterpreter.default().run(ctx, query)
+    solr_query = await _renku_query(authz_client, ctx, suq, limit, offset)
+    logger.debug(f"Solr query: {solr_query.to_dict()}")
+
+    async with DefaultSolrClient(solr_config) as client:
+        results = await client.query(solr_query)
+        total_pages = int(results.response.num_found / limit)
+        if results.response.num_found % limit != 0:
+            total_pages += 1
+
+        solr_docs: list[Group | Project | DataConnector | User] = results.response.read_to(EntityDocReader.from_dict)
+
+        docs = list(map(converters.from_entity, solr_docs))
+        return apispec.SearchResult(
+            items=docs,
+            facets=apispec.FacetData(
+                entityType=apispec.MapEntityTypeInt(results.facets.get_counts(Fields.entity_type).to_simple_dict()),
+                keywords=apispec.MapEntityTypeInt(results.facets.get_counts(Fields.keywords).to_simple_dict()),
+            ),
+            pagingInfo=apispec.PageWithTotals(
+                page=apispec.PageDef(limit=limit, offset=offset),
+                totalPages=int(total_pages),
+                totalResult=results.response.num_found,
+            ),
+        )
diff --git a/components/renku_data_services/search/db.py b/components/renku_data_services/search/db.py
new file mode 100644
index 000000000..72f434ae6
--- /dev/null
+++ b/components/renku_data_services/search/db.py
@@ -0,0 +1,274 @@
+"""Database operations for search."""
+
+import json
+from collections.abc import Callable
+from datetime import datetime
+from textwrap import dedent
+from typing import Any, cast
+
+from sqlalchemy import delete, select, text, update
+from sqlalchemy.ext.asyncio import AsyncSession
+from ulid import ULID
+
+from renku_data_services.base_models.core import Slug
+from renku_data_services.data_connectors.models import DataConnector, GlobalDataConnector
+from renku_data_services.namespace.models import Group
+from renku_data_services.project.models import Project
+from renku_data_services.search.models import DeleteDoc, Entity
+from renku_data_services.search.orm import RecordState, SearchUpdatesORM
+from renku_data_services.solr.entity_documents import DataConnector as DataConnectorDoc
+from renku_data_services.solr.entity_documents import Group as GroupDoc
+from renku_data_services.solr.entity_documents import Project as ProjectDoc
+from renku_data_services.solr.entity_documents import User as UserDoc
+from renku_data_services.solr.solr_client import DocVersions
+from renku_data_services.users.models import UserInfo
+
+
+def _user_to_entity_doc(user: UserInfo) -> UserDoc:
+    return UserDoc(
+        path=user.namespace.path.serialize(),
+        slug=user.namespace.path.first,
+        id=user.id,
+        firstName=user.first_name,
+        lastName=user.last_name,
+        version=DocVersions.off(),
+    )
+
+
+def _group_to_entity_doc(group: Group) -> GroupDoc:
+    return GroupDoc(
+        path=group.slug,
+        slug=Slug(group.slug),
+        id=group.id,
+        name=group.name,
+        description=group.description,
+        version=DocVersions.off(),
+    )
+
+
+def _project_to_entity_doc(p: Project) -> ProjectDoc:
+    return ProjectDoc(
+        namespace_path=p.namespace.path.serialize(),
+        path=p.path.serialize(),
+        id=p.id,
+        name=p.name,
+        slug=Slug.from_name(p.slug),
+        visibility=p.visibility,
+        createdBy=p.created_by,
+        creationDate=p.creation_date,
+        repositories=p.repositories,
+        description=p.description,
+        keywords=p.keywords if p.keywords is not None else [],
+        version=DocVersions.off(),
+    )
+
+
+def _dataconnector_to_entity_doc(dc: DataConnector | GlobalDataConnector) -> DataConnectorDoc:
+    ns = dc.namespace.path.serialize() if isinstance(dc, DataConnector) else None
+    pt = dc.path.serialize() if isinstance(dc, DataConnector) else dc.slug
+    return DataConnectorDoc(
+        id=dc.id,
+        path=pt,
+        name=dc.name,
+        storageType=dc.storage.storage_type,
+        readonly=dc.storage.readonly,
+        slug=Slug.from_name(dc.slug),
+        visibility=dc.visibility,
+        createdBy=dc.created_by,
+        creationDate=dc.creation_date,
+        namespace_path=ns,
+        description=dc.description,
+        keywords=dc.keywords if dc.keywords is not None else [],
+        version=DocVersions.off(),
+    )
+
+
+class SearchUpdatesRepo:
+    """Db operations for the search updates table.
+
+    NOTE: This does not apply any authentication or authorization to calls.
+    """
+
+    def __init__(self, session_maker: Callable[..., AsyncSession]) -> None:
+        self.session_maker = session_maker
+
+    async def find_by_id(self, id: ULID) -> SearchUpdatesORM | None:
+        """Find a row by its primary key."""
+        async with self.session_maker() as session:
+            return await session.get(SearchUpdatesORM, id)
+
+    def __make_params(self, entity: Entity, started: datetime) -> dict[str, Any]:
+        match entity:
+            case Group() as g:
+                dg = _group_to_entity_doc(g)
+                return {
+                    "entity_id": str(dg.id),
+                    "entity_type": "Group",
+                    "created_at": started,
+                    "payload": json.dumps(dg.to_dict()),
+                }
+
+            case UserInfo() as u:
+                du = _user_to_entity_doc(u)
+                return {
+                    "entity_id": du.id,
+                    "entity_type": "User",
+                    "created_at": started,
+                    "payload": json.dumps(du.to_dict()),
+                }
+
+            case Project() as p:
+                dp = _project_to_entity_doc(p)
+                return {
+                    "entity_id": str(dp.id),
+                    "entity_type": "Project",
+                    "created_at": started,
+                    "payload": json.dumps(dp.to_dict()),
+                }
+
+            case DataConnector() as d:
+                dc = _dataconnector_to_entity_doc(d)
+                return {
+                    "entity_id": str(dc.id),
+                    "entity_type": "DataConnector",
+                    "created_at": started,
+                    "payload": json.dumps(dc.to_dict()),
+                }
+
+            case GlobalDataConnector() as d:
+                dc = _dataconnector_to_entity_doc(d)
+                return {
+                    "entity_id": str(dc.id),
+                    "entity_type": "DataConnector",
+                    "created_at": started,
+                    "payload": json.dumps(dc.to_dict()),
+                }
+
+            case DeleteDoc() as d:
+                return {
+                    "entity_id": d.id,
+                    "entity_type": d.entity_type,
+                    "created_at": started,
+                    "payload": json.dumps(d.to_dict()),
+                }
+
+    async def upsert(self, entity: Entity, started_at: datetime | None = None) -> ULID:
+        """Add entity documents to the staging table.
+
+        If an entity with the same id already exists, it is updated.
+        """
+        started = started_at if started_at is not None else datetime.now()
+        params = self.__make_params(entity, started)
+        async with self.session_maker() as session, session.begin():
+            result = await session.execute(
+                text(
+                    dedent("""\
+                        WITH new_user AS (
+                            INSERT INTO events.search_updates
+                              (entity_id, entity_type, created_at, payload)
+                            VALUES
+                              (:entity_id, :entity_type, :created_at, :payload)
+                            ON CONFLICT ("entity_id") DO UPDATE
+                            SET created_at = :created_at, payload = :payload
+                            RETURNING id
+                        ) SELECT * from new_user UNION
+                          SELECT id FROM events.search_updates WHERE entity_id = :entity_id AND entity_type = :entity_type
+                    """)
+                ),
+                params,
+            )
+            await session.commit()
+            el = result.first()
+            if el is None:
+                raise Exception(f"Inserting {entity} did not result in returning an id.")
+            return cast(ULID, ULID.from_str(el.id))  # huh? mypy wants this cast
+
+    async def insert(self, entity: Entity, started_at: datetime | None) -> ULID:
+        """Insert an entity document into the staging table.
+
+        Do nothing if it already exists.
+        """
+        started = started_at if started_at is not None else datetime.now()
+        params = self.__make_params(entity, started)
+        async with self.session_maker() as session, session.begin():
+            result = await session.execute(
+                text(
+                    dedent("""
+                        WITH new_entity AS (
+                            INSERT INTO events.search_updates
+                              (entity_id, entity_type, created_at, payload)
+                            VALUES
+                              (:entity_id, :entity_type, :created_at, :payload)
+                            ON CONFLICT ("entity_id") DO NOTHING
+                            RETURNING id
+                        ) SELECT * from new_entity UNION
+                          SELECT id FROM events.search_updates WHERE entity_id = :entity_id AND entity_type = :entity_type
+                    """)
+                ),
+                params,
+            )
+            await session.commit()
+            el = result.first()
+            if el is None:
+                raise Exception(f"Inserting {entity} did not result in returning an id.")
+            return cast(ULID, ULID.from_str(el.id))
+
+    async def clear_all(self) -> None:
+        """Clears the staging table of all data."""
+        async with self.session_maker() as session, session.begin():
+            await session.execute(text("TRUNCATE TABLE events.search_updates"))
+            return None
+
+    async def select_next(self, size: int) -> list[SearchUpdatesORM]:
+        """Select and mark the next records and return them in a list."""
+        async with self.session_maker() as session, session.begin():
+            stmt = (
+                select(SearchUpdatesORM)
+                .where(SearchUpdatesORM.state.is_(None))
+                # lock retrieved rows, skip already locked ones, to deal with concurrency
+                .with_for_update(skip_locked=True)
+                .limit(size)
+                .order_by(SearchUpdatesORM.id)
+            )
+            result = await session.scalars(stmt)
+            records = result.all()
+            for r in records:
+                r.state = RecordState.Locked
+                session.add(r)
+
+            return list(records)
+
+    async def __mark_rows(self, state: RecordState | None, ids: list[ULID]) -> None:
+        """Mark rows with the given state."""
+        async with self.session_maker() as session, session.begin():
+            stmt = (
+                update(SearchUpdatesORM)
+                .where(SearchUpdatesORM.state == RecordState.Locked)
+                .where(SearchUpdatesORM.id.in_(ids))
+                .values(state=state)
+            )
+            await session.execute(stmt)
+
+    async def mark_processed(self, ids: list[ULID]) -> None:
+        """Remove processed rows."""
+        async with self.session_maker() as session, session.begin():
+            stmt = (
+                delete(SearchUpdatesORM)
+                .where(SearchUpdatesORM.state == RecordState.Locked)
+                .where(SearchUpdatesORM.id.in_(ids))
+            )
+            await session.execute(stmt)
+
+    async def mark_reset(self, ids: list[ULID]) -> None:
+        """Mark these rows as open so they can be processed."""
+        await self.__mark_rows(None, ids)
+
+    async def mark_failed(self, ids: list[ULID]) -> None:
+        """Mark these rows as failed."""
+        await self.__mark_rows(RecordState.Failed, ids)
+
+    async def reset_locked(self) -> None:
+        """Resets all locked rows to open."""
+        async with self.session_maker() as session, session.begin():
+            stmt = update(SearchUpdatesORM).where(SearchUpdatesORM.state == RecordState.Locked).values(state=None)
+            await session.execute(stmt)
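Taken together, the repository implements a small work queue: writers stage documents with `upsert`/`insert`, and a consumer claims batches with `select_next`, then deletes them on success (`mark_processed`) or flags them otherwise (`mark_failed`/`mark_reset`). A hypothetical end-to-end sketch:

```python
async def stage_and_consume(repo: SearchUpdatesRepo, project: Project) -> None:
    await repo.upsert(project)  # stage (or refresh) the SOLR payload
    batch = await repo.select_next(50)  # claim and lock up to 50 rows
    try:
        ...  # push the payloads to SOLR here
        await repo.mark_processed([r.id for r in batch])
    except Exception:
        await repo.mark_reset([r.id for r in batch])  # release the rows for a retry
```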
diff --git a/components/renku_data_services/search/decorators.py b/components/renku_data_services/search/decorators.py
new file mode 100644
index 000000000..c8a384988
--- /dev/null
+++ b/components/renku_data_services/search/decorators.py
@@ -0,0 +1,113 @@
+"""Decorators to support search integration."""
+
+import functools
+from collections.abc import Awaitable, Callable
+from typing import Concatenate, ParamSpec, Protocol, TypeVar, cast
+
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from renku_data_services.app_config import logging
+from renku_data_services.data_connectors.models import (
+    DataConnector,
+    DataConnectorUpdate,
+    DeletedDataConnector,
+    GlobalDataConnector,
+)
+from renku_data_services.errors import errors
+from renku_data_services.namespace.models import DeletedGroup, Group
+from renku_data_services.project.models import DeletedProject, Project, ProjectUpdate
+from renku_data_services.search.db import SearchUpdatesRepo
+from renku_data_services.search.models import DeleteDoc
+from renku_data_services.users.models import DeletedUser, UserInfo, UserInfoUpdate
+
+logger = logging.getLogger(__name__)
+
+
+class WithSearchUpdateRepo(Protocol):
+    """The protocol required for a class to stage search document updates."""
+
+    @property
+    def search_updates_repo(self) -> SearchUpdatesRepo:
+        """Returns the repository for updating search documents."""
+        ...
+
+
+_P = ParamSpec("_P")
+_T = TypeVar("_T")
+_WithSearchUpdateRepo = TypeVar("_WithSearchUpdateRepo", bound=WithSearchUpdateRepo)
+
+
+def update_search_document(
+    f: Callable[Concatenate[_WithSearchUpdateRepo, _P], Awaitable[_T]],
+) -> Callable[Concatenate[_WithSearchUpdateRepo, _P], Awaitable[_T]]:
+    """Calls the wrapped function and updates the search_update table with corresponding data."""
+
+    @functools.wraps(f)
+    async def func_wrapper(self: _WithSearchUpdateRepo, *args: _P.args, **kwargs: _P.kwargs) -> _T:
+        session = kwargs.get("session")
+        if not isinstance(session, AsyncSession):
+            raise errors.ProgrammingError(
+                message="The decorator that stages search document updates expects a valid database session "
+                f"in the keyword arguments; instead it got {type(session)}."
+            )
+        result = await f(self, *args, **kwargs)
+        if result is None:
+            return result
+
+        match result:
+            case Project() as p:
+                await self.search_updates_repo.upsert(p)
+
+            case ProjectUpdate() as p:
+                await self.search_updates_repo.upsert(p.new)
+
+            case DeletedProject() as p:
+                record = DeleteDoc.project(p.id)
+                await self.search_updates_repo.upsert(record)
+
+            case UserInfo() as u:
+                await self.search_updates_repo.upsert(u)
+
+            case UserInfoUpdate() as u:
+                await self.search_updates_repo.upsert(u.new)
+
+            case DeletedUser() as u:
+                record = DeleteDoc.user(u.id)
+                await self.search_updates_repo.upsert(record)
+
+            case Group() as g:
+                await self.search_updates_repo.upsert(g)
+
+            case DeletedGroup() as g:
+                record = DeleteDoc.group(g.id)
+                await self.search_updates_repo.upsert(record)
+
+            case DataConnector() as dc:
+                await self.search_updates_repo.upsert(dc)
+
+            case GlobalDataConnector() as dc:
+                await self.search_updates_repo.upsert(dc)
+
+            case DataConnectorUpdate() as dc:
+                await self.search_updates_repo.upsert(dc.new)
+
+            case DeletedDataConnector() as dc:
+                record = DeleteDoc.data_connector(dc.id)
+                await self.search_updates_repo.upsert(record)
+
+            case list():
+                match result:
+                    case [UserInfo(), *_] as els:
+                        users = cast(list[UserInfo], els)
+                        for u in users:
+                            await self.search_updates_repo.upsert(u)
+
+            case _:
+                error = errors.ProgrammingError(
+                    message=f"Encountered unhandled search document of type '{result.__class__.__name__}'"
+                )
+                logger.error(error)
+
+        return result
+
+    return func_wrapper
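A repository method opts into search indexing by returning one of the handled model types and accepting the database session as a keyword argument. A hypothetical sketch (the class and method names are illustrative, not part of the patch):

```python
class ExampleProjectRepository:
    def __init__(self, search_updates_repo: SearchUpdatesRepo) -> None:
        self.search_updates_repo = search_updates_repo  # satisfies WithSearchUpdateRepo

    @update_search_document
    async def rename(self, project: Project, name: str, *, session: AsyncSession) -> Project:
        ...  # persist the change using `session`
        return project  # the returned Project is staged for SOLR by the decorator
```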
diff --git a/components/renku_data_services/search/models.py b/components/renku_data_services/search/models.py
new file mode 100644
index 000000000..d1e20489a
--- /dev/null
+++ b/components/renku_data_services/search/models.py
@@ -0,0 +1,52 @@
+"""Model classes for search."""
+
+from __future__ import annotations
+
+from typing import Any
+
+from pydantic import BaseModel
+from ulid import ULID
+
+from renku_data_services.data_connectors.models import DataConnector, GlobalDataConnector
+from renku_data_services.namespace.models import Group
+from renku_data_services.project.models import Project
+from renku_data_services.users.models import UserInfo
+
+
+class DeleteDoc(BaseModel):
+    """A special payload for the staging table indicating to delete a document in solr."""
+
+    id: str
+    entity_type: str
+
+    def to_dict(self) -> dict[str, Any]:
+        """Return the dict representation."""
+        return {"id": self.id, "deleted": True}
+
+    @classmethod
+    def solr_query(cls) -> str:
+        """Returns the solr query that would select all documents of this type."""
+        return "deleted:true"
+
+    @classmethod
+    def group(cls, id: ULID) -> DeleteDoc:
+        """For deleting a group."""
+        return DeleteDoc(id=str(id), entity_type="Group")
+
+    @classmethod
+    def project(cls, id: ULID) -> DeleteDoc:
+        """For deleting a project."""
+        return DeleteDoc(id=str(id), entity_type="Project")
+
+    @classmethod
+    def user(cls, id: str) -> DeleteDoc:
+        """For deleting a user."""
+        return DeleteDoc(id=id, entity_type="User")
+
+    @classmethod
+    def data_connector(cls, id: ULID) -> DeleteDoc:
+        """For deleting a data connector."""
+        return DeleteDoc(id=str(id), entity_type="DataConnector")
+
+
+Entity = UserInfo | Group | Project | DataConnector | GlobalDataConnector | DeleteDoc
diff --git a/components/renku_data_services/search/orm.py b/components/renku_data_services/search/orm.py
new file mode 100644
index 000000000..70f389001
--- /dev/null
+++ b/components/renku_data_services/search/orm.py
@@ -0,0 +1,53 @@
+"""ORM definitions for search update staging table."""
+
+from datetime import datetime
+from enum import StrEnum
+from typing import Any
+
+from sqlalchemy import JSON, DateTime, Enum, MetaData, String, text
+from sqlalchemy.dialects.postgresql import JSONB
+from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column
+from ulid import ULID
+
+from renku_data_services.utils.sqlalchemy import ULIDType
+
+JSONVariant = JSON().with_variant(JSONB(), "postgresql")
+
+
+class BaseORM(MappedAsDataclass, DeclarativeBase):
+    """Base class for all ORM classes."""
+
+    metadata = MetaData(schema="events")  # Has to match alembic ini section name
+
+
+class RecordState(StrEnum):
+    """Indicates the state of a row with respect to processing."""
+
+    Locked = "Locked"
+    Failed = "Failed"
+
+
+class SearchUpdatesORM(BaseORM):
+    """Table for updates to SOLR."""
+
+    __tablename__ = "search_updates"
+
+    id: Mapped[ULID] = mapped_column(
+        "id", ULIDType, primary_key=True, server_default=text("generate_ulid()"), init=False
+    )
+    """Artificial identifier with stable order."""
+
+    entity_id: Mapped[str] = mapped_column("entity_id", String(100), unique=True, index=True)
+    """The id of the entity (user, project, etc)."""
+
+    entity_type: Mapped[str] = mapped_column("entity_type", String(100), nullable=False)
+    """The entity type as a string."""
+
+    created_at: Mapped[datetime] = mapped_column("created_at", DateTime(timezone=True), nullable=False)
+    """A timestamp to indicate insertion time."""
+
+    payload: Mapped[dict[str, Any]] = mapped_column("payload", JSONVariant, nullable=False)
+    """The SOLR document of the entity as JSON."""
+
+    state: Mapped[RecordState] = mapped_column("state", Enum(RecordState), nullable=True, init=False)
+    """State for marking rows."""
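The `state` column gives each staged row a small lifecycle, matching the repository operations in db.py above (a summary, not new behavior):

```python
# Hypothetical walkthrough of a staged row's lifecycle (see db.py above):
#   NULL (open) --select_next--> Locked --mark_processed--> row deleted
#                                   |--mark_failed--> Failed
#                                   '--mark_reset---> NULL (picked up again)
from renku_data_services.search.orm import RecordState

assert RecordState.Locked.value == "Locked" and RecordState.Failed.value == "Failed"
```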
diff --git a/components/renku_data_services/search/query_manual.md b/components/renku_data_services/search/query_manual.md
new file mode 100644
index 000000000..93c68d9e6
--- /dev/null
+++ b/components/renku_data_services/search/query_manual.md
@@ -0,0 +1,377 @@
+
+The search accepts queries as a query string. A query may contain
+specific and unspecific search terms.
+
+
+## Query String
+
+A query is a sequence of words. All words that are not recognized as
+specific search terms are used for searching in various entity
+properties, such as `name` or `description`. Specific search terms are
+matched exactly against a certain field. Terms are separated by
+whitespace.
+
+Example:
+```
+numpy flight visibility:public
+```
+
+Searches for entities containing `numpy` _and_ `flight` that are
+public.
+
+The term order is usually not relevant, though it may influence the
+score of a result.
+
+If a value for a specific field contains whitespace, quotes or a comma,
+it must be enclosed in quotes. Additionally, multiple values can be
+provided for each field by using a comma separated list. The values
+are treated as alternatives, so any such value would yield a result.
+
+Example:
+```
+numpy flight visibility:public,private
+```
+
+Searches for entities containing `numpy` _and_ `flight` that are
+_either_ `public` _or_ `private`.
+
+## Fields
+
+The following fields are available:
+
+
+
+This content will be replaced by the output of the code block above.
+
+
+Each field allows specifying one or more values, separated by commas.
+The field name and its value are separated by a `:`. For date fields,
+`<` and `>` are additionally supported.
+
+## EntityTypes
+
+The field `type` allows searching for specific entity types. If it is
+missing, all entity types are included in the result. Entity types are:
+
+
+
+This content will be replaced by the output of the code block above.
+
+
+Example:
+
+
+
+This content will be replaced by the output of the code block above.
+
+
+## Roles
+
+The field `role` allows searching for projects in which the current
+user has the given role. Other entities are excluded from the results.
+
+
+
+This content will be replaced by the output of the code block above.
+
+
+## Visibility
+
+The `visibility` field can be used to restrict results to entities with
+a certain visibility. Users have a default visibility of `public`.
+Possible values are:
+
+
+
+This content will be replaced by the output of the code block above.
+
+
+## Created By
+
+Selects entities that were created by a specific user.
+
+
+
+This content will be replaced by the output of the code block above.
+
+
+Note that this field only accepts user ids! It cannot (yet) resolve usernames.
+
+## Keywords
+
+Entities with certain keywords can be searched, where multiple keywords given
+in one field term are combined via *OR* and multiple field-terms are combined
+via *AND*. Keywords have to match exactly (no typos allowed).
+
+
+
+This content will be replaced by the output of the code block above.
+
+
+Searches for entities that have either `data` or `ml` *and* either `health` or
+`disease` as keywords. If keywords contain whitespace or a comma, they must be
+quoted as written above. If a keyword contains a quote character (`"`), it must
+be prefixed by a backslash (`\`) to prevent it from being interpreted as an
+end-of-value symbol.
+
+
+
+This content will be replaced by the output of the code block above.
+
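+Putting several of the above fields together, a combined query might
+look like the following (an illustrative sketch; it assumes `Project`
+is one of the entity type values listed above):
+
+```
+flight type:Project visibility:public
+```
+
+This searches for public projects containing `flight`.
+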
+## Members
+
+There are two fields that allow searching for entities that a given
+user is a member of. There are two variants:
+
+This content will be replaced by the output of the code block above.
+
+The first includes only entities with which the given user has a
+direct relationship. The second additionally includes entities where
+this relationship is deduced from where the entity is located. For
+example, a member of a group is also a member of all projects in that
+group, so it would select all entities that are "somehow" related to
+that person.
+
+The value of that field is either a user id or a user name. Usernames
+must be prefixed with an `@`, while user ids can be specified as is.
+
+Examples:
+
+This content will be replaced by the output of the code block above.
+
+Multiple members can be specified; they will then match entities where
+*all* these users are members. For these fields, specifying multiple
+members in one field or in multiple fields is equivalent.
+
+This content will be replaced by the output of the code block above.
+
+There is a hard limit on the number of members that can be specified.
+This applies to both fields. If more are specified, they will be
+silently ignored.
+
+This content will be replaced by the output of the code block above.
+
+## Dates
+
+Date fields, like
+
+This content will be replaced by the output of the code block above.
+
+accept date strings which can be specified in various ways. There are
+
+This content will be replaced by the output of the code block above.
+
+### Relative dates
+
+There are the following keywords for relative dates:
+
+This content will be replaced by the output of the code block above.
+
+### Partial Timestamps
+
+Timestamps must be in ISO 8601 form, are UTC based, and allow
+specifying time down to seconds. The full form is
+
+```
+yyyy-mm-ddTHH:MM:ssZ
+```
+
+Any part can be omitted, starting from the right. When querying, a
+missing part is filled with either the maximum or the minimum possible
+value, depending on the side of the comparison. When the date is an
+upper bound, the missing parts are set to their minimum values;
+conversely, when it is used as a lower bound, they are set to their
+maximum values.
+
+Example:
+
+This content will be replaced by the output of the code block above.
+
+### Date calculations
+
+Finally, a date can be specified by adding or subtracting days from a
+reference date. The reference date must be given either as a relative
+date or as a partial timestamp, followed by `+`, `-` or `/` and a
+number of days.
+
+The `/` character both adds and subtracts the days from the reference
+date, making the reference date the middle of the range.
+
+Example:
+
+This content will be replaced by the output of the code block above.
+
+### Date Comparison
+
+Comparing dates with `>` and `<` works as expected. More interesting
+are queries that specify more than one date, and the use of the `:`
+comparison.
+
+The `:` can be used to specify ranges more succinctly. For a full
+timestamp, it means *equals*. With partial timestamps, it searches
+within the minimum and maximum possible dates for that partial
+timestamp.
+
+Since multiple values are combined using `OR`, it is possible to
+search in multiple ranges.
+
+Example:
+
+This content will be replaced by the output of the code block above.
+
+The above matches entities created in March 2023 or June 2023.
+
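+Written out, such a two-range query might look like this (an
+illustrative hand-written example):
+
+```
+created:2023-03,2023-06
+```
+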
+## Sorting
+
+The query allows defining terms for sorting. Sorting is limited to
+specific fields, which are:
+
+This content will be replaced by the output of the code block above.
+
+Sorting by a field is defined by writing the field name, followed by a
+dash and the sort direction. Multiple such definitions can be
+specified as a comma separated list. Alternatively, multiple `sort:…`
+terms will be combined into a single one in the order they appear.
+
+Example:
+
+This content will be replaced by the output of the code block above.
+
+is equivalent to
+
+This content will be replaced by the output of the code block above.
diff --git a/components/renku_data_services/search/query_manual.py b/components/renku_data_services/search/query_manual.py
new file mode 100644
index 000000000..49362fd01
--- /dev/null
+++ b/components/renku_data_services/search/query_manual.py
@@ -0,0 +1,49 @@
+"""Generate the complete manual for the user query."""
+
+from pathlib import Path
+
+import markdown_code_runner as mcr
+from markdown_code_runner import process_markdown
+
+from renku_data_services.app_config import logging
+
+## Remove the warning with fancy unicode that is inserted into every
+## occurrence of an injected code result, because it breaks the
+## conversion to html in the swagger page…
+mcr.MARKERS.update({"warning": ""})
+mcr.PATTERNS = mcr.markers_to_patterns()
+
+
+logger = logging.getLogger(__file__)
+
+
+def __convert_file(input: Path | str) -> str:
+    inp = input if isinstance(input, Path) else Path(input)
+    with inp.open() as f:
+        lines = [line.rstrip("\n") for line in f.readlines()]
+
+    new_lines = process_markdown(lines, verbose=False)
+    return "\n".join(new_lines).rstrip() + "\n"
+
+
+def manual_to_file(out: str | Path) -> None:
+    """Write the query manual to the given file."""
+    text = manual_to_str()
+    outp = out if isinstance(out, Path) else Path(out)
+    with outp.open("w") as f:
+        f.write(text)
+
+
+def manual_to_str() -> str:
+    """Return the query manual as a markdown string."""
+    manual = Path(__file__).parent / "query_manual.md"
+    return __convert_file(manual)
+
+
+def safe_manual_to_str() -> str:
+    """Return the query manual or a placeholder if it fails."""
+    try:
+        return manual_to_str()
+    except Exception as e:
+        logger.error("Error generating the search query documentation!", exc_info=e)
+        return "Generating the documentation failed."
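As a sketch of how the manual's query language maps onto the AST helpers
added later in this series (illustrative only, not part of the patch;
the enum member `SortDirection.asc` and its `asc` rendering are assumed):

```python
# Build a query programmatically and render it back to the query string
# documented in query_manual.md.
from renku_data_services.search.user_query import Segments, SortableField, UserQuery
from renku_data_services.solr.solr_client import SortDirection

query = UserQuery.of(
    Segments.text("numpy flight"),      # free-text terms
    Segments.keyword_is("data", "ml"),  # keyword:data,ml
    Segments.sort_by((SortableField.created, SortDirection.asc)),
)
print(query.render())  # numpy flight keyword:data,ml sort:created-asc
```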
diff --git a/components/renku_data_services/search/reprovision.py b/components/renku_data_services/search/reprovision.py
new file mode 100644
index 000000000..d20453a04
--- /dev/null
+++ b/components/renku_data_services/search/reprovision.py
@@ -0,0 +1,147 @@
+"""Code for reprovisioning the search index."""
+
+from collections.abc import AsyncGenerator, Callable
+from datetime import datetime
+
+from renku_data_services.app_config import logging
+from renku_data_services.base_api.pagination import PaginationRequest
+from renku_data_services.base_models.core import APIUser
+from renku_data_services.data_connectors.db import DataConnectorRepository
+from renku_data_services.data_connectors.models import DataConnector, GlobalDataConnector
+from renku_data_services.errors.errors import ForbiddenError
+from renku_data_services.message_queue.db import ReprovisioningRepository
+from renku_data_services.message_queue.models import Reprovisioning
+from renku_data_services.namespace.db import GroupRepository
+from renku_data_services.namespace.models import Group
+from renku_data_services.project.db import ProjectRepository
+from renku_data_services.project.models import Project
+from renku_data_services.search.db import SearchUpdatesRepo
+from renku_data_services.solr.solr_client import DefaultSolrClient, SolrClientConfig
+from renku_data_services.users.db import UserRepo
+from renku_data_services.users.models import UserInfo
+
+logger = logging.getLogger(__name__)
+
+
+class SearchReprovision:
+    """Encapsulates routines to reprovision the index."""
+
+    def __init__(
+        self,
+        search_updates_repo: SearchUpdatesRepo,
+        reprovisioning_repo: ReprovisioningRepository,
+        solr_config: SolrClientConfig,
+        user_repo: UserRepo,
+        group_repo: GroupRepository,
+        project_repo: ProjectRepository,
+        data_connector_repo: DataConnectorRepository,
+    ) -> None:
+        self._search_updates_repo = search_updates_repo
+        self._reprovisioning_repo = reprovisioning_repo
+        self._solr_config = solr_config
+        self._user_repo = user_repo
+        self._group_repo = group_repo
+        self._project_repo = project_repo
+        self._data_connector_repo = data_connector_repo
+
+    async def run_reprovision(self, admin: APIUser) -> int:
+        """Start a reprovisioning if not already running."""
+        reprovision = await self.acquire_reprovision()
+        return await self.init_reprovision(admin, reprovision)
+
+    async def acquire_reprovision(self) -> Reprovisioning:
+        """Acquire a reprovisioning slot. Throws if already taken."""
+        return await self._reprovisioning_repo.start()
+
+    async def kill_reprovision_lock(self) -> None:
+        """Removes an existing reprovisioning lock."""
+        return await self._reprovisioning_repo.stop()
+
+    async def get_current_reprovision(self) -> Reprovisioning | None:
+        """Return the current reprovisioning lock."""
+        return await self._reprovisioning_repo.get_active_reprovisioning()
+
+    async def _get_all_data_connectors(
+        self, user: APIUser, per_page: int = 20
+    ) -> AsyncGenerator[DataConnector | GlobalDataConnector, None]:
+        """Get all data connectors, retrieving `per_page` each time."""
+        preq = PaginationRequest(page=1, per_page=per_page)
+        result: tuple[list[DataConnector | GlobalDataConnector], int] | None = None
+        count: int = 0
+        while result is None or result[1] > count:
+            result = await self._data_connector_repo.get_data_connectors(user=user, pagination=preq)
+            count = count + len(result[0])
+            preq = PaginationRequest(page=preq.page + 1, per_page=per_page)
+            for dc in result[0]:
+                yield dc
+
+    async def init_reprovision(self, admin: APIUser, reprovisioning: Reprovisioning) -> int:
+        """Initiates reprovisioning by inserting documents into the staging table.
+
+        Deletes all renku entities in the solr core. Then it goes
+        through all entities in the postgres database and inserts
+        solr documents into the `search_updates` table. A background
+        process queries this table and will eventually update
+        solr with these entries.
+        """
+
+        if not admin.is_admin:
+            raise ForbiddenError(message="Only Renku administrators are allowed to start search reprovisioning.")
+
+        def log_counter(c: int) -> None:
+            if c % 50 == 0:
+                logger.info(f"Inserted {c} entities into the staging table...")
+
+        counter = 0
+        try:
+            logger.info(f"Starting reprovisioning with ID {reprovisioning.id}")
+            started = datetime.now()
+            await self._search_updates_repo.clear_all()
+            async with DefaultSolrClient(self._solr_config) as client:
+                await client.delete("_type:*")
+
+            all_users = self._user_repo.get_all_users(requested_by=admin)
+            counter = await self.__update_entities(all_users, "user", started, counter, log_counter)
+            logger.info(f"Done adding user entities to search_updates table. Record count: {counter}.")
+
+            all_groups = self._group_repo.get_all_groups(requested_by=admin)
+            counter = await self.__update_entities(all_groups, "group", started, counter, log_counter)
+            logger.info(f"Done adding group entities to search_updates table. Record count: {counter}")
+
+            all_projects = self._project_repo.get_all_projects(requested_by=admin)
+            counter = await self.__update_entities(all_projects, "project", started, counter, log_counter)
+            logger.info(f"Done adding project entities to search_updates table. Record count: {counter}")
+
+            all_dcs = self._get_all_data_connectors(admin, per_page=20)
+            counter = await self.__update_entities(all_dcs, "data connector", started, counter, log_counter)
+            logger.info(f"Done adding dataconnector entities to search_updates table. Record count: {counter}")
+
+            logger.info(f"Inserted {counter} entities into the staging table.")
+        except Exception as e:
+            logger.error("Error while reprovisioning entities!", exc_info=e)
+            ## TODO error handling. skip or fail?
+        finally:
+            await self._reprovisioning_repo.stop()
+
+        return counter
+
+    async def __update_entities(
+        self,
+        iter: AsyncGenerator[Project | Group | UserInfo | DataConnector | GlobalDataConnector, None],
+        name: str,
+        started: datetime,
+        counter: int,
+        on_count: Callable[[int], None],
+    ) -> int:
+        try:
+            async for entity in iter:
+                try:
+                    await self._search_updates_repo.insert(entity, started)
+                    counter += 1
+                    on_count(counter)
+                except Exception as e:
+                    logger.error(f"Error updating search entry for {name} {entity.id}: {e}", exc_info=e)
+        except Exception as e:
+            logger.error(f"Error updating search entry for {name}s: {e}", exc_info=e)
+
+        return counter
diff --git a/components/renku_data_services/search/solr_token.py b/components/renku_data_services/search/solr_token.py
new file mode 100644
index 000000000..236159d97
--- /dev/null
+++ b/components/renku_data_services/search/solr_token.py
@@ -0,0 +1,185 @@
+"""Model for creating solr lucene queries."""
+
+import re
+from collections.abc import Iterable
+from datetime import UTC, datetime
+from typing import NewType
+
+from renku_data_services.authz.models import Visibility
+from renku_data_services.base_models.nel import Nel
+from renku_data_services.solr.entity_documents import EntityType
+from renku_data_services.solr.entity_schema import Fields
+from renku_data_services.solr.solr_schema import FieldName
+
+SolrToken = NewType("SolrToken", str)
+
+# Escapes query characters for solr. This is taken from here:
+# https://github.com/apache/solr/blob/bcb9f144974ed07aa3b66766302474542067b522/solr/solrj/src/java/org/apache/solr/client/solrj/util/ClientUtils.java#L163
+__defaultSpecialChars = '\\+-!():^[]"{}~*?|&;/'
+
+
+def __escape(input: str, bad_chars: str) -> str:
+    output = ""
+    for c in input:
+        if c.isspace() or bad_chars.find(c) >= 0:
+            output += "\\"
+
+        output += c
+
+    return output
+
+
+def __escape_query(input: str) -> str:
+    return __escape(input, __defaultSpecialChars)
+
+
+def empty() -> SolrToken:
+    """Return the empty string."""
+    return SolrToken("")
+
+
+def all_query() -> SolrToken:
+    """A solr query to return all documents."""
+    return SolrToken("*:*")
+
+
+def from_str(input: str) -> SolrToken:
+    """Create a solr query part from a string."""
+    return SolrToken(__escape_query(input))
+
+
+def from_visibility(v: Visibility) -> SolrToken:
+    """Create a solr query value for a visibility."""
+    return SolrToken(v.value.lower())
+
+
+def from_entity_type(et: EntityType) -> SolrToken:
+    """Create a solr query value for an entity type."""
+    return SolrToken(et.value)
+
+
+def from_datetime(dt: datetime) -> SolrToken:
+    """Convert the datetime into a solr query value.
+
+    Solr uses UTC formatted timestamps, preferring the `Z` suffix
+    indicating UTC timezone.
+
+    https://solr.apache.org/guide/solr/latest/indexing-guide/date-formatting-math.html
+    """
+    dt = dt.astimezone(UTC).replace(microsecond=0)
+    dt_str = dt.isoformat()
+    dt_str = dt_str.replace("+00:00", "Z")  # couldn't find a good way…
+    return SolrToken(__escape(dt_str, ":"))
+
+
+def from_date_range(min: datetime, max: datetime) -> SolrToken:
+    """Convert a date range into a solr query value."""
+    start = from_datetime(min)
+    end = from_datetime(max)
+    return SolrToken(f"[{start} TO {end}]")
+
+
+def field_is(field: FieldName, value: SolrToken) -> SolrToken:
+    """Create a solr query part for a field."""
+    return SolrToken(f"{field}:{value}")
+
+
+def field_exists(field: FieldName) -> SolrToken:
+    """Look for an existing field."""
+    return field_is(field, SolrToken("[* TO *]"))
+
+
+def field_not_exists(field: FieldName) -> SolrToken:
+    """Return a query part checking if a field does not exist."""
+    return SolrToken(f"-{field}:[* TO *]")
+
+
+def field_is_any(field: FieldName, value: Nel[SolrToken]) -> SolrToken:
+    """Search for any value in the given field."""
+    if value.more_values == []:
+        return field_is(field, value.value)
+    else:
+        vs = fold_or(value)
+        return field_is(field, SolrToken(f"({vs})"))
+
+
+def type_is(et: EntityType) -> SolrToken:
+    """Search for the type field."""
+    return field_is(Fields.entity_type, from_entity_type(et))
+
+
+def fold_and(tokens: Iterable[SolrToken]) -> SolrToken:
+    """Combine multiple solr query parts with AND."""
+    return SolrToken(" AND ".join(tokens))
+
+
+def fold_or(tokens: Iterable[SolrToken]) -> SolrToken:
+    """Combine multiple solr query parts with OR."""
+    return SolrToken(" OR ".join(tokens))
+
+
+def id_is(id: str) -> SolrToken:
+    """Create a solr query part for a given id."""
+    return field_is(Fields.id, from_str(id))
+
+
+def id_in(ids: Nel[str]) -> SolrToken:
+    """Create a solr query part that matches any given id."""
+    return field_is_any(Fields.id, ids.map(from_str))
+
+
+def id_not_exists() -> SolrToken:
+    """Create a solr query part matching documents without an id."""
+    return field_not_exists(Fields.id)
+
+
+def public_or_ids(allowed_ids: list[str]) -> SolrToken:
+    """Create a solr query part selecting public entities or ones with the given ids."""
+    id_nel: Nel[str] | None = Nel.from_list(allowed_ids)
+    match id_nel:
+        case Nel() as nl:
+            return SolrToken(f"({public_only()} OR {id_in(nl)})")
+        case _:
+            return public_only()
+
+
+def created_is(dt: datetime) -> SolrToken:
+    """Create a solr query part comparing the creation_date."""
+    return field_is(Fields.creation_date, from_datetime(dt))
+
+
+def created_range(min: datetime, max: datetime) -> SolrToken:
+    """Create a solr query part comparing the creation_date."""
+    return field_is(Fields.creation_date, from_date_range(min, max))
+
+
+def created_gt(dt: datetime) -> SolrToken:
+    """Create a solr query part comparing the creation_date."""
+    return field_is(Fields.creation_date, SolrToken(f"[{from_datetime(dt)} TO *]"))
+
+
+def created_lt(dt: datetime) -> SolrToken:
+    """Create a solr query part comparing the creation_date."""
+    return field_is(Fields.creation_date, SolrToken(f"[* TO {from_datetime(dt)}]"))
+
+
+def all_entities() -> SolrToken:
+    """Searches renku entity documents."""
+    return field_is(Fields.kind, SolrToken("fullentity"))
+
+
+def public_only() -> SolrToken:
+    """Search only public entities."""
+    return field_is(Fields.visibility, from_visibility(Visibility.PUBLIC))
+
+
+def content_all(text: str) -> SolrToken:
+    """Search the content_all field with fuzzy searching each term."""
+    terms: list[SolrToken] = list(map(lambda s: SolrToken(__escape_query(s) + "~"), re.split("\\s+", text)))
+    terms_str = "(" + " ".join(terms) + ")"
+    return SolrToken(f"{Fields.content_all}:{terms_str}")
+
+
+def created_by_exists() -> SolrToken:
+    """Query part that requires an existing createdBy field for a project document."""
+    return SolrToken("(createdBy:[* TO *] OR (*:* AND -_type:Project))")
diff --git a/components/renku_data_services/search/solr_user_query.py b/components/renku_data_services/search/solr_user_query.py
new file mode 100644
index 000000000..d9deff208
--- /dev/null
+++ b/components/renku_data_services/search/solr_user_query.py
@@ -0,0 +1,433 @@
+"""Creating queries for solr given a parsed user search query."""
+
+from __future__ import annotations
+
+from abc import ABC, abstractmethod
+from collections.abc import Iterable
+from dataclasses import dataclass, field
+from datetime import datetime, tzinfo
+from typing import override
+
+import renku_data_services.search.solr_token as st
+from renku_data_services.authz.models import Role
+from renku_data_services.base_models.core import APIUser
+from renku_data_services.base_models.nel import Nel
+from renku_data_services.search.solr_token import SolrToken
+from renku_data_services.search.user_query import (
+    Comparison,
+    Created,
+    CreatedByIs,
+    DirectMemberIs,
+    IdIs,
+    InheritedMemberIs,
+    KeywordIs,
+    NameIs,
+    NamespaceIs,
+    Order,
+    OrderBy,
+    RoleIs,
+    SlugIs,
+    SortableField,
+    Text,
+    TypeIs,
+    UserDef,
+    UserId,
+    Username,
+    UserQuery,
+    UserQueryVisitor,
+    VisibilityIs,
+)
+from renku_data_services.search.user_query_process import CollectEntityTypes
+from renku_data_services.solr.entity_documents import EntityType
+from renku_data_services.solr.entity_schema import Fields
+from renku_data_services.solr.solr_client import SortDirection
+from renku_data_services.solr.solr_schema import FieldName
+from renku_data_services.users.db import UsernameResolver
+
+
+@dataclass
+class SolrUserQuery:
+    """A solr query with an optional sort definition.
+
+    This is the result of interpreting a user search query.
+    """
+
+    query: SolrToken
+    sort: list[tuple[FieldName, SortDirection]]
+
+    def append(self, next: SolrUserQuery) -> SolrUserQuery:
+        """Creates a new query appending `next` to this."""
+        return type(self)(SolrToken(f"({self.query}) AND ({next.query})"), self.sort + next.sort)
+
+    def query_str(self) -> str:
+        """Return the solr query string."""
+        if self.query == "":
+            return st.all_entities()
+        else:
+            return self.query
+
+
+@dataclass
+class AdminRole:
+    """An admin is searching."""
+
+    id: str
+
+
+@dataclass
+class UserRole:
+    """A logged in user is searching."""
+
+    id: str
+
+
+type SearchRole = AdminRole | UserRole
+
+
+class AuthAccess(ABC):
+    """Access authorization information."""
+
+    @abstractmethod
+    async def get_ids_for_role(
+        self, user_id: str, roles: Nel[Role], ets: Iterable[EntityType], direct_membership: bool
+    ) -> list[str]:
+        """Return resource ids for which the given user has the given role."""
+        ...
+
+    @classmethod
+    def none(cls) -> AuthAccess:
+        """An implementation returning no access."""
+        return _NoAuthAccess()
+
+
+class _NoAuthAccess(AuthAccess):
+    async def get_ids_for_role(
+        self, user_id: str, roles: Nel[Role], ets: Iterable[EntityType], direct_membership: bool
+    ) -> list[str]:
+        return []
+
+
+class UsernameResolve(ABC):
+    """Resolve usernames to their ids."""
+
+    @abstractmethod
+    async def resolve_usernames(self, names: Nel[Username]) -> dict[Username, UserId]:
+        """Return the user id for each given user name."""
+        ...
+
+    @classmethod
+    def none(cls) -> UsernameResolve:
+        """An implementation that doesn't resolve names."""
+        return _EmptyUsernameResolve()
+
+    @classmethod
+    def db(cls, repo: UsernameResolver) -> UsernameResolve:
+        """An implementation using the resolver from the user module."""
+        return _DbUsernameResolve(repo)
+
+
+class _EmptyUsernameResolve(UsernameResolve):
+    @override
+    async def resolve_usernames(self, names: Nel[Username]) -> dict[Username, UserId]:
+        return {}
+
+
+class _DbUsernameResolve(UsernameResolve):
+    def __init__(self, resolver: UsernameResolver) -> None:
+        self._resolver = resolver
+
+    async def resolve_usernames(self, names: Nel[Username]) -> dict[Username, UserId]:
+        """Return the user id for each given user name."""
+        result = {}
+        for k, v in (await self._resolver.resolve_usernames(names.map(lambda n: n.slug.value))).items():
+            result.update({Username.from_name(k): UserId(v)})
+
+        return result
+
+
+@dataclass
+class Context:
+    """Contextual information available at search time.
+
+    A single context is meant to be created for interpreting a single query.
+    """
+
+    current_time: datetime
+    zone: tzinfo
+    role: SearchRole | None
+    auth_access: AuthAccess = field(default_factory=AuthAccess.none)
+    username_resolve: UsernameResolve = field(default_factory=UsernameResolve.none)
+    requested_entity_types: set[EntityType] | None = None
+
+    def __copy(
+        self,
+        role: SearchRole | None = None,
+        auth_access: AuthAccess | None = None,
+        username_resolve: UsernameResolve | None = None,
+        requested_entity_types: set[EntityType] | None = None,
+    ) -> Context:
+        return Context(
+            self.current_time,
+            self.zone,
+            role or self.role,
+            auth_access or self.auth_access,
+            username_resolve or self.username_resolve,
+            requested_entity_types=requested_entity_types,
+        )
+
+    async def with_requested_entity_types(self, uq: UserQuery) -> Context:
+        """Return a copy with the requested entity types set."""
+        et = await uq.accept(CollectEntityTypes())
+        return self if self.requested_entity_types == et else self.__copy(requested_entity_types=et)
+
+    def with_role(self, role: SearchRole) -> Context:
+        """Return a copy with the given role set."""
+        return self if self.role == role else self.__copy(role=role)
+
+    def with_user_role(self, user_id: str) -> Context:
+        """Return a copy with the given user id as user role set."""
+        return self if self.role == UserRole(user_id) else self.__copy(role=UserRole(user_id))
+
+    def with_admin_role(self, user_id: str) -> Context:
+        """Return a copy with the given user id as admin role set."""
+        return self if self.role == AdminRole(user_id) else self.__copy(role=AdminRole(user_id))
+
+    def with_anonymous(self) -> Context:
+        """Return a copy with no search role set."""
+        return self if self.role is None else self.__copy(role=None)
+
+    def with_api_user(self, api_user: APIUser) -> Context:
+        """Return a copy with the search role set by the APIUser."""
+        if api_user.id is not None and api_user.is_admin:
+            return self.with_admin_role(api_user.id)
+        elif api_user.id is not None and api_user.is_authenticated:
+            return self.with_user_role(api_user.id)
+        else:
+            return self.with_anonymous()
+
+    def with_auth_access(self, aa: AuthAccess) -> Context:
+        """Return a copy with the given AuthAccess set."""
+        return self.__copy(auth_access=aa)
+
+    def with_username_resolve(self, ur: UsernameResolve) -> Context:
+        """Return a copy with the given UsernameResolve set."""
+        return self.__copy(username_resolve=ur)
+
+    def get_entity_types(self) -> list[EntityType]:
+        """Return the list of entity types that are requested from the query."""
+        return [e for e in EntityType] if self.requested_entity_types is None else list(self.requested_entity_types)
+
+    async def get_ids_for_roles(self, roles: Nel[Role], direct_membership: bool) -> list[str] | None:
+        """Return a list of ids for which the user has one of the given roles.
+
+        Return None when anonymous.
+        """
+        ets = self.get_entity_types()
+        match self.role:
+            case UserRole() as r:
+                return await self.auth_access.get_ids_for_role(r.id, roles, ets, direct_membership)
+            case AdminRole() as r:
+                return await self.auth_access.get_ids_for_role(r.id, roles, ets, direct_membership)
+            case _:
+                return None
+
+    async def get_member_ids(self, users: Nel[UserDef], direct_membership: bool) -> list[str]:
+        """Return a list of resource ids that all given users are members of."""
+        result: set[str] = set()
+        ids: set[UserId] = set()
+        names: set[Username] = set()
+        ets = self.get_entity_types()
+        for user_def in users:
+            match user_def:
+                case Username() as u:
+                    names.add(u)
+
+                case UserId() as u:
+                    ids.add(u)
+
+        match Nel.from_list(list(names)):
+            case None:
+                pass
+            case nel:
+                remain_ids = await self.username_resolve.resolve_usernames(nel)
+                ids.update(remain_ids.values())
+
+        for uid in ids:
+            n = await self.auth_access.get_ids_for_role(uid.id, Nel.of(Role.VIEWER), ets, direct_membership)
+            result = set(n) if result == set() else result.intersection(set(n))
+
+        return list(result)
+
+    @classmethod
+    def for_anonymous(cls, current_time: datetime, zone: tzinfo) -> Context:
+        """Creates a Context for interpreting a query as an anonymous user."""
+        return Context(current_time, zone, None)
+
+    @classmethod
+    def for_admin(cls, current_time: datetime, zone: tzinfo, user_id: str) -> Context:
+        """Creates a Context for interpreting a query as an admin."""
+        return Context(current_time, zone, AdminRole(user_id))
+
+    @classmethod
+    def for_user(cls, current_time: datetime, zone: tzinfo, user_id: str) -> Context:
+        """Creates a Context for interpreting a query as a normal user."""
+        return Context(current_time, zone, UserRole(user_id))
+
+    @classmethod
+    def for_api_user(cls, current_time: datetime, zone: tzinfo, api_user: APIUser) -> Context:
+        """Creates a Context for the given APIUser."""
+        return cls.for_anonymous(current_time, zone).with_api_user(api_user)
+
+
+class QueryInterpreter(ABC):
+    """Interpreter for user search queries."""
+
+    @abstractmethod
+    async def run(self, ctx: Context, q: UserQuery) -> SolrUserQuery:
+        """Convert a user query into a search query."""
+        ...
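+
+    # Usage sketch (illustrative, not part of this patch): interpreting a
+    # parsed query for an API user; `user_query` is assumed to be a UserQuery.
+    #
+    #   ctx = Context.for_api_user(datetime.now(), UTC, api_user)
+    #   solr_query = await QueryInterpreter.default().run(ctx, user_query)
+    #   q = solr_query.query_str()  # the string handed to the solr client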
+
+    @classmethod
+    def default(cls) -> QueryInterpreter:
+        """Return the default query interpreter."""
+        return LuceneQueryInterpreter()
+
+
+class _LuceneQueryTransform(UserQueryVisitor[SolrUserQuery]):
+    """Transform a UserQuery into a SolrUserQuery."""
+
+    def __init__(self, ctx: Context) -> None:
+        self.solr_sort: list[tuple[FieldName, SortDirection]] = []
+        self.solr_token: list[SolrToken] = []
+        self.ctx = ctx
+
+    async def build(self) -> SolrUserQuery:
+        """Create and return the solr query."""
+        return SolrUserQuery(st.fold_and(self.solr_token), self.solr_sort)
+
+    async def visit_order(self, order: Order) -> None:
+        """Process order."""
+        sort = [self._to_solr_sort(e) for e in order.fields]
+        self.solr_sort.extend(sort)
+
+    @classmethod
+    def _to_solr_sort(cls, ob: OrderBy) -> tuple[FieldName, SortDirection]:
+        match ob.field:
+            case SortableField.fname:
+                return (Fields.name, ob.direction)
+            case SortableField.score:
+                return (Fields.score, ob.direction)
+            case SortableField.created:
+                return (Fields.creation_date, ob.direction)
+
+    def __append(self, t: SolrToken) -> None:
+        self.solr_token.append(t)
+
+    async def visit_text(self, text: Text) -> None:
+        """Process free text segment."""
+        if text.value != "":
+            self.__append(st.content_all(text.value))
+
+    async def visit_type_is(self, ft: TypeIs) -> None:
+        """Process type-is segment."""
+        self.__append(st.field_is_any(Fields.entity_type, ft.values.map(st.from_entity_type)))
+
+    async def visit_id_is(self, ft: IdIs) -> None:
+        """Process id-is segment."""
+        self.__append(st.field_is_any(Fields.id, ft.values.map(st.from_str)))
+
+    async def visit_name_is(self, ft: NameIs) -> None:
+        """Process name-is segment."""
+        self.__append(st.field_is_any(Fields.name, ft.values.map(st.from_str)))
+
+    async def visit_slug_is(self, ft: SlugIs) -> None:
+        """Process slug-is segment."""
+        self.__append(st.field_is_any(Fields.slug, ft.values.map(st.from_str)))
+
+    async def visit_visibility_is(self, ft: VisibilityIs) -> None:
+        """Process visibility-is segment."""
+        self.__append(st.field_is_any(Fields.visibility, ft.values.map(st.from_visibility)))
+
+    async def visit_keyword_is(self, ft: KeywordIs) -> None:
+        """Process keyword-is segment."""
+        self.__append(st.field_is_any(Fields.keywords, ft.values.map(st.from_str)))
+
+    async def visit_namespace_is(self, ft: NamespaceIs) -> None:
+        """Process the namespace-is segment."""
+        self.__append(st.field_is_any(Fields.namespace_path, ft.values.map(st.from_str)))
+
+    async def visit_created_by_is(self, ft: CreatedByIs) -> None:
+        """Process the created-by segment."""
+        self.__append(st.field_is_any(Fields.created_by, ft.values.map(st.from_str)))
+
+    async def visit_role_is(self, ft: RoleIs) -> None:
+        """Process role-is segment."""
+        ids = await self.ctx.get_ids_for_roles(ft.values, direct_membership=True)
+        if ids is not None:
+            nel = Nel.from_list(ids)
+            if nel is None:
+                self.__append(st.id_not_exists())
+            else:
+                self.__append(st.id_in(nel))
+
+    async def visit_inherited_member_is(self, ft: InheritedMemberIs) -> None:
+        """Process inherited-member-is segment."""
+        ids = await self.ctx.get_member_ids(ft.users, direct_membership=False)
+        match Nel.from_list(ids):
+            case None:
+                self.__append(st.id_not_exists())
+            case nel:
+                self.__append(st.id_in(nel))
+
+    async def visit_direct_member_is(self, ft: DirectMemberIs) -> None:
+        """Process direct-member-is segment."""
+        ids = await self.ctx.get_member_ids(ft.users, direct_membership=True)
+        match Nel.from_list(ids):
+            case None:
+                self.__append(st.id_not_exists())
+            case nel:
+                self.__append(st.id_in(nel))
+
+    async def visit_created(self, ft: Created) -> None:
+        """Process the created segment."""
+        tokens: list[SolrToken] = []
+        match ft.cmp:
+            case Comparison.is_equal:
+                for dt in ft.values:
+                    (min, max_opt) = dt.resolve(self.ctx.current_time, self.ctx.zone)
+                    tokens.append(st.created_range(min, max_opt) if max_opt is not None else st.created_is(min))
+
+                self.__append(st.fold_or(tokens))
+
+            case Comparison.is_greater_than:
+                for dt in ft.values:
+                    (min, max_opt) = dt.resolve(self.ctx.current_time, self.ctx.zone)
+                    tokens.append(st.created_gt(max_opt or min))
+
+                self.__append(st.fold_or(tokens))
+
+            case Comparison.is_lower_than:
+                for dt in ft.values:
+                    (min, _) = dt.resolve(self.ctx.current_time, self.ctx.zone)
+                    tokens.append(st.created_lt(min))
+
+                self.__append(st.fold_or(tokens))
+
+
+class LuceneQueryInterpreter(QueryInterpreter):
+    """Convert a user search query into Solr's standard query.
+
+    See https://solr.apache.org/guide/solr/latest/query-guide/standard-query-parser.html
+
+    This class takes care of converting a user supplied query into the
+    corresponding solr query.
+
+    Here the search query can be tweaked if necessary (fuzzy searching
+    etc.).
+    """
+
+    async def run(self, ctx: Context, q: UserQuery) -> SolrUserQuery:
+        """Convert a user query into a solr search query."""
+        return await q.accept(_LuceneQueryTransform(ctx))
diff --git a/components/renku_data_services/search/user_query.py b/components/renku_data_services/search/user_query.py
new file mode 100644
index 000000000..b91721a6c
--- /dev/null
+++ b/components/renku_data_services/search/user_query.py
@@ -0,0 +1,1024 @@
+"""AST for a user search query."""
+
+from __future__ import annotations
+
+import calendar
+from abc import ABC, abstractmethod
+from dataclasses import dataclass, field
+from dataclasses import field as data_field
+from datetime import date, datetime, time, timedelta, tzinfo
+from enum import StrEnum
+from typing import Self, override
+
+from renku_data_services.authz.models import Role, Visibility
+from renku_data_services.base_models.core import NamespaceSlug
+from renku_data_services.base_models.nel import Nel
+from renku_data_services.namespace.models import UserNamespace
+from renku_data_services.solr.entity_documents import EntityType
+from renku_data_services.solr.solr_client import SortDirection
+from renku_data_services.users.models import UserInfo
+
+
+class Helper:
+    """Internal helper functions."""
+
+    @classmethod
+    def is_valid_char(cls, c: str) -> bool:
+        """Test for a character that doesn't require quoting."""
+        return not c.isspace() and c != '"' and c != "\\" and c != ","
+
+    @classmethod
+    def quote(cls, input: str) -> str:
+        """Wraps input in quotes if necessary."""
+        for c in input:
+            if not Helper.is_valid_char(c):
+                return f'"{input.replace('"', '\\"')}"'
+        return input
+
+
+class Field(StrEnum):
+    """A field name."""
+
+    id = "id"
+    fname = "name"
+    slug = "slug"
+    visibility = "visibility"
+    created = "created"
+    created_by = "createdby"
+    type = "type"
+    role = "role"
+    keyword = "keyword"
+    namespace = "namespace"
+    direct_member = "direct_member"
+    inherited_member = "inherited_member"
+
+
+class Comparison(StrEnum):
+    """A comparison for a field."""
+
+    is_equal = ":"
+    is_lower_than = "<"
+    is_greater_than = ">"
+
+
+@dataclass(eq=True, frozen=True)
+class Username:
+    """A user identifier: username slug."""
+
+    slug: NamespaceSlug
+    __hashvalue: int | None = field(init=False, repr=False, default=None)
+
+    def render(self) -> str:
+        """Render the query part of this value."""
+        return f"@{self.slug.value}"
+
+    @classmethod
+    def from_name(cls, s: str) -> Username:
+        """Create a Username from a string."""
+        return Username(NamespaceSlug.from_name(s))
+
+    @classmethod
+    def from_user_namespace(cls, ns: UserNamespace) -> Username:
+        """Create a Username from a UserNamespace."""
+        return Username(ns.path.first)
+
+    @classmethod
+    def from_user_info(cls, u: UserInfo) -> Username:
+        """Create a Username from a UserInfo."""
+        return cls.from_user_namespace(u.namespace)
+
+
+@dataclass(eq=True, frozen=True)
+class UserId:
+    """A user identifier (the keycloak one)."""
+
+    id: str
+
+    def render(self) -> str:
+        """Renders the query representation of this value."""
+        return self.id
+
+
+type UserDef = Username | UserId
+
+
+@dataclass
+class PartialDate:
+    """A date where month and day may be omitted."""
+
+    year: int
+    month: int | None = data_field(default=None)
+    dayOfMonth: int | None = data_field(default=None)
+
+    def render(self) -> str:
+        """Return the string representation."""
+        res = f"{self.year}"
+        if self.month is not None:
+            res += f"-{self.month:02}"
+        if self.dayOfMonth is not None:
+            res += f"-{self.dayOfMonth:02}"
+        return res
+
+    def is_exact(self) -> bool:
+        """Return whether all optional parts are set."""
+        return self.month is not None and self.dayOfMonth is not None
+
+    def max(self) -> PartialDate:
+        """Set missing parts to the maximum value."""
+        m = self.month or 12
+        (_, dom) = calendar.monthrange(self.year, m)
+        return PartialDate(self.year, m, self.dayOfMonth or dom)
+
+    def min(self) -> PartialDate:
+        """Set missing parts to the lowest value."""
+        return PartialDate(
+            self.year,
+            self.month or 1,
+            self.dayOfMonth or 1,
+        )
+
+    def date_max(self) -> date:
+        """Set missing parts to the maximum value."""
+        dm = self.max()
+        return date(dm.year, dm.month or 12, dm.dayOfMonth or 31)
+
+    def date_min(self) -> date:
+        """Set missing parts to the lowest value."""
+        return date(
+            self.year,
+            self.month or 1,
+            self.dayOfMonth or 1,
+        )
+
+
+@dataclass
+class PartialTime:
+    """A time where minutes and seconds are optional."""
+
+    hour: int
+    minute: int | None = data_field(default=None)
+    second: int | None = data_field(default=None)
+
+    def render(self) -> str:
+        """Renders the string representation."""
+        res = f"{self.hour:02}"
+        if self.minute is not None:
+            res += f":{self.minute:02}"
+        if self.second is not None:
+            res += f":{self.second:02}"
+        return res
+
+    def max(self) -> PartialTime:
+        """Set missing parts to the highest value."""
+        return PartialTime(self.hour, self.minute or 59, self.second or 59)
+
+    def min(self) -> PartialTime:
+        """Set missing parts to the lowest value."""
+        return PartialTime(self.hour, self.minute or 0, self.second or 0)
+
+    def time_max(self) -> time:
+        """Set missing parts to the highest value."""
+        return time(self.hour, self.minute or 59, self.second or 59)
+
+    def time_min(self) -> time:
+        """Set missing parts to the lowest value."""
+        return time(self.hour, self.minute or 0, self.second or 0)
+
+
+@dataclass
+class PartialDateTime:
+    """A date time, where minor fields are optional."""
+
+    date: PartialDate
+    time: PartialTime | None = data_field(default=None)
+    zone: tzinfo | None = data_field(default=None)
+
+    def render(self) -> str:
+        """Renders the string representation."""
+        res = self.date.render()
+        if self.time is not None:
+            res += f"T{self.time.render()}"
+        if self.zone is not None:
+            # the query grammar only supports UTC ("Z") zones
+            res += "Z"
+        return res
+
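+    # Example (sketch): PartialDateTime(PartialDate(2023, 3)) covers all of
+    # March 2023: datetime_min() yields 2023-03-01T00:00:00 and
+    # datetime_max() yields 2023-03-31T23:59:59 (in the default zone).
+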
+    def max(self) -> PartialDateTime:
+        """Set missing parts to the highest value."""
+        return PartialDateTime(self.date.max(), (self.time or PartialTime(23)).max())
+
+    def min(self) -> PartialDateTime:
+        """Set missing parts to the lowest value."""
+        return PartialDateTime(self.date.min(), (self.time or PartialTime(0)).min())
+
+    def datetime_max(self, default_zone: tzinfo) -> datetime:
+        """Set missing parts to the highest value."""
+        d = self.date.date_max()
+        t = (self.time or PartialTime(23, 59, 59)).time_max()
+        return datetime(d.year, d.month, d.day, t.hour, t.minute, t.second, 0, self.zone or default_zone, fold=t.fold)
+
+    def datetime_min(self, default_zone: tzinfo) -> datetime:
+        """Set missing parts to the lowest value."""
+        d = self.date.date_min()
+        t = (self.time or PartialTime(0)).time_min()
+        return datetime(d.year, d.month, d.day, t.hour, t.minute, t.second, 0, self.zone or default_zone, fold=t.fold)
+
+    def with_zone(self, zone: tzinfo) -> Self:
+        """Return a copy with the given zone set."""
+        return type(self)(self.date, self.time, zone)
+
+    def resolve(self, ref: datetime, zone: tzinfo) -> tuple[datetime, datetime | None]:
+        """Resolve this partial date using the given reference."""
+        min = self.datetime_min(zone)
+        max = self.datetime_max(zone)
+        if min != max:
+            return (min, max)
+        else:
+            return (min, None)
+
+
+class RelativeDate(StrEnum):
+    """A date relative to a reference date."""
+
+    today = "today"
+    yesterday = "yesterday"
+
+    def render(self) -> str:
+        """Renders the string representation."""
+        return self.name
+
+    def resolve(self, ref: datetime, zone: tzinfo) -> tuple[datetime, datetime | None]:
+        """Resolve this relative date using the given reference."""
+        match self:
+            case RelativeDate.today:
+                ref_dt = ref
+            case RelativeDate.yesterday:
+                ref_dt = ref - timedelta(days=1)
+
+        pd = PartialDateTime(PartialDate(ref_dt.year, ref_dt.month, ref_dt.day))
+        return pd.resolve(ref, zone)
+
+
+@dataclass
+class DateTimeCalc:
+    """A date specification using calculation from a reference date."""
+
+    ref: PartialDateTime | RelativeDate
+    amount_days: int
+    is_range: bool
+
+    def render(self) -> str:
+        """Renders the string representation."""
+        period = abs(self.amount_days)
+        if self.is_range:
+            sep = "/"
+        elif self.amount_days < 0:
+            sep = "-"
+        else:
+            sep = "+"
+
+        return f"{self.ref.render()}{sep}{period}d"
+
+    def resolve(self, ref: datetime, zone: tzinfo) -> tuple[datetime, datetime | None]:
+        """Resolve this date calculation using the given reference."""
+        (ts_min, ts_max_opt) = self.ref.resolve(ref, zone)
+        if self.is_range:
+            return (
+                ts_min - timedelta(days=self.amount_days),
+                (ts_max_opt or ts_min) + timedelta(days=self.amount_days),
+            )
+        else:
+            return (ts_min + timedelta(days=self.amount_days), None)
+
+
+type DateTimeRef = PartialDateTime | RelativeDate | DateTimeCalc
+
+
+class SegmentBase(ABC):
+    """Base class for a query segment."""
+
+    @abstractmethod
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        ...
+
+
+class FieldComparison(SegmentBase):
+    """A query part for a specific field."""
+
+    @property
+    @abstractmethod
+    def field(self) -> Field:
+        """The field to compare."""
+        ...
+
+    @property
+    @abstractmethod
+    def cmp(self) -> Comparison:
+        """The comparison to use."""
+        ...
+
+    @abstractmethod
+    def _render_value(self) -> str: ...
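+
+    # For example (sketch): NameIs(Nel.of("foo bar")).render() yields
+    # 'name:"foo bar"', since values with whitespace get quoted by Helper.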
+
+    def render(self) -> str:
+        """Renders the string representation."""
+        return f"{self.field.value}{self.cmp.value}{self._render_value()}"
+
+
+@dataclass
+class InheritedMemberIs(FieldComparison):
+    """Check for membership of a given user."""
+
+    users: Nel[UserDef]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.inherited_member
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison to use."""
+        return Comparison.is_equal
+
+    def _render_value(self) -> str:
+        return self.users.map(lambda u: u.render()).mk_string(",")
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_inherited_member_is(self)
+
+
+@dataclass
+class DirectMemberIs(FieldComparison):
+    """Check for direct membership of a given user."""
+
+    users: Nel[UserDef]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.direct_member
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison to use."""
+        return Comparison.is_equal
+
+    def _render_value(self) -> str:
+        return self.users.map(lambda u: u.render()).mk_string(",")
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_direct_member_is(self)
+
+
+@dataclass
+class TypeIs(FieldComparison):
+    """Compare the type property against a list of values."""
+
+    values: Nel[EntityType]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.type
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison operation."""
+        return Comparison.is_equal
+
+    def _render_value(self) -> str:
+        return self.values.mk_string(",")
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_type_is(self)
+
+
+@dataclass
+class IdIs(FieldComparison):
+    """Compare document id against a list of values."""
+
+    values: Nel[str]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.id
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison operation."""
+        return Comparison.is_equal
+
+    def _render_value(self) -> str:
+        return self.values.mk_string(",", Helper.quote)
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_id_is(self)
+
+
+@dataclass
+class NameIs(FieldComparison):
+    """Compare the name against a list of values."""
+
+    values: Nel[str]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.fname
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison operation."""
+        return Comparison.is_equal
+
+    def _render_value(self) -> str:
+        return self.values.mk_string(",", Helper.quote)
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_name_is(self)
+
+
+@dataclass
+class SlugIs(FieldComparison):
+    """Compare the slug against a list of values."""
+
+    values: Nel[str]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.slug
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison operation."""
+        return Comparison.is_equal
+
+    def _render_value(self) -> str:
+        return self.values.mk_string(",", Helper.quote)
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_slug_is(self)
+
+
+@dataclass
+class KeywordIs(FieldComparison):
+    """Compare the keyword against a list of values."""
+
+    values: Nel[str]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.keyword
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison operation."""
+        return Comparison.is_equal
+
+    def _render_value(self) -> str:
+        return self.values.mk_string(",", Helper.quote)
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_keyword_is(self)
+
+
+@dataclass
+class NamespaceIs(FieldComparison):
+    """Compare the namespace against a list of values."""
+
+    values: Nel[str]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.namespace
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison operation."""
+        return Comparison.is_equal
+
+    def _render_value(self) -> str:
+        return self.values.mk_string(",", Helper.quote)
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_namespace_is(self)
+
+
+@dataclass
+class VisibilityIs(FieldComparison):
+    """Compare the visibility against a list of values."""
+
+    values: Nel[Visibility]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.visibility
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison operation."""
+        return Comparison.is_equal
+
+    def _render_value(self) -> str:
+        return self.values.mk_string(",")
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_visibility_is(self)
+
+
+@dataclass
+class CreatedByIs(FieldComparison):
+    """Compare the creating user against a list of values."""
+
+    values: Nel[str]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.created_by
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison operation."""
+        return Comparison.is_equal
+
+    def _render_value(self) -> str:
+        return self.values.mk_string(",", Helper.quote)
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_created_by_is(self)
+
+
+@dataclass
+class Created(FieldComparison):
+    """Compare the created timestamp."""
+
+    cmp_op: Comparison
+    values: Nel[DateTimeRef]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.created
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison operation."""
+        return self.cmp_op
+
+    def _render_value(self) -> str:
+        return self.values.mk_string(",", lambda e: e.render())
+
+    @classmethod
+    def eq(cls, value: DateTimeRef, *args: DateTimeRef) -> Created:
+        """Create an instance with `is_equal` comparison."""
+        nel = Nel(value, list(args))
+        return Created(Comparison.is_equal, nel)
+
+    @classmethod
+    def lt(cls, value: DateTimeRef, *args: DateTimeRef) -> Created:
+        """Create an instance with `is_lower_than` comparison."""
+        nel = Nel(value, list(args))
+        return Created(Comparison.is_lower_than, nel)
+
+    @classmethod
+    def gt(cls, value: DateTimeRef, *args: DateTimeRef) -> Created:
+        """Create an instance with `is_greater_than` comparison."""
+        nel = Nel(value, list(args))
+        return Created(Comparison.is_greater_than, nel)
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_created(self)
+
+
+@dataclass
+class RoleIs(FieldComparison):
+    """Compare a role."""
+
+    values: Nel[Role]
+
+    @property
+    def field(self) -> Field:
+        """The field name."""
+        return Field.role
+
+    @property
+    def cmp(self) -> Comparison:
+        """The comparison operation."""
+        return Comparison.is_equal
+
+    def _render_value(self) -> str:
+        return self.values.mk_string(",")
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_role_is(self)
+
+
+@dataclass
+class Text(SegmentBase):
+    """A query part that does not correspond to a specific field."""
+
+    value: str
+
+    def render(self) -> str:
+        """Return the value."""
+        return self.value
+
+    def append(self, next: Self) -> Self:
+        """Appends a text to this one."""
+        if self.value == "":
+            return next
+        elif next.value == "":
+            return self
+        else:
+            return type(self)(self.value + " " + next.value)
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_text(self)
+
+
+class SortableField(StrEnum):
+    """A field supported for sorting."""
+
+    fname = "name"
+    created = "created"
+    score = "score"
+
+
+@dataclass
+class OrderBy:
+    """An order specification."""
+
+    field: SortableField
+    direction: SortDirection
+
+    @classmethod
+    def from_tuple(cls, t: tuple[SortableField, SortDirection]) -> OrderBy:
+        """Create an OrderBy value from a tuple."""
+        return OrderBy(t[0], t[1])
+
+    def render(self) -> str:
+        """Renders the string representation."""
+        return f"{self.field.value}-{self.direction.value}"
+
+
+@dataclass
+class Order(SegmentBase):
+    """A query part for defining how to order results."""
+
+    fields: Nel[OrderBy]
+
+    def render(self) -> str:
+        """Renders the string version of this query part."""
+        return f"sort:{self.fields.mk_string(",", lambda e: e.render())}"
+
+    def append(self, other: Self) -> Self:
+        """Append the field list of `other` to this."""
+        return type(self)(self.fields.append(other.fields))
+
+    async def accept(self, visitor: SegmentVisitior) -> None:
+        """Apply this to the visitor."""
+        return await visitor.visit_order(self)
+
+
+type FieldTerm = (
+    TypeIs
+    | IdIs
+    | NameIs
+    | SlugIs
+    | VisibilityIs
+    | KeywordIs
+    | NamespaceIs
+    | CreatedByIs
+    | Created
+    | RoleIs
+    | InheritedMemberIs
+    | DirectMemberIs
+)
+
+
+type Segment = FieldTerm | Text | Order
+
+
+class Segments:
+    """Helpers for creating segments."""
+
+    @classmethod
+    def order(cls, o: OrderBy, *args: OrderBy) -> Segment:
+        """Return an order segment."""
+        return Order(Nel(o, list(args)))
+
+    @classmethod
+    def inherited_member_is(cls, user: UserDef, *args: UserDef) -> Segment:
+        """Return an inherited-member-is query segment."""
+        return InheritedMemberIs(Nel(user, list(args)))
+
+    @classmethod
+    def direct_member_is(cls, user: UserDef, *args: UserDef) -> Segment:
+        """Return a direct-member-is query segment."""
+        return DirectMemberIs(Nel(user, list(args)))
+
+    @classmethod
+    def text(cls, text: str) -> Segment:
+        """Return a free text query segment."""
+        return Text(text)
+
+    @classmethod
+    def sort_by(cls, s: tuple[SortableField, SortDirection], *args: tuple[SortableField, SortDirection]) -> Segment:
+        """Return a sort query segment."""
+        rest = list(map(OrderBy.from_tuple, args))
+        return Order(Nel(OrderBy.from_tuple(s), rest))
+
+    @classmethod
+    def type_is(cls, et: EntityType, *args: EntityType) -> Segment:
+        """Return type-is query segment."""
+        return TypeIs(Nel(et, list(args)))
+
+    @classmethod
+    def id_is(cls, id: str, *args: str) -> Segment:
+        """Return id-is query segment."""
+        return IdIs(Nel(id, list(args)))
+
+    @classmethod
+    def name_is(cls, name: str, *args: str) -> Segment:
+        """Return name-is query segment."""
+        return NameIs(Nel(name, list(args)))
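+
+    # e.g. (sketch): Segments.name_is("john").render() == 'name:john'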
+
+    @classmethod
+    def slug_is(cls, slug: str, *args: str) -> Segment:
+        """Return slug-is query segment."""
+        return SlugIs(Nel(slug, list(args)))
+
+    @classmethod
+    def visibility_is(cls, vis: Visibility, *args: Visibility) -> Segment:
+        """Return visibility-is query segment."""
+        return VisibilityIs(Nel(vis, list(args)))
+
+    @classmethod
+    def keyword_is(cls, kw: str, *args: str) -> Segment:
+        """Return keyword-is query segment."""
+        return KeywordIs(Nel(kw, list(args)))
+
+    @classmethod
+    def namespace_is(cls, ns: str, *args: str) -> Segment:
+        """Return namespace-is query segment."""
+        return NamespaceIs(Nel(ns, list(args)))
+
+    @classmethod
+    def created_by_is(cls, id: str, *args: str) -> Segment:
+        """Return created_by-is query segment."""
+        return CreatedByIs(Nel(id, list(args)))
+
+    @classmethod
+    def created(cls, cmp: Comparison, date: DateTimeRef, *args: DateTimeRef) -> Segment:
+        """Return created query segment."""
+        return Created(cmp, Nel(date, list(args)))
+
+    @classmethod
+    def created_is(cls, date: DateTimeRef, *args: DateTimeRef) -> Segment:
+        """Return created-is query segment."""
+        return cls.created(Comparison.is_equal, date, *args)
+
+    @classmethod
+    def created_is_lt(cls, date: DateTimeRef, *args: DateTimeRef) -> Segment:
+        """Return created-< query segment."""
+        return cls.created(Comparison.is_lower_than, date, *args)
+
+    @classmethod
+    def created_is_gt(cls, date: DateTimeRef, *args: DateTimeRef) -> Segment:
+        """Return created-> query segment."""
+        return cls.created(Comparison.is_greater_than, date, *args)
+
+    @classmethod
+    def role_is(cls, role: Role, *args: Role) -> Segment:
+        """Return role-is query segment."""
+        return RoleIs(Nel(role, list(args)))
+
+
+@dataclass
+class UserQuery:
+    """A user search query.
+
+    The list of segments can be empty for the empty query.
+    """
+
+    segments: list[Segment]
+
+    @classmethod
+    def of(cls, *args: Segment) -> UserQuery:
+        """Constructor using varargs."""
+        return UserQuery(list(args))
+
+    def render(self) -> str:
+        """Return the string representation of this query."""
+        return " ".join([e.render() for e in self.segments])
+
+    async def accept[T](self, visitor: UserQueryVisitor[T]) -> T:
+        """Apply the visitor."""
+        for s in self.segments:
+            await s.accept(visitor)
+        return await visitor.build()
+
+    async def transform(self, visitor: UserQueryVisitor[UserQuery], *args: UserQueryVisitor[UserQuery]) -> UserQuery:
+        """Apply this query to the given transformations sequentially."""
+        transforms: list[UserQueryVisitor[UserQuery]] = [visitor]
+        transforms.extend(list(args))
+
+        acc = self
+        for t in transforms:
+            acc = await acc.accept(t)
+        return acc
+
+
+class SegmentVisitior(ABC):
+    """A visitor for a query segment."""
+
+    @abstractmethod
+    async def visit_order(self, order: Order) -> None:
+        """Visit order."""
+        ...
+
+    @abstractmethod
+    async def visit_text(self, text: Text) -> None:
+        """Visit text."""
+        ...
+
+    @abstractmethod
+    async def visit_type_is(self, ft: TypeIs) -> None:
+        """Visit type-is."""
+        ...
+
+    @abstractmethod
+    async def visit_id_is(self, ft: IdIs) -> None:
+        """Visit id-is."""
+        ...
+
+    @abstractmethod
+    async def visit_name_is(self, ft: NameIs) -> None:
+        """Visit name-is."""
+        ...
+
+    @abstractmethod
+    async def visit_slug_is(self, ft: SlugIs) -> None:
+        """Visit slug-is."""
+        ...
+
+    @abstractmethod
+    async def visit_visibility_is(self, ft: VisibilityIs) -> None:
+        """Visit visibility-is."""
+        ...
+
+    @abstractmethod
+    async def visit_keyword_is(self, ft: KeywordIs) -> None:
+        """Visit keyword-is."""
+        ...
+
+    @abstractmethod
+    async def visit_namespace_is(self, ft: NamespaceIs) -> None:
+        """Visit namespace-is."""
+        ...
+
+    @abstractmethod
+    async def visit_created_by_is(self, ft: CreatedByIs) -> None:
+        """Visit created-by-is."""
+        ...
+
+    @abstractmethod
+    async def visit_created(self, ft: Created) -> None:
+        """Visit created."""
+        ...
+
+    @abstractmethod
+    async def visit_role_is(self, ft: RoleIs) -> None:
+        """Visit role-is."""
+        ...
+
+    @abstractmethod
+    async def visit_direct_member_is(self, ft: DirectMemberIs) -> None:
+        """Visit direct-member-is."""
+        ...
+
+    @abstractmethod
+    async def visit_inherited_member_is(self, ft: InheritedMemberIs) -> None:
+        """Visit inherited-member-is."""
+        ...
+
+
+class UserQueryVisitor[T](SegmentVisitior):
+    """A visitor to transform user queries."""
+
+    @abstractmethod
+    async def build(self) -> T:
+        """Return the value."""
+        ...
+
+
+class UserQueryFieldTermVisitor[T](UserQueryVisitor[T]):
+    """A variant of a visitor dispatching on the base union type Segment.
+
+    Every concrete visit_ method forwards to the `visit_field_term` method.
+    """
+
+    @abstractmethod
+    async def visit_field_term(self, ft: FieldTerm) -> None:
+        """Visit a field term query segment."""
+        ...
+
+    @override
+    async def visit_created(self, ft: Created) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+    @override
+    async def visit_created_by_is(self, ft: CreatedByIs) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+    @override
+    async def visit_direct_member_is(self, ft: DirectMemberIs) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+    @override
+    async def visit_id_is(self, ft: IdIs) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+    @override
+    async def visit_keyword_is(self, ft: KeywordIs) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+    @override
+    async def visit_inherited_member_is(self, ft: InheritedMemberIs) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+    @override
+    async def visit_name_is(self, ft: NameIs) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+    @override
+    async def visit_namespace_is(self, ft: NamespaceIs) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+    @override
+    async def visit_role_is(self, ft: RoleIs) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+    @override
+    async def visit_slug_is(self, ft: SlugIs) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+    @override
+    async def visit_type_is(self, ft: TypeIs) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+    @override
+    async def visit_visibility_is(self, ft: VisibilityIs) -> None:
+        """Forwards to `visit_field_term`."""
+        return await self.visit_field_term(ft)
+
+
+class EmptyUserQueryVisitor[T](UserQueryFieldTermVisitor[T]):
+    """A visitor with every method doing nothing.
+
+    The `build` method is left to implement by subclasses.
diff --git a/components/renku_data_services/search/user_query_parser.py b/components/renku_data_services/search/user_query_parser.py
new file mode 100644
index 000000000..34adf8c85
--- /dev/null
+++ b/components/renku_data_services/search/user_query_parser.py
@@ -0,0 +1,226 @@
+"""Parser for the user query AST."""
+
+from __future__ import annotations
+
+import datetime
+from typing import cast
+
+from parsy import (
+    Parser,
+    char_from,
+    decimal_digit,
+    fail,
+    from_enum,
+    regex,
+    seq,
+    string,
+    success,
+    test_char,
+)
+
+from renku_data_services.app_config import logging
+from renku_data_services.authz.models import Role, Visibility
+from renku_data_services.base_models.core import NamespaceSlug
+from renku_data_services.base_models.nel import Nel
+from renku_data_services.search.user_query import (
+    Comparison,
+    Created,
+    CreatedByIs,
+    DateTimeCalc,
+    DirectMemberIs,
+    Field,
+    Helper,
+    IdIs,
+    InheritedMemberIs,
+    KeywordIs,
+    NameIs,
+    NamespaceIs,
+    Order,
+    OrderBy,
+    PartialDate,
+    PartialDateTime,
+    PartialTime,
+    RelativeDate,
+    RoleIs,
+    SlugIs,
+    SortableField,
+    Text,
+    TypeIs,
+    UserId,
+    Username,
+    UserQuery,
+    VisibilityIs,
+)
+from renku_data_services.search.user_query_process import CollapseMembers, CollapseText
+from renku_data_services.solr.entity_documents import EntityType
+from renku_data_services.solr.solr_client import SortDirection
+
+logger = logging.getLogger(__name__)
+
+
+def _check_range(n: int, min: int, max: int, msg: str) -> Parser:
+    if n < min or n > max:
+        return fail(msg)
+    else:
+        return success(n)
+
+
+def _check_month(m: int) -> Parser:
+    return _check_range(m, 1, 12, "Expect a month 1-12")
+
+
+def _check_day(day: int) -> Parser:
+    return _check_range(day, 1, 31, "Expect a day 1-31")
+
+
+def _check_hour(h: int) -> Parser:
+    return _check_range(h, 0, 23, "Expect an hour 0-23")
+
+
+def _check_minute(m: int) -> Parser:
+    return _check_range(m, 0, 59, "Expect a minute or second 0-59")
+
+
+# Parser[DateTimeCalc]
+def _create_datetime_calc(args: tuple[PartialDateTime | RelativeDate, str, int]) -> Parser:
+    ref: PartialDateTime | RelativeDate = args[0]
+    sep: str = args[1]
+    days: int = args[2]
+    match sep:
+        case "+":
+            return success(DateTimeCalc(ref, abs(days), False))
+        case "-":
+            return success(DateTimeCalc(ref, -abs(days), False))
+        case "/":
+            return success(DateTimeCalc(ref, abs(days), True))
+        case _:
+            return fail(f"Invalid date-time separator: {sep}")
+
+
+# Parser[FieldTerm]
+def _make_field_term(args: tuple[str, Nel[str]]) -> Parser:
+    field: str = args[0]
+    values: Nel[str] = args[1]
+    f = Field(field.lower())
+    match f:
+        case Field.fname:
+            return success(NameIs(values))
+        case Field.slug:
+            return success(SlugIs(values))
+        case Field.id:
+            return success(IdIs(values))
+        case Field.keyword:
+            return success(KeywordIs(values))
+        case Field.namespace:
+            return success(NamespaceIs(values))
+        case Field.created_by:
+            return success(CreatedByIs(values))
+        case _:
+            return fail(f"Invalid field name: {field}")
+
+
+class _DateTimeParser:
+    colon: Parser = string(":")
+    dash: Parser = string("-")
+    non_zero_digit: Parser = char_from("123456789")
+    utcZ: Parser = (string("Z") | 
string("z")).result(datetime.UTC) + + year: Parser = (non_zero_digit + decimal_digit.times(min=3, max=3).concat()).map(int) + month: Parser = decimal_digit.times(min=1, max=2).concat().map(int).bind(_check_month) + dom: Parser = decimal_digit.times(min=1, max=2).concat().map(int).bind(_check_day) + hour: Parser = decimal_digit.times(min=1, max=2).concat().map(int).bind(_check_hour) + minsec: Parser = decimal_digit.times(min=1, max=2).concat().map(int).bind(_check_minute) + ndays: Parser = (non_zero_digit + decimal_digit.many().concat()).map(int) << char_from("dD") + + partial_date: Parser = seq(year, (dash >> month).optional(), (dash >> dom).optional()).combine(PartialDate) + partial_time: Parser = seq(hour, (colon >> minsec).optional(), (colon >> minsec).optional()).combine(PartialTime) + partial_datetime: Parser = seq(partial_date, (string("T") >> partial_time).optional(), utcZ.optional()).combine( + PartialDateTime + ) + + relative_date: Parser = from_enum(RelativeDate, lambda s: s.lower()) + + datetime_calc: Parser = seq(partial_datetime | relative_date, char_from("+-/"), ndays).bind(_create_datetime_calc) + + datetime_ref: Parser = datetime_calc | partial_datetime | relative_date + + +class _ParsePrimitives: + dp: _DateTimeParser = _DateTimeParser() + whitespace: Parser = regex(r"\s*") + comma: Parser = string(",") << whitespace + + char_basic: Parser = test_char(func=Helper.is_valid_char, description="simple string") + char_esc: Parser = string("\\") >> (string('"') | string("\\")) + no_quote: Parser = test_char(lambda c: c != '"', description="no quote") + + string_basic: Parser = char_basic.at_least(1).concat() + string_quoted: Parser = string('"') >> (char_esc | no_quote).at_least(1).concat() << string('"') + string_value: Parser = string_quoted | string_basic + + string_values: Parser = string_value.sep_by(comma, min=1).map(Nel.unsafe_from_list) + + sortable_field: Parser = from_enum(SortableField, lambda s: s.lower()) + sort_direction: Parser = from_enum(SortDirection, lambda s: s.lower()) + entity_type: Parser = from_enum(EntityType, lambda s: s.lower()) + visibility: Parser = from_enum(Visibility, lambda s: s.lower()) + role: Parser = from_enum(Role, lambda s: s.lower()) + + is_equal: Parser = string(Comparison.is_equal.value).result(Comparison.is_equal) + is_gt: Parser = string(Comparison.is_greater_than).result(Comparison.is_greater_than) + is_lt: Parser = string(Comparison.is_lower_than).result(Comparison.is_lower_than) + comparison: Parser = from_enum(Comparison, lambda s: s.lower()) + + ordered_by: Parser = seq((sortable_field << string("-")), sort_direction).combine(OrderBy) + + ordered_by_nel: Parser = ordered_by.sep_by(comma, min=1).map(Nel.unsafe_from_list) + entity_type_nel: Parser = entity_type.sep_by(comma, min=1).map(Nel.unsafe_from_list) + visibility_nel: Parser = visibility.sep_by(comma, min=1).map(Nel.unsafe_from_list) + role_nel: Parser = role.sep_by(comma, min=1).map(Nel.unsafe_from_list) + datetime_ref_nel: Parser = dp.datetime_ref.sep_by(comma, min=1).map(Nel.unsafe_from_list) + + sort_term: Parser = string("sort") >> is_equal >> ordered_by_nel.map(Order) + + type_is: Parser = string(Field.type.value, lambda s: s.lower()) >> is_equal >> entity_type_nel.map(TypeIs) + visibility_is: Parser = (string(Field.visibility.value, lambda s: s.lower()) >> is_equal >> visibility_nel).map( + VisibilityIs + ) + created: Parser = string(Field.created.value, lambda s: s.lower()) >> seq(comparison, datetime_ref_nel).combine( + Created + ) + role_is: Parser = 
string(Field.role.value, lambda s: s.lower()) >> is_equal >> role_nel.map(RoleIs)
+
+    user_name: Parser = string("@") >> string_basic.map(NamespaceSlug.from_name).map(Username)
+    user_id: Parser = string_basic.map(UserId)
+    user_def_nel: Parser = (user_name | user_id).sep_by(comma, min=1).map(Nel.unsafe_from_list)
+    inherited_member_is: Parser = (
+        string(Field.inherited_member.value, lambda s: s.lower()) >> is_equal >> user_def_nel.map(InheritedMemberIs)
+    )
+    direct_member_is: Parser = (
+        string(Field.direct_member.value, lambda s: s.lower()) >> is_equal >> user_def_nel.map(DirectMemberIs)
+    )
+
+    term_is: Parser = seq(from_enum(Field, lambda s: s.lower()) << is_equal, string_values).bind(_make_field_term)
+
+    field_term: Parser = type_is | visibility_is | role_is | inherited_member_is | direct_member_is | created | term_is
+    free_text: Parser = test_char(lambda c: not c.isspace(), "string without spaces").at_least(1).concat().map(Text)
+
+    segment: Parser = field_term | sort_term | free_text
+
+    query: Parser = segment.sep_by(whitespace, min=0).map(UserQuery)
+
+
+class QueryParser:
+    """Parsing user search queries."""
+
+    @classmethod
+    def parse_raw(cls, input: str) -> UserQuery:
+        """Parses the input string into a UserQuery, without any post processing."""
+        pp = _ParsePrimitives()
+        return cast(UserQuery, pp.query.parse(input.strip()))
+
+    @classmethod
+    async def parse(cls, input: str) -> UserQuery:
+        """Parses a user search query into its AST."""
+        q = cls.parse_raw(input)
+        return await q.transform(CollapseMembers(), CollapseText())
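For orientation, a small usage sketch of the parser defined above (illustrative, not part of this patch); it assumes `:` is the equality token produced by `Comparison.is_equal` and that `project`/`public` are valid `EntityType`/`Visibility` values:

    import asyncio

    from renku_data_services.search.user_query_parser import QueryParser


    async def main() -> None:
        # parse_raw yields the segments exactly as written ...
        raw = QueryParser.parse_raw("type:project visibility:public hello world")
        print(raw.render())

        # ... while parse additionally applies the post-processing visitors
        # (CollapseMembers, CollapseText), e.g. merging the two free-text
        # segments "hello" and "world" into one.
        cooked = await QueryParser.parse("type:project visibility:public hello world")
        print(cooked.render())


    asyncio.run(main())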
"""Append the current text segment and reset, then add `seg`.""" + if self.current is not None: + self.segments.append(self.current) + self.current = None + self.segments.append(seg) + + async def visit_order(self, order: Order) -> None: + """Visit order nodes.""" + return self.visit_other(order) + + async def visit_field_term(self, ft: FieldTerm) -> None: + """Visit field term nodes.""" + return self.visit_other(ft) + + +class CollapseMembers(UserQueryFieldTermVisitor[UserQuery]): + """Collapses member segments and limits values to a maximum.""" + + def __init__(self, maximum_member_count: int = 4) -> None: + self.maximum_member_count = maximum_member_count + self.segments: list[Segment] = [] + self.inherited_members: list[UserDef] = [] + self.direct_members: list[UserDef] = [] + + async def build(self) -> UserQuery: + """Return the query with member segments combined.""" + result: list[Segment] = [] + max = self.maximum_member_count + length = len(self.inherited_members) + len(self.direct_members) + if length > max: + logger.info(f"Removing {length - max} members from query, only {max} allowed!") + self.direct_members = self.direct_members[:max] + remaining = abs(max - len(self.direct_members)) + self.inherited_members = self.inherited_members[:remaining] + + nel = Nel.from_list(self.direct_members) + if nel is not None: + result.append(DirectMemberIs(nel)) + + nel = Nel.from_list(self.inherited_members) + if nel is not None: + result.append(InheritedMemberIs(nel)) + + self.direct_members = [] + self.inherited_members = [] + self.segments.extend(result) + return UserQuery(self.segments) + + @override + async def visit_inherited_member_is(self, ft: InheritedMemberIs) -> None: + self.inherited_members.extend(ft.users) + + @override + async def visit_direct_member_is(self, ft: DirectMemberIs) -> None: + self.direct_members.extend(ft.users) + + async def visit_order(self, order: Order) -> None: + """Collect order nodes.""" + self.segments.append(order) + + async def visit_text(self, text: Text) -> None: + """Collect text nodes.""" + self.segments.append(text) + + async def visit_field_term(self, ft: FieldTerm) -> None: + """Collect remaining terms.""" + self.segments.append(ft) diff --git a/components/renku_data_services/secrets/api.spec.yaml b/components/renku_data_services/secrets/api.spec.yaml index df85bf7ce..d45fde9f1 100644 --- a/components/renku_data_services/secrets/api.spec.yaml +++ b/components/renku_data_services/secrets/api.spec.yaml @@ -96,10 +96,15 @@ components: description: A mapping between secret_ids and names where names will be used as key values in the created K8s secret. 
type: object additionalProperties: - type: string + oneOf: + - type: string + - type: array + items: + type: string example: 01J2F86WS7S8N6159WA3X4042H: access_key_id 01J2F86XPW43YW2WRT4RZ1CNZ9: secret_access_key + 4STRRKAQ0428936ZP9R9YWN2X7: ["my_key", "my_copied_key"] ErrorResponse: type: object properties: diff --git a/components/renku_data_services/secrets/apispec.py b/components/renku_data_services/secrets/apispec.py index 9238dbede..c28af21bc 100644 --- a/components/renku_data_services/secrets/apispec.py +++ b/components/renku_data_services/secrets/apispec.py @@ -1,10 +1,10 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2024-08-20T07:15:21+00:00 +# timestamp: 2025-03-19T10:21:09+00:00 from __future__ import annotations -from typing import Dict, List, Optional +from typing import Dict, List, Optional, Union from pydantic import ConfigDict, Field, RootModel from renku_data_services.secrets.apispec_base import BaseAPISpec @@ -25,11 +25,13 @@ class Ulid(RootModel[str]): class Error(BaseAPISpec): - code: int = Field(..., example=1404, gt=0) + code: int = Field(..., examples=[1404], gt=0) detail: Optional[str] = Field( - None, example="A more detailed optional message showing what the problem was" + None, examples=["A more detailed optional message showing what the problem was"] + ) + message: str = Field( + ..., examples=["Something went wrong - please try again later"] ) - message: str = Field(..., example="Something went wrong - please try again later") class ErrorResponse(BaseAPISpec): @@ -43,10 +45,10 @@ class K8sSecret(BaseAPISpec): name: str = Field( ..., description="The name of the k8s secret to create", - example="john-doe-session-57-secret", + examples=["john-doe-session-57-secret"], ) namespace: str = Field( - ..., description="The namespace of the k8s secret to create", example="renku" + ..., description="The namespace of the k8s secret to create", examples=["renku"] ) secret_ids: List[Ulid] = Field( ..., description="The ids of the secrets to include", min_length=1 @@ -54,13 +56,15 @@ class K8sSecret(BaseAPISpec): owner_references: List[Dict[str, str]] = Field( ..., description="The resource in K8s that owns this secret", - example=[ - { - "apiVersion": "amalthea.dev/v1alpha1", - "kind": "JupyterServer", - "name": "renku-1234", - "uid": "c9328118-8d32-41b4-b9bd-1437880c95a2", - } + examples=[ + [ + { + "apiVersion": "amalthea.dev/v1alpha1", + "kind": "JupyterServer", + "name": "renku-1234", + "uid": "c9328118-8d32-41b4-b9bd-1437880c95a2", + } + ] ], ) - key_mapping: Optional[Dict[str, str]] = None + key_mapping: Optional[Dict[str, Union[str, List[str]]]] = None diff --git a/components/renku_data_services/secrets/blueprints.py b/components/renku_data_services/secrets/blueprints.py index ac6eae3cc..22f95ed84 100644 --- a/components/renku_data_services/secrets/blueprints.py +++ b/components/renku_data_services/secrets/blueprints.py @@ -14,7 +14,7 @@ from renku_data_services.k8s.client_interfaces import K8sCoreClientInterface from renku_data_services.secrets import apispec from renku_data_services.secrets.core import create_k8s_secret -from renku_data_services.secrets.db import UserSecretsRepo +from renku_data_services.secrets.db import LowLevelUserSecretsRepo from renku_data_services.secrets.models import OwnerReference @@ -23,7 +23,7 @@ class K8sSecretsBP(CustomBlueprint): """Handlers for using user secrets in K8s.""" authenticator: base_models.Authenticator - user_secrets_repo: UserSecretsRepo + user_secrets_repo: LowLevelUserSecretsRepo 
secret_service_private_key: rsa.RSAPrivateKey
     previous_secret_service_private_key: rsa.RSAPrivateKey | None
     core_client: K8sCoreClientInterface
diff --git a/components/renku_data_services/secrets/config.py b/components/renku_data_services/secrets/config.py
index c4f7d4d38..a4cb2bfa6 100644
--- a/components/renku_data_services/secrets/config.py
+++ b/components/renku_data_services/secrets/config.py
@@ -1,126 +1,90 @@
 """Configurations."""
 
 import os
+import secrets
 from dataclasses import dataclass, field
 from pathlib import Path
-from typing import Any
+from typing import Self
 
 from cryptography.hazmat.primitives import serialization
 from cryptography.hazmat.primitives.asymmetric import rsa
 from cryptography.hazmat.primitives.asymmetric.types import PrivateKeyTypes
-from jwt import PyJWKClient
-from yaml import safe_load
 
-import renku_data_services.secrets
-from renku_data_services import base_models, errors
-from renku_data_services.authn.dummy import DummyAuthenticator
-from renku_data_services.authn.keycloak import KeycloakAuthenticator
-from renku_data_services.db_config.config import DBConfig
-from renku_data_services.k8s.client_interfaces import K8sCoreClientInterface
-from renku_data_services.k8s.clients import DummyCoreClient, K8sCoreClient
-from renku_data_services.secrets.db import UserSecretsRepo
-from renku_data_services.utils.core import oidc_discovery
+from renku_data_services import errors
 
 
 @dataclass
-class Config:
-    """Secrets service config."""
+class PublicSecretsConfig:
+    """Configuration class for secrets settings."""
 
-    db: DBConfig
-    authenticator: base_models.Authenticator
-    secrets_service_private_key: rsa.RSAPrivateKey
-    previous_secrets_service_private_key: rsa.RSAPrivateKey | None
-    core_client: K8sCoreClientInterface
-    app_name: str = "secrets_storage"
-    version: str = "0.0.1"
-    spec: dict[str, Any] = field(init=False, default_factory=dict)
-    _user_secrets_repo: UserSecretsRepo | None = field(default=None, repr=False, init=False)
+    public_key: rsa.RSAPublicKey
+    encryption_key: bytes = field(repr=False)
 
-    def __post_init__(self) -> None:
-        spec_file = Path(renku_data_services.secrets.__file__).resolve().parent / "api.spec.yaml"
-        with open(spec_file) as f:
-            self.spec = safe_load(f)
+    @classmethod
+    def from_env(cls) -> Self:
+        """Load config from environment variables."""
+        if os.environ.get("DUMMY_STORES", "false").lower() == "true":
+            public_key_path = os.getenv("SECRETS_SERVICE_PUBLIC_KEY_PATH")
+            encryption_key = secrets.token_bytes(32)
+            if public_key_path is not None:
+                public_key = serialization.load_pem_public_key(Path(public_key_path).read_bytes())
+            else:
+                # generate new random key
+                private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+                public_key = private_key.public_key()
+        else:
+            public_key_path = os.getenv("SECRETS_SERVICE_PUBLIC_KEY_PATH", "/secret_service_public_key")
+            encryption_key_path = os.getenv("ENCRYPTION_KEY_PATH", "encryption_key")
+            encryption_key = Path(encryption_key_path).read_bytes()
+            public_key = serialization.load_pem_public_key(Path(public_key_path).read_bytes())
+        if not isinstance(public_key, rsa.RSAPublicKey):
+            raise errors.ConfigurationError(message="Secret service public key is not an RSAPublicKey")
 
-    @property
-    def user_secrets_repo(self) -> UserSecretsRepo:
-        """The DB adapter for users."""
-        if not self._user_secrets_repo:
-            self._user_secrets_repo = UserSecretsRepo(
-                
session_maker=self.db.async_session_maker, - ) - return self._user_secrets_repo + return cls( + public_key=public_key, + encryption_key=encryption_key, + ) - @classmethod - def from_env(cls, prefix: str = "") -> "Config": - """Create a config from environment variables.""" - authenticator: base_models.Authenticator - core_client: K8sCoreClientInterface - secrets_service_private_key: PrivateKeyTypes - previous_secrets_service_private_key: PrivateKeyTypes | None = None - db = DBConfig.from_env(prefix) - version = os.environ.get(f"{prefix}VERSION", "0.0.1") +@dataclass +class PrivateSecretsConfig: + """Private configuration for the secrets service. + + IMPORTANT: To only be used inside secrets service. + """ - if os.environ.get(f"{prefix}DUMMY_STORES", "false").lower() == "true": - authenticator = DummyAuthenticator() - core_client = DummyCoreClient({}, {}) - secrets_service_private_key_path = os.getenv(f"{prefix}SECRETS_SERVICE_PRIVATE_KEY_PATH") - if secrets_service_private_key_path: - secrets_service_private_key = serialization.load_pem_private_key( - Path(secrets_service_private_key_path).read_bytes(), password=None - ) + private_key: rsa.RSAPrivateKey + previous_private_key: rsa.RSAPrivateKey | None + + @classmethod + def from_env(cls) -> Self: + """Load config from environment.""" + previous_private_key: PrivateKeyTypes | None = None + if os.environ.get("DUMMY_STORES", "false").lower() == "true": + private_key_path = os.getenv("SECRETS_SERVICE_PRIVATE_KEY_PATH") + if private_key_path: + private_key = serialization.load_pem_private_key(Path(private_key_path).read_bytes(), password=None) else: - secrets_service_private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) - previous_secrets_service_private_key_path = os.getenv(f"{prefix}PREVIOUS_SECRETS_SERVICE_PRIVATE_KEY_PATH") + private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) + previous_secrets_service_private_key_path = os.getenv("PREVIOUS_SECRETS_SERVICE_PRIVATE_KEY_PATH") if previous_secrets_service_private_key_path: - previous_private_key = Path(previous_secrets_service_private_key_path).read_bytes() - if previous_private_key is not None and len(previous_private_key) > 0: - previous_secrets_service_private_key = serialization.load_pem_private_key( - previous_private_key, password=None + previous_private_key_content = Path(previous_secrets_service_private_key_path).read_bytes() + if previous_private_key_content is not None and len(previous_private_key_content) > 0: + previous_private_key = serialization.load_pem_private_key( + previous_private_key_content, password=None ) else: - keycloak_url = os.environ.get(f"{prefix}KEYCLOAK_URL") - if keycloak_url is None: - raise errors.ConfigurationError(message="The Keycloak URL has to be specified.") - keycloak_url = keycloak_url.rstrip("/") - keycloak_realm = os.environ.get(f"{prefix}KEYCLOAK_REALM", "Renku") - oidc_disc_data = oidc_discovery(keycloak_url, keycloak_realm) - jwks_url = oidc_disc_data.get("jwks_uri") - if jwks_url is None: - raise errors.ConfigurationError( - message="The JWKS url for Keycloak cannot be found from the OIDC discovery endpoint." 
- ) - algorithms = os.environ.get(f"{prefix}KEYCLOAK_TOKEN_SIGNATURE_ALGS") - if algorithms is None: - raise errors.ConfigurationError(message="At least one token signature algorithm is required.") - algorithms_lst = [i.strip() for i in algorithms.split(",")] - jwks = PyJWKClient(jwks_url) - authenticator = KeycloakAuthenticator(jwks=jwks, algorithms=algorithms_lst) - core_client = K8sCoreClient() - secrets_service_private_key_path = os.getenv( - f"{prefix}SECRETS_SERVICE_PRIVATE_KEY_PATH", "/secrets_service_private_key" - ) - secrets_service_private_key = serialization.load_pem_private_key( - Path(secrets_service_private_key_path).read_bytes(), password=None - ) - previous_secrets_service_private_key_path = os.getenv(f"{prefix}PREVIOUS_SECRETS_SERVICE_PRIVATE_KEY_PATH") + private_key_path = os.getenv("SECRETS_SERVICE_PRIVATE_KEY_PATH", "/secrets_service_private_key") + private_key = serialization.load_pem_private_key(Path(private_key_path).read_bytes(), password=None) + previous_secrets_service_private_key_path = os.getenv("PREVIOUS_SECRETS_SERVICE_PRIVATE_KEY_PATH") if previous_secrets_service_private_key_path and Path(previous_secrets_service_private_key_path).exists(): - previous_secrets_service_private_key = serialization.load_pem_private_key( + previous_private_key = serialization.load_pem_private_key( Path(previous_secrets_service_private_key_path).read_bytes(), password=None ) - if not isinstance(secrets_service_private_key, rsa.RSAPrivateKey): + if not isinstance(private_key, rsa.RSAPrivateKey): raise errors.ConfigurationError(message="Secret service private key is not an RSAPrivateKey") - if previous_secrets_service_private_key is not None and not isinstance( - previous_secrets_service_private_key, rsa.RSAPrivateKey - ): + if previous_private_key is not None and not isinstance(previous_private_key, rsa.RSAPrivateKey): raise errors.ConfigurationError(message="Old secret service private key is not an RSAPrivateKey") - return cls( - version=version, - db=db, - authenticator=authenticator, - secrets_service_private_key=secrets_service_private_key, - previous_secrets_service_private_key=previous_secrets_service_private_key, - core_client=core_client, - ) + return cls(private_key=private_key, previous_private_key=previous_private_key) diff --git a/components/renku_data_services/secrets/core.py b/components/renku_data_services/secrets/core.py index a2f9476ee..452ff06bc 100644 --- a/components/renku_data_services/secrets/core.py +++ b/components/renku_data_services/secrets/core.py @@ -2,17 +2,17 @@ import asyncio from base64 import b64encode +from typing import TYPE_CHECKING from cryptography.hazmat.primitives.asymmetric import rsa from kubernetes import client as k8s_client from prometheus_client import Counter, Enum -from sanic.log import logger from ulid import ULID from renku_data_services import base_models, errors +from renku_data_services.app_config import logging from renku_data_services.base_models.core import InternalServiceAdmin from renku_data_services.k8s.client_interfaces import K8sCoreClientInterface -from renku_data_services.secrets.db import UserSecretsRepo from renku_data_services.secrets.models import OwnerReference, Secret from renku_data_services.users.db import UserRepo from renku_data_services.utils.cryptography import ( @@ -23,6 +23,11 @@ generate_random_encryption_key, ) +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from renku_data_services.secrets.db import LowLevelUserSecretsRepo + async def create_k8s_secret( user: base_models.APIUser, @@ -30,11 
+35,11 @@ async def create_k8s_secret( namespace: str, secret_ids: list[ULID], owner_references: list[OwnerReference], - secrets_repo: UserSecretsRepo, + secrets_repo: "LowLevelUserSecretsRepo", secret_service_private_key: rsa.RSAPrivateKey, previous_secret_service_private_key: rsa.RSAPrivateKey | None, core_client: K8sCoreClientInterface, - key_mapping: dict[str, str] | None, + key_mapping: dict[str, str | list[str]] | None, ) -> None: """Creates a single k8s secret from a list of user secrets stored in the DB.""" secrets = await secrets_repo.get_secrets_by_ids(requested_by=user, secret_ids=secret_ids) @@ -44,10 +49,17 @@ async def create_k8s_secret( if len(missing_secret_ids) > 0: raise errors.MissingResourceError(message=f"Couldn't find secrets with ids {', '.join(missing_secret_ids)}") - if key_mapping: - if set(key_mapping) != requested_secret_ids: + def ensure_list(value: str | list[str]) -> list[str]: + return [value] if isinstance(value, str) else value + + key_mapping_with_lists_only = {key: ensure_list(key_mapping[key]) for key in key_mapping} if key_mapping else None + + if key_mapping_with_lists_only: + if key_mapping_with_lists_only.keys() != requested_secret_ids: raise errors.ValidationError(message="Key mapping must include all requested secret IDs") - if len(key_mapping) != len(set(key_mapping.values())): + + all_keys = [key for value in key_mapping_with_lists_only.values() for key in value] + if len(all_keys) != len(set(all_keys)): raise errors.ValidationError(message="Key mapping values are not unique") decrypted_secrets = {} @@ -63,11 +75,17 @@ async def create_k8s_secret( raise decrypted_value = decrypt_string(decryption_key, user.id, secret.encrypted_value).encode() # type: ignore - key = secret.name if not key_mapping else key_mapping[str(secret.id)] - decrypted_secrets[key] = b64encode(decrypted_value).decode() + + keys = ( + key_mapping_with_lists_only[str(secret.id)] + if key_mapping_with_lists_only + else [secret.default_filename] + ) + for key in keys: + decrypted_secrets[key] = b64encode(decrypted_value).decode() except Exception as e: # don't wrap the error, we don't want secrets accidentally leaking. - raise errors.SecretDecryptionError(message=f"An error occurred decrypting secrets: {str(type(e))}") + raise errors.SecretDecryptionError(message=f"An error occurred decrypting secrets: {str(type(e))}") from None owner_refs = [] if owner_references: @@ -96,14 +114,14 @@ async def create_k8s_secret( sanitized_secret, ) # don't wrap the error, we don't want secrets accidentally leaking. - raise errors.SecretCreationError(message=f"An error occurred creating secrets: {str(type(e))}") + raise errors.SecretCreationError(message=f"An error occurred creating secrets: {str(type(e))}") from None async def rotate_encryption_keys( requested_by: InternalServiceAdmin, new_key: rsa.RSAPrivateKey, old_key: rsa.RSAPrivateKey, - secrets_repo: UserSecretsRepo, + secrets_repo: "LowLevelUserSecretsRepo", batch_size: int = 100, ) -> None: """Rotate all secrets to a new private key. 
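To illustrate the widened `key_mapping` contract handled above (a single user secret may now fan out to several keys in the resulting K8s secret), here is a short sketch; the secret ids are the ones used in the api.spec.yaml example, and the normalization mirrors the `ensure_list` helper defined inside `create_k8s_secret`:

    def ensure_list(value: str | list[str]) -> list[str]:
        return [value] if isinstance(value, str) else value


    # Plain strings and lists of strings may be mixed per secret id.
    key_mapping: dict[str, str | list[str]] = {
        "01J2F86WS7S8N6159WA3X4042H": "access_key_id",
        "4STRRKAQ0428936ZP9R9YWN2X7": ["my_key", "my_copied_key"],
    }

    normalized = {secret_id: ensure_list(value) for secret_id, value in key_mapping.items()}
    # normalized == {"01J2F86WS7S8N6159WA3X4042H": ["access_key_id"],
    #                "4STRRKAQ0428936ZP9R9YWN2X7": ["my_key", "my_copied_key"]}

    # Validation as implemented above: the mapping must cover all requested ids,
    # and the flattened key list must be unique across all secrets.
    all_keys = [key for keys in normalized.values() for key in keys]
    assert len(all_keys) == len(set(all_keys))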
@@ -128,7 +146,7 @@ async def rotate_encryption_keys( if new_secret is not None: updated_secrets.append(new_secret) - await secrets_repo.update_secrets(requested_by, updated_secrets) + await secrets_repo.update_secret_values(requested_by, updated_secrets) processed_secrets_metrics.inc(len(updated_secrets)) except: running_metrics.state("errored") @@ -153,12 +171,12 @@ async def rotate_single_encryption_key( decryption_key = decrypt_rsa(old_key, secret.encrypted_key) decrypted_value = decrypt_string(decryption_key, user_id, secret.encrypted_value).encode() new_encryption_key = generate_random_encryption_key() - secret.encrypted_value = encrypt_string(new_encryption_key, user_id, decrypted_value.decode()) - secret.encrypted_key = encrypt_rsa(new_key.public_key(), new_encryption_key) + encrypted_value = encrypt_string(new_encryption_key, user_id, decrypted_value.decode()) + encrypted_key = encrypt_rsa(new_key.public_key(), new_encryption_key) + return secret.update_encrypted_value(encrypted_value=encrypted_value, encrypted_key=encrypted_key) except Exception as e: logger.error(f"Couldn't decrypt secret {secret.name}({secret.id}): {e}") return None - return secret async def encrypt_user_secret( diff --git a/components/renku_data_services/secrets/db.py b/components/renku_data_services/secrets/db.py index 45f32d3e9..73f9e5b23 100644 --- a/components/renku_data_services/secrets/db.py +++ b/components/renku_data_services/secrets/db.py @@ -1,23 +1,28 @@ """Database repo for secrets.""" +import random +import string from collections.abc import AsyncGenerator, Callable, Sequence from datetime import UTC, datetime, timedelta from typing import cast -from sqlalchemy import Select, delete, or_, select +from cryptography.hazmat.primitives.asymmetric import rsa +from sqlalchemy import delete, select from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession from ulid import ULID from renku_data_services.base_api.auth import APIUser, only_authenticated -from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId +from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId, Slug from renku_data_services.errors import errors -from renku_data_services.secrets.models import Secret, SecretKind, UnsavedSecret +from renku_data_services.secrets.core import encrypt_user_secret +from renku_data_services.secrets.models import Secret, SecretKind, SecretPatch, UnsavedSecret from renku_data_services.secrets.orm import SecretORM +from renku_data_services.users.db import UserRepo -class UserSecretsRepo: - """An adapter for accessing users secrets.""" +class LowLevelUserSecretsRepo: + """An adapter for accessing user secrets without encryption handling.""" def __init__( self, @@ -25,38 +30,6 @@ def __init__( ) -> None: self.session_maker = session_maker - def _get_stmt(self, requested_by: APIUser) -> Select[tuple[SecretORM]]: - return ( - select(SecretORM) - .where(SecretORM.user_id == requested_by.id) - .where( - or_( - SecretORM.expiration_timestamp.is_(None), - SecretORM.expiration_timestamp > datetime.now(UTC) + timedelta(seconds=120), - ) - ) - ) - - @only_authenticated - async def get_user_secrets(self, requested_by: APIUser, kind: SecretKind) -> list[Secret]: - """Get all user's secrets from the database.""" - async with self.session_maker() as session: - stmt = self._get_stmt(requested_by).where(SecretORM.kind == kind) - res = await session.execute(stmt) - orm = res.scalars().all() - return [o.dump() for o in orm] - - 
@only_authenticated - async def get_secret_by_id(self, requested_by: APIUser, secret_id: ULID) -> Secret | None: - """Get a specific user secret from the database.""" - async with self.session_maker() as session: - stmt = self._get_stmt(requested_by).where(SecretORM.id == secret_id) - res = await session.execute(stmt) - orm = res.scalar_one_or_none() - if orm is None: - return None - return orm.dump() - @only_authenticated async def get_secrets_by_ids(self, requested_by: APIUser, secret_ids: list[ULID]) -> list[Secret]: """Get a specific user secrets from the database.""" @@ -67,69 +40,6 @@ async def get_secrets_by_ids(self, requested_by: APIUser, secret_ids: list[ULID] orms = res.scalars() return [orm.dump() for orm in orms] - @only_authenticated - async def insert_secret(self, requested_by: APIUser, secret: UnsavedSecret) -> Secret: - """Insert a new secret.""" - - async with self.session_maker() as session, session.begin(): - orm = SecretORM( - name=secret.name, - user_id=cast(str, requested_by.id), - encrypted_value=secret.encrypted_value, - encrypted_key=secret.encrypted_key, - kind=secret.kind, - expiration_timestamp=secret.expiration_timestamp, - ) - session.add(orm) - - try: - await session.flush() - except IntegrityError as err: - if len(err.args) > 0 and "UniqueViolationError" in err.args[0]: - raise errors.ValidationError( - message="The name for the secret should be unique but it already exists", - detail="Please modify the name field and then retry", - ) - else: - raise - return orm.dump() - - @only_authenticated - async def update_secret( - self, - requested_by: APIUser, - secret_id: ULID, - encrypted_value: bytes, - encrypted_key: bytes, - expiration_timestamp: datetime | None, - ) -> Secret: - """Update a secret.""" - - async with self.session_maker() as session, session.begin(): - result = await session.execute(self._get_stmt(requested_by).where(SecretORM.id == secret_id)) - secret = result.scalar_one_or_none() - if secret is None: - raise errors.MissingResourceError(message=f"The secret with id '{secret_id}' cannot be found") - - secret.update( - encrypted_value=encrypted_value, - encrypted_key=encrypted_key, - expiration_timestamp=expiration_timestamp, - ) - return secret.dump() - - @only_authenticated - async def delete_secret(self, requested_by: APIUser, secret_id: ULID) -> None: - """Delete a secret.""" - - async with self.session_maker() as session, session.begin(): - result = await session.execute(self._get_stmt(requested_by).where(SecretORM.id == secret_id)) - secret = result.scalar_one_or_none() - if secret is None: - return None - - await session.execute(delete(SecretORM).where(SecretORM.id == secret.id)) - async def get_all_secrets_batched( self, requested_by: InternalServiceAdmin, batch_size: int = 100 ) -> AsyncGenerator[Sequence[tuple[Secret, str]], None]: @@ -153,8 +63,8 @@ async def get_all_secrets_batched( offset += batch_size - async def update_secrets(self, requested_by: InternalServiceAdmin, secrets: list[Secret]) -> None: - """Update multiple secrets. + async def update_secret_values(self, requested_by: InternalServiceAdmin, secrets: list[Secret]) -> None: + """Update multiple secret values at once. Only for internal use. 
""" @@ -180,3 +90,130 @@ async def update_secrets(self, requested_by: InternalServiceAdmin, secrets: list secret.modification_date = datetime.now(UTC).replace(microsecond=0) await session.flush() + + +class UserSecretsRepo: + """An adapter for accessing users secrets with encryption handling.""" + + def __init__( + self, + session_maker: Callable[..., AsyncSession], + low_level_repo: LowLevelUserSecretsRepo, + user_repo: UserRepo, + secret_service_public_key: rsa.RSAPublicKey, + ) -> None: + self.session_maker = session_maker + self.low_level_repo = low_level_repo + self.user_repo = user_repo + self.secret_service_public_key = secret_service_public_key + + @only_authenticated + async def get_user_secrets(self, requested_by: APIUser, kind: SecretKind) -> list[Secret]: + """Get all user's secrets from the database.""" + async with self.session_maker() as session: + stmt = select(SecretORM).where(SecretORM.user_id == requested_by.id).where(SecretORM.kind == kind) + res = await session.execute(stmt) + orm = res.scalars().all() + return [o.dump() for o in orm] + + @only_authenticated + async def get_secret_by_id(self, requested_by: APIUser, secret_id: ULID) -> Secret: + """Get a specific user secret from the database.""" + async with self.session_maker() as session: + stmt = select(SecretORM).where(SecretORM.user_id == requested_by.id).where(SecretORM.id == secret_id) + res = await session.execute(stmt) + orm = res.scalar_one_or_none() + if not orm: + raise errors.MissingResourceError(message=f"The secret with id {secret_id} cannot be found.") + return orm.dump() + + async def insert_secret(self, requested_by: APIUser, secret: UnsavedSecret) -> Secret: + """Insert a new secret.""" + if requested_by.id is None: + raise errors.UnauthorizedError(message="You have to be authenticated to perform this operation.") + + default_filename = secret.default_filename + if default_filename is None: + suffix = "".join([random.choice(string.ascii_lowercase + string.digits) for _ in range(8)]) # nosec B311 + name_slug = Slug.from_name(secret.name).value + default_filename = f"{name_slug[:200]}-{suffix}" + + encrypted_value, encrypted_key = await encrypt_user_secret( + user_repo=self.user_repo, + requested_by=requested_by, + secret_service_public_key=self.secret_service_public_key, + secret_value=secret.secret_value, + ) + + async with self.session_maker() as session, session.begin(): + secret_orm = SecretORM( + name=secret.name, + default_filename=default_filename, + user_id=requested_by.id, + encrypted_value=encrypted_value, + encrypted_key=encrypted_key, + kind=secret.kind, + ) + session.add(secret_orm) + + try: + await session.flush() + except IntegrityError as err: + if len(err.args) > 0 and "UniqueViolationError" in err.args[0]: + raise errors.ValidationError( + message="The default_filename for the secret should be unique but it already exists", + detail="Please modify the default_filename field and then retry", + ) from None + else: + raise + return secret_orm.dump() + + @only_authenticated + async def update_secret(self, requested_by: APIUser, secret_id: ULID, patch: SecretPatch) -> Secret: + """Update a secret.""" + + async with self.session_maker() as session, session.begin(): + result = await session.execute( + select(SecretORM).where(SecretORM.id == secret_id).where(SecretORM.user_id == requested_by.id) + ) + secret = result.scalar_one_or_none() + if secret is None: + raise errors.MissingResourceError(message=f"The secret with id '{secret_id}' cannot be found") + + if patch.name is not None: + 
secret.name = patch.name + if patch.default_filename is not None and patch.default_filename != secret.default_filename: + existing_secret = await session.scalar( + select(SecretORM) + .where(SecretORM.user_id == requested_by.id) + .where(SecretORM.default_filename == patch.default_filename) + ) + if existing_secret is not None: + raise errors.ConflictError( + message=f"A user secret with the default filename '{patch.default_filename}' already exists." + ) + secret.default_filename = patch.default_filename + if patch.secret_value is not None: + encrypted_value, encrypted_key = await encrypt_user_secret( + user_repo=self.user_repo, + requested_by=requested_by, + secret_service_public_key=self.secret_service_public_key, + secret_value=patch.secret_value, + ) + secret.update(encrypted_value=encrypted_value, encrypted_key=encrypted_key) + + return secret.dump() + + @only_authenticated + async def delete_secret(self, requested_by: APIUser, secret_id: ULID) -> None: + """Delete a secret.""" + + async with self.session_maker() as session, session.begin(): + result = await session.execute( + select(SecretORM).where(SecretORM.id == secret_id).where(SecretORM.user_id == requested_by.id) + ) + secret = result.scalar_one_or_none() + if secret is None: + return None + + await session.execute(delete(SecretORM).where(SecretORM.id == secret.id)) diff --git a/components/renku_data_services/secrets/models.py b/components/renku_data_services/secrets/models.py index 491a25d01..de604577c 100644 --- a/components/renku_data_services/secrets/models.py +++ b/components/renku_data_services/secrets/models.py @@ -1,36 +1,51 @@ """Base models for secrets.""" -from dataclasses import dataclass -from datetime import UTC, datetime -from enum import Enum +from dataclasses import dataclass, field +from datetime import datetime +from enum import StrEnum from kubernetes import client as k8s_client -from pydantic import BaseModel, Field from ulid import ULID -class SecretKind(Enum): +class SecretKind(StrEnum): """Kind of secret. 
This should have the same values as users.apispec.SecretKind.""" general = "general" storage = "storage" -class UnsavedSecret(BaseModel): - """Secret objects not stored in the database.""" +@dataclass(frozen=True, eq=True, kw_only=True) +class Secret: + """Secret object stored in the database.""" + id: ULID name: str - encrypted_value: bytes = Field(repr=False) - encrypted_key: bytes = Field(repr=False) - modification_date: datetime = Field(default_factory=lambda: datetime.now(UTC).replace(microsecond=0), init=False) + default_filename: str + encrypted_value: bytes = field(repr=False) + encrypted_key: bytes = field(repr=False) kind: SecretKind - expiration_timestamp: datetime | None = Field(default=None) + modification_date: datetime + session_secret_slot_ids: list[ULID] + """List of session secret slot IDs where this user secret is used.""" -class Secret(UnsavedSecret): - """Secret object stored in the database.""" + data_connector_ids: list[ULID] + """List of data connector IDs where this user secret is used.""" - id: ULID = Field() + def update_encrypted_value(self, encrypted_value: bytes, encrypted_key: bytes) -> "Secret": + """Returns a new secret instance with updated encrypted_value and encrypted_key.""" + return Secret( + id=self.id, + name=self.name, + default_filename=self.default_filename, + encrypted_value=encrypted_value, + encrypted_key=encrypted_key, + kind=self.kind, + modification_date=self.modification_date, + session_secret_slot_ids=self.session_secret_slot_ids, + data_connector_ids=self.data_connector_ids, + ) @dataclass @@ -56,3 +71,22 @@ def to_k8s(self) -> k8s_client.V1OwnerReference: uid=self.uid, controller=True, ) + + +@dataclass(frozen=True, eq=True, kw_only=True) +class UnsavedSecret: + """Model to request the creation of a new user secret.""" + + name: str + default_filename: str | None + secret_value: str = field(repr=False) + kind: SecretKind + + +@dataclass(frozen=True, eq=True, kw_only=True) +class SecretPatch: + """Model for changes requested on a user secret.""" + + name: str | None + default_filename: str | None + secret_value: str | None = field(repr=False) diff --git a/components/renku_data_services/secrets/orm.py b/components/renku_data_services/secrets/orm.py index 6b872dc94..37c6e11ba 100644 --- a/components/renku_data_services/secrets/orm.py +++ b/components/renku_data_services/secrets/orm.py @@ -1,23 +1,27 @@ """Secrets ORM.""" from datetime import UTC, datetime -from typing import Optional +from typing import TYPE_CHECKING, Optional from sqlalchemy import DateTime, ForeignKey, LargeBinary, MetaData, String, UniqueConstraint -from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column +from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column, relationship from ulid import ULID +from renku_data_services.base_orm.registry import COMMON_ORM_REGISTRY from renku_data_services.secrets import models from renku_data_services.users.orm import UserORM from renku_data_services.utils.sqlalchemy import ULIDType -metadata_obj = MetaData(schema="secrets") # Has to match alembic ini section name +if TYPE_CHECKING: + from renku_data_services.data_connectors.orm import DataConnectorSecretORM + from renku_data_services.project.orm import SessionSecretORM class BaseORM(MappedAsDataclass, DeclarativeBase): """Base class for all ORM classes.""" - metadata = metadata_obj + metadata = MetaData(schema="secrets") + registry = COMMON_ORM_REGISTRY class SecretORM(BaseORM): @@ -27,11 +31,20 @@ class SecretORM(BaseORM): 
__table_args__ = ( UniqueConstraint( "user_id", - "name", - name="_unique_name_user", + "default_filename", + name="_unique_user_id_default_filename", ), ) - name: Mapped[str] = mapped_column(String(256)) + + id: Mapped[ULID] = mapped_column("id", ULIDType, primary_key=True, default_factory=lambda: str(ULID()), init=False) + """ID of this user secret.""" + + name: Mapped[str] = mapped_column(String(99)) + """Name of the user secret.""" + + default_filename: Mapped[str] = mapped_column(String(256)) + """Filename to give to this secret when mounted in Renku 1.0 sessions.""" + encrypted_value: Mapped[bytes] = mapped_column(LargeBinary()) encrypted_key: Mapped[bytes] = mapped_column(LargeBinary()) kind: Mapped[models.SecretKind] @@ -39,36 +52,36 @@ class SecretORM(BaseORM): "expiration_timestamp", DateTime(timezone=True), default=None, nullable=True ) modification_date: Mapped[datetime] = mapped_column( - "modification_date", DateTime(timezone=True), default_factory=lambda: datetime.now(UTC).replace(microsecond=0) + "modification_date", + DateTime(timezone=True), + default_factory=lambda: datetime.now(UTC), + nullable=False, ) - id: Mapped[ULID] = mapped_column("id", ULIDType, primary_key=True, default_factory=lambda: str(ULID()), init=False) + user_id: Mapped[Optional[str]] = mapped_column( "user_id", ForeignKey(UserORM.keycloak_id, ondelete="CASCADE"), default=None, index=True, nullable=True ) + session_secrets: Mapped[list["SessionSecretORM"]] = relationship( + init=False, repr=False, back_populates="secret", lazy="selectin", default_factory=list + ) + + data_connector_secrets: Mapped[list["DataConnectorSecretORM"]] = relationship( + init=False, repr=False, back_populates="secret", lazy="selectin", default_factory=list + ) + def dump(self) -> models.Secret: """Create a secret object from the ORM object.""" - secret = models.Secret( + return models.Secret( id=self.id, name=self.name, + default_filename=self.default_filename, encrypted_value=self.encrypted_value, encrypted_key=self.encrypted_key, kind=self.kind, - expiration_timestamp=self.expiration_timestamp, - ) - secret.modification_date = self.modification_date - return secret - - @classmethod - def load(cls, secret: models.UnsavedSecret) -> "SecretORM": - """Create an ORM object from the user object.""" - return cls( - name=secret.name, - encrypted_value=secret.encrypted_value, - encrypted_key=secret.encrypted_key, - kind=secret.kind, - expiration_timestamp=secret.expiration_timestamp, - modification_date=secret.modification_date, + modification_date=self.modification_date, + session_secret_slot_ids=[item.secret_slot_id for item in self.session_secrets], + data_connector_ids=[item.data_connector_id for item in self.data_connector_secrets], ) def update(self, encrypted_value: bytes, encrypted_key: bytes, expiration_timestamp: datetime | None) -> None: @@ -76,4 +89,4 @@ def update(self, encrypted_value: bytes, encrypted_key: bytes, expiration_timest self.encrypted_value = encrypted_value self.encrypted_key = encrypted_key self.expiration_timestamp = expiration_timestamp - self.modification_date = datetime.now(UTC).replace(microsecond=0) + self.modification_date = datetime.now(UTC) diff --git a/components/renku_data_services/session/api.spec.yaml b/components/renku_data_services/session/api.spec.yaml index 11b188344..49e3e8fa3 100644 --- a/components/renku_data_services/session/api.spec.yaml +++ b/components/renku_data_services/session/api.spec.yaml @@ -11,6 +11,19 @@ paths: /environments: get: summary: Get all global environments + 
parameters: + - in: query + style: form + explode: true + name: get_environment_params + schema: + type: object + additionalProperties: false + properties: + include_archived: + type: boolean + default: false + description: Whether to return archived environments or not responses: "200": description: List of global environments @@ -240,6 +253,107 @@ paths: $ref: "#/components/responses/Error" tags: - session_launchers + /builds/{build_id}: + parameters: + - in: path + name: build_id + required: true + schema: + $ref: "#/components/schemas/Ulid" + get: + summary: Get the details of a container image build + responses: + "200": + description: The container image build + content: + application/json: + schema: + $ref: "#/components/schemas/Build" + default: + $ref: "#/components/responses/Error" + tags: + - builds + patch: + summary: Update a container image build + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/BuildPatch" + responses: + "200": + description: The updated container image build + content: + application/json: + schema: + $ref: "#/components/schemas/Build" + default: + $ref: "#/components/responses/Error" + tags: + - builds + /builds/{build_id}/logs: + parameters: + - in: path + name: build_id + required: true + schema: + $ref: "#/components/schemas/Ulid" + get: + summary: Get the logs of a container image build + parameters: + - description: The maximum number of most-recent lines to return for each container + in: query + name: max_lines + required: false + schema: + type: integer + default: 250 + responses: + "200": + description: The build logs + content: + application/json: + schema: + $ref: "#/components/schemas/BuildLogs" + default: + $ref: "#/components/responses/Error" + tags: + - builds + /environments/{environment_id}/builds: + parameters: + - in: path + name: environment_id + required: true + schema: + $ref: "#/components/schemas/Ulid" + get: + summary: Get a session environment's list of builds + responses: + "200": + description: List of container image builds + content: + application/json: + schema: + $ref: "#/components/schemas/BuildList" + default: + $ref: "#/components/responses/Error" + tags: + - builds + post: + summary: Create a new container image build + responses: + "201": + description: The build was created + content: + application/json: + schema: + $ref: "#/components/schemas/Build" + # TODO: 409 CONFLICT response + default: + $ref: "#/components/responses/Error" + tags: + - builds components: schemas: EnvironmentList: @@ -247,7 +361,7 @@ components: type: array items: $ref: "#/components/schemas/Environment" - Environment: + EnvironmentWithoutContainerImage: description: A Renku 2.0 session environment type: object properties: @@ -259,8 +373,6 @@ components: $ref: "#/components/schemas/CreationDate" description: $ref: "#/components/schemas/Description" - container_image: - $ref: "#/components/schemas/ContainerImage" default_url: $ref: "#/components/schemas/DefaultUrl" uid: @@ -277,39 +389,63 @@ components: $ref: "#/components/schemas/EnvironmentCommand" args: $ref: "#/components/schemas/EnvironmentArgs" + is_archived: + $ref: "#/components/schemas/IsArchived" required: - id - name - creation_date - - container_image - port - uid - gid - default_url - example: - id: 01AN4Z79ZS6XX96588FDX0H099 - name: JupyterLab environment - creation_date: "2023-11-01T17:32:28Z" - description: JupyterLab session environment - container_image: renku-jupyter:latest - default_url: "/lab" - port: 8080 - 
working_directory: /home/jovyan/work - mount_directory: /home/jovyan/work - uid: 1000 - gid: 1000 + Environment: + allOf: + - $ref: "#/components/schemas/EnvironmentWithoutContainerImage" + - type: object + properties: + container_image: + $ref: "#/components/schemas/ContainerImage" + required: + - container_image EnvironmentGetInLauncher: + oneOf: + - $ref: "#/components/schemas/EnvironmentWithImageGet" + - $ref: "#/components/schemas/EnvironmentWithBuildGet" + EnvironmentWithImageGet: allOf: - $ref: "#/components/schemas/Environment" - type: object properties: + environment_image_source: + $ref: "#/components/schemas/EnvironmentImageSourceImage" environment_kind: - $ref: "#/components/schemas/EnvironmentKind" + allOf: + - $ref: "#/components/schemas/EnvironmentKind" + default: custom required: + - environment_image_source - environment_kind - example: - environment_kind: global_environment - EnvironmentPostInLauncher: + EnvironmentWithBuildGet: + allOf: + - $ref: "#/components/schemas/EnvironmentWithoutContainerImage" + - type: object + properties: + container_image: + $ref: "#/components/schemas/ContainerImage" + build_parameters: + $ref: "#/components/schemas/BuildParameters" + environment_image_source: + $ref: "#/components/schemas/EnvironmentImageSourceBuild" + environment_kind: + allOf: + - $ref: "#/components/schemas/EnvironmentKind" + default: custom + required: + - build_parameters + - environment_image_source + - environment_kind + EnvironmentPostInLauncherHelper: allOf: - $ref: "#/components/schemas/EnvironmentPost" - type: object @@ -318,8 +454,10 @@ components: $ref: "#/components/schemas/EnvironmentKind" required: - environment_kind - example: - environment_kind: global_environment + EnvironmentPostInLauncher: + oneOf: + - $ref: "#/components/schemas/EnvironmentPostInLauncherHelper" + - $ref: "#/components/schemas/BuildParametersPost" EnvironmentPost: description: Data required to create a session environment type: object @@ -333,17 +471,14 @@ components: default_url: allOf: - $ref: "#/components/schemas/DefaultUrl" - - default: /lab default: /lab uid: allOf: - $ref: "#/components/schemas/EnvironmentUid" - - default: 1000 default: 1000 gid: allOf: - $ref: "#/components/schemas/EnvironmentGid" - - default: 1000 default: 1000 working_directory: $ref: "#/components/schemas/EnvironmentWorkingDirectory" @@ -352,15 +487,21 @@ components: port: allOf: - $ref: "#/components/schemas/EnvironmentPort" - - default: 8080 default: 8080 command: $ref: "#/components/schemas/EnvironmentCommand" args: $ref: "#/components/schemas/EnvironmentArgs" + is_archived: + allOf: + - $ref: "#/components/schemas/IsArchived" + default: false + environment_image_source: + $ref: "#/components/schemas/EnvironmentImageSourceImage" required: - name - container_image + - environment_image_source EnvironmentPatchInLauncher: allOf: - $ref: "#/components/schemas/EnvironmentPatch" @@ -368,6 +509,10 @@ components: properties: environment_kind: $ref: "#/components/schemas/EnvironmentKind" + environment_image_source: + $ref: "#/components/schemas/EnvironmentImageSource" + build_parameters: + $ref: "#/components/schemas/BuildParametersPatch" EnvironmentPatch: type: object description: Update a session environment @@ -386,15 +531,17 @@ components: gid: $ref: "#/components/schemas/EnvironmentGid" working_directory: - $ref: "#/components/schemas/EnvironmentWorkingDirectory" + $ref: "#/components/schemas/EnvironmentWorkingDirectoryPatch" mount_directory: - $ref: "#/components/schemas/EnvironmentMountDirectory" + $ref: 
"#/components/schemas/EnvironmentMountDirectoryPatch" port: $ref: "#/components/schemas/EnvironmentPort" command: $ref: "#/components/schemas/EnvironmentCommand" args: $ref: "#/components/schemas/EnvironmentArgs" + is_archived: + $ref: "#/components/schemas/IsArchived" SessionLaunchersList: description: A list of Renku session launchers type: array @@ -419,6 +566,10 @@ components: $ref: "#/components/schemas/EnvironmentGetInLauncher" resource_class_id: $ref: "#/components/schemas/ResourceClassId" + disk_storage: + $ref: "#/components/schemas/DiskStorage" + env_variables: + $ref: "#/components/schemas/EnvVariables" required: - id - project_id @@ -426,25 +577,6 @@ components: - creation_date - environment - resource_class_id - example: - id: 01AN4Z79ZS5XN0F25N3DB94T4R - project_id: 01AN4Z79ZS5XN0F25N3DB94T4R - name: Renku R Session - creation_date: "2023-11-01T17:32:28Z" - description: R compute session - environment: - id: 01AN4Z79ZS6XX96588FDX0H099 - name: Rstudio - creation_date: "2023-11-01T17:32:28Z" - description: JupyterLab session environment - environment_kind: GLOBAL - container_image: rocker/rstudio - default_url: "/rstudio" - port: 8080 - working_directory: /home/rstudio/work - mount_directory: /home/rstudio/work - uid: 1000 - gid: 1000 SessionLauncherPost: description: Data required to create a session launcher type: object @@ -458,6 +590,10 @@ components: $ref: "#/components/schemas/Description" resource_class_id: $ref: "#/components/schemas/ResourceClassId" + disk_storage: + $ref: "#/components/schemas/DiskStorage" + env_variables: + $ref: "#/components/schemas/EnvVariables" environment: oneOf: - $ref: "#/components/schemas/EnvironmentPostInLauncher" @@ -482,6 +618,10 @@ components: $ref: "#/components/schemas/Description" resource_class_id: $ref: "#/components/schemas/ResourceClassId" + disk_storage: + $ref: "#/components/schemas/DiskStoragePatch" + env_variables: + $ref: "#/components/schemas/EnvVariables" environment: oneOf: - $ref: "#/components/schemas/EnvironmentPatchInLauncher" @@ -498,6 +638,34 @@ components: minLength: 1 maxLength: 99 example: My Renku Session :) + BuilderVariant: + description: Type of virtual environment manager when building custom environments. + type: string + minLength: 1 + maxLength: 99 + FrontendVariant: + description: User's Frontend Choice. 
+ type: string + minLength: 1 + maxLength: 99 + RepositoryRevision: + description: A git revision + type: string + minLength: 1 + maxLength: 500 + RepositoryRevisionPatch: + description: A git revision + type: string + maxLength: 500 + BuildContextDir: + description: The relative path to a folder + type: string + minLength: 1 + maxLength: 500 + BuildContextDirPatch: + description: The relative path to a folder + type: string + maxLength: 500 EnvironmentIdOnlyPatch: type: object properties: @@ -511,12 +679,24 @@ components: required: - id EnvironmentKind: - description: Kind of environment to use + description: Kind of the environment type: string enum: - GLOBAL - CUSTOM - example: CUSTOM + EnvironmentImageSourceImage: + type: string + enum: + - image + EnvironmentImageSourceBuild: + type: string + enum: + - build + EnvironmentImageSource: + description: Source of the environment's image + oneOf: + - $ref: "#/components/schemas/EnvironmentImageSourceImage" + - $ref: "#/components/schemas/EnvironmentImageSourceBuild" EnvironmentId: description: Id of the environment to use type: string @@ -531,11 +711,59 @@ components: description: A description for the resource type: string maxLength: 500 + BuildParameters: + description: Build parameters + type: object + additionalProperties: false + properties: + repository: + $ref: "#/components/schemas/Repository" + builder_variant: + $ref: "#/components/schemas/BuilderVariant" + frontend_variant: + $ref: "#/components/schemas/FrontendVariant" + repository_revision: + $ref: "#/components/schemas/RepositoryRevision" + context_dir: + $ref: "#/components/schemas/BuildContextDir" + required: + - repository + - builder_variant + - frontend_variant + BuildParametersPost: + allOf: + - $ref: "#/components/schemas/BuildParameters" + - type: object + properties: + environment_image_source: + $ref: "#/components/schemas/EnvironmentImageSourceBuild" + required: + - environment_image_source + BuildParametersPatch: + description: Data for updating a build + type: object + properties: + repository: + $ref: "#/components/schemas/Repository" + builder_variant: + $ref: "#/components/schemas/BuilderVariant" + frontend_variant: + $ref: "#/components/schemas/FrontendVariant" + repository_revision: + $ref: "#/components/schemas/RepositoryRevisionPatch" + context_dir: + $ref: "#/components/schemas/BuildContextDirPatch" ContainerImage: description: A container image type: string maxLength: 500 + # NOTE: regex for an image name, optionally with a tag or sha256 specified + # based on https://github.com/opencontainers/distribution-spec/blob/main/spec.md + pattern: "^[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*(\\/[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*)*(:[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}|@sha256:[a-fA-F0-9]{64}){0,1}$" example: renku/renkulab-py:3.10-0.18.1 + Repository: + description: A git repository URL + type: string DefaultUrl: description: The default path to open in a session type: string @@ -546,6 +774,15 @@ components: type: integer default: null nullable: true + DiskStorage: + description: The size of disk storage for the session, in gigabytes + type: integer + minimum: 1 + example: 8 + DiskStoragePatch: + type: integer + minimum: 1 + nullable: true EnvironmentPort: type: integer minimum: 0 @@ -570,22 +807,163 @@ components: type: string description: The location where the session will start, if left unset it will default to the session image working directory. 
minLength: 1 + example: "/home/jovyan/work" + EnvironmentWorkingDirectoryPatch: + type: string + example: "/home/jovyan/work" EnvironmentMountDirectory: type: string - description: The location where the persistent storage for the session will be mounted, usually it should be identical to or a parent of the working directory, if left unset will default to the working directory. + description: + The location where the persistent storage for the session will be mounted, usually it should be identical to or + a parent of the working directory, if left unset will default to the working directory. minLength: 1 + example: "/home/jovyan/work" + EnvironmentMountDirectoryPatch: + type: string + example: "/home/jovyan/work" EnvironmentCommand: type: array items: type: string description: The command that will be run i.e. will overwrite the image Dockerfile ENTRYPOINT, equivalent to command in Kubernetes - minLength: 1 + minItems: 1 EnvironmentArgs: type: array items: type: string description: The arguments that will follow the command, i.e. will overwrite the image Dockerfile CMD, equivalent to args in Kubernetes - minLength: 1 + minItems: 1 + IsArchived: + type: boolean + description: Whether this environment is archived and not for use in new projects or not + default: false + Build: + description: A container image build + type: object + allOf: + - $ref: "#/components/schemas/BuildCommonPart" + - oneOf: + - $ref: "#/components/schemas/BuildNotCompletedPart" + - $ref: "#/components/schemas/BuildCompletedPart" + discriminator: + propertyName: status + BuildCommonPart: + type: object + properties: + id: + $ref: "#/components/schemas/Ulid" + environment_id: + $ref: "#/components/schemas/Ulid" + created_at: + $ref: "#/components/schemas/CreationDate" + error_reason: + $ref: "#/components/schemas/ErrorReason" + required: + - id + - environment_id + - created_at + additionalProperties: false + BuildNotCompletedPart: + type: object + properties: + status: + type: string + enum: + - "in_progress" + - "failed" + - "cancelled" + example: "in_progress" + required: + - status + additionalProperties: false + BuildCompletedPart: + type: object + properties: + status: + type: string + enum: + - "succeeded" + example: "succeeded" + result: + $ref: "#/components/schemas/BuildResult" + required: + - status + - result + additionalProperties: false + BuildList: + description: A list of container image builds + type: array + items: + $ref: "#/components/schemas/Build" + BuildPatch: + description: The requested update of a container image build + type: object + properties: + status: + type: string + enum: + - "cancelled" + additionalProperties: false + BuildLogs: + description: The logs of a container image build + type: object + additionalProperties: + type: string + example: + "container-A": "Log line 1\nLog line 2" + "container-B": "Log line 1\nLog line 2" + BuildResult: + description: The result of a container image build + type: object + properties: + image: + $ref: "#/components/schemas/ContainerImage" + completed_at: + $ref: "#/components/schemas/CreationDate" + repository_url: + type: string + repository_git_commit_sha: + type: string + required: + - image + - completed_at + - repository_url + - repository_git_commit_sha + additionalProperties: false + BuildStatus: + description: The status of a container image build + type: string + enum: + - "in_progress" + - "succeeded" + - "failed" + - "cancelled" + example: "succeeded" + EnvVariables: + description: Environment variables for the session pod + type: array + 
maxItems: 32 + items: + $ref: "#/components/schemas/EnvVar" + EnvVar: + description: An environment variable for the session pod + type: object + properties: + name: + type: string + maxLength: 256 + # based on https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_235 + pattern: "^[a-zA-Z_][a-zA-Z0-9_]*$" + example: MY_VAR + value: + type: string + maxLength: 500 + required: + - name + ErrorReason: + description: The reason why a container image build did not succeed, if available. + type: string + example: "StepOutOfMemory" ErrorResponse: type: object properties: diff --git a/components/renku_data_services/session/apispec.py b/components/renku_data_services/session/apispec.py index 1a14b30ab..48e1cdf5a 100644 --- a/components/renku_data_services/session/apispec.py +++ b/components/renku_data_services/session/apispec.py @@ -1,12 +1,12 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2024-11-24T09:32:46+00:00 +# timestamp: 2025-07-21T13:24:31+00:00 from __future__ import annotations from datetime import datetime from enum import Enum -from typing import List, Optional, Union +from typing import Dict, List, Optional, Union from pydantic import ConfigDict, Field, RootModel from renku_data_services.session.apispec_base import BaseAPISpec @@ -17,19 +17,112 @@ class EnvironmentKind(Enum): CUSTOM = "CUSTOM" +class EnvironmentImageSourceImage(Enum): + image = "image" + + +class EnvironmentImageSourceBuild(Enum): + build = "build" + + +class Status(Enum): + in_progress = "in_progress" + failed = "failed" + cancelled = "cancelled" + + +class BuildNotCompletedPart(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + status: Status = Field(..., examples=["in_progress"]) + + +class Status1(Enum): + succeeded = "succeeded" + + +class Status2(Enum): + cancelled = "cancelled" + + +class BuildPatch(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + status: Optional[Status2] = None + + +class BuildLogs(RootModel[Optional[Dict[str, str]]]): + root: Optional[Dict[str, str]] = None + + +class BuildResult(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + image: str = Field( + ..., + description="A container image", + examples=["renku/renkulab-py:3.10-0.18.1"], + max_length=500, + pattern="^[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*(\\/[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*)*(:[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}|@sha256:[a-fA-F0-9]{64}){0,1}$", + ) + completed_at: datetime = Field( + ..., + description="The date and time the resource was created (in UTC and ISO-8601 format)", + examples=["2023-11-01T17:32:28Z"], + ) + repository_url: str + repository_git_commit_sha: str + + +class BuildStatus(Enum): + in_progress = "in_progress" + succeeded = "succeeded" + failed = "failed" + cancelled = "cancelled" + + +class EnvVar(BaseAPISpec): + name: str = Field( + ..., examples=["MY_VAR"], max_length=256, pattern="^[a-zA-Z_][a-zA-Z0-9_]*$" + ) + value: Optional[str] = Field(None, max_length=500) + + class Error(BaseAPISpec): - code: int = Field(..., example=1404, gt=0) + code: int = Field(..., examples=[1404], gt=0) detail: Optional[str] = Field( - None, example="A more detailed optional message showing what the problem was" + None, examples=["A more detailed optional message showing what the problem was"] + ) + message: str = Field( + ..., examples=["Something went wrong - please try again later"] ) - message: str = Field(..., example="Something went wrong - please try again later") class ErrorResponse(BaseAPISpec): error: Error -class 
Environment(BaseAPISpec): +class GetEnvironmentParams(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + include_archived: bool = Field( + False, description="Whether to return archived environments or not" + ) + + +class EnvironmentsGetParametersQuery(BaseAPISpec): + get_environment_params: Optional[GetEnvironmentParams] = None + + +class BuildsBuildIdLogsGetParametersQuery(BaseAPISpec): + max_lines: int = 250 + + +class EnvironmentWithoutContainerImage(BaseAPISpec): id: str = Field( ..., description="ULID identifier", @@ -40,28 +133,22 @@ class Environment(BaseAPISpec): name: str = Field( ..., description="Renku session name", - example="My Renku Session :)", + examples=["My Renku Session :)"], max_length=99, min_length=1, ) creation_date: datetime = Field( ..., description="The date and time the resource was created (in UTC and ISO-8601 format)", - example="2023-11-01T17:32:28Z", + examples=["2023-11-01T17:32:28Z"], ) description: Optional[str] = Field( None, description="A description for the resource", max_length=500 ) - container_image: str = Field( - ..., - description="A container image", - example="renku/renkulab-py:3.10-0.18.1", - max_length=500, - ) default_url: str = Field( ..., description="The default path to open in a session", - example="/lab", + examples=["/lab"], max_length=200, ) uid: int = Field( @@ -73,11 +160,13 @@ class Environment(BaseAPISpec): working_directory: Optional[str] = Field( None, description="The location where the session will start, if left unset it will default to the session image working directory.", + examples=["/home/jovyan/work"], min_length=1, ) mount_directory: Optional[str] = Field( None, description="The location where the persistent storage for the session will be mounted, usually it should be identical to or a parent of the working directory, if left unset will default to the working directory.", + examples=["/home/jovyan/work"], min_length=1, ) port: int = Field( @@ -96,17 +185,32 @@ class Environment(BaseAPISpec): description="The arguments that will follow the command, i.e. 
will overwrite the image Dockerfile CMD, equivalent to args in Kubernetes", min_length=1, ) + is_archived: Optional[bool] = Field( + False, + description="Whether this environment is archived and not for use in new projects or not", + ) -class EnvironmentGetInLauncher(Environment): - environment_kind: EnvironmentKind +class Environment(EnvironmentWithoutContainerImage): + container_image: str = Field( + ..., + description="A container image", + examples=["renku/renkulab-py:3.10-0.18.1"], + max_length=500, + pattern="^[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*(\\/[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*)*(:[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}|@sha256:[a-fA-F0-9]{64}){0,1}$", + ) + + +class EnvironmentWithImageGet(Environment): + environment_image_source: EnvironmentImageSourceImage + environment_kind: EnvironmentKind = "custom" class EnvironmentPost(BaseAPISpec): name: str = Field( ..., description="Renku session name", - example="My Renku Session :)", + examples=["My Renku Session :)"], max_length=99, min_length=1, ) @@ -116,13 +220,14 @@ class EnvironmentPost(BaseAPISpec): container_image: str = Field( ..., description="A container image", - example="renku/renkulab-py:3.10-0.18.1", + examples=["renku/renkulab-py:3.10-0.18.1"], max_length=500, + pattern="^[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*(\\/[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*)*(:[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}|@sha256:[a-fA-F0-9]{64}){0,1}$", ) default_url: str = Field( "/lab", description="The default path to open in a session", - example="/lab", + examples=["/lab"], max_length=200, ) uid: int = Field( @@ -134,11 +239,13 @@ class EnvironmentPost(BaseAPISpec): working_directory: Optional[str] = Field( None, description="The location where the session will start, if left unset it will default to the session image working directory.", + examples=["/home/jovyan/work"], min_length=1, ) mount_directory: Optional[str] = Field( None, description="The location where the persistent storage for the session will be mounted, usually it should be identical to or a parent of the working directory, if left unset will default to the working directory.", + examples=["/home/jovyan/work"], min_length=1, ) port: int = Field( @@ -157,6 +264,11 @@ class EnvironmentPost(BaseAPISpec): description="The arguments that will follow the command, i.e. 
will overwrite the image Dockerfile CMD, equivalent to args in Kubernetes", min_length=1, ) + is_archived: bool = Field( + False, + description="Whether this environment is archived and not for use in new projects or not", + ) + environment_image_source: EnvironmentImageSourceImage class EnvironmentPatch(BaseAPISpec): @@ -166,7 +278,7 @@ class EnvironmentPatch(BaseAPISpec): name: Optional[str] = Field( None, description="Renku session name", - example="My Renku Session :)", + examples=["My Renku Session :)"], max_length=99, min_length=1, ) @@ -176,13 +288,14 @@ class EnvironmentPatch(BaseAPISpec): container_image: Optional[str] = Field( None, description="A container image", - example="renku/renkulab-py:3.10-0.18.1", + examples=["renku/renkulab-py:3.10-0.18.1"], max_length=500, + pattern="^[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*(\\/[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*)*(:[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}|@sha256:[a-fA-F0-9]{64}){0,1}$", ) default_url: Optional[str] = Field( None, description="The default path to open in a session", - example="/lab", + examples=["/lab"], max_length=200, ) uid: Optional[int] = Field( @@ -191,16 +304,8 @@ class EnvironmentPatch(BaseAPISpec): gid: Optional[int] = Field( None, description="The group ID used to run the session", gt=0, le=65535 ) - working_directory: Optional[str] = Field( - None, - description="The location where the session will start, if left unset it will default to the session image working directory.", - min_length=1, - ) - mount_directory: Optional[str] = Field( - None, - description="The location where the persistent storage for the session will be mounted, usually it should be identical to or a parent of the working directory, if left unset will default to the working directory.", - min_length=1, - ) + working_directory: Optional[str] = Field(None, examples=["/home/jovyan/work"]) + mount_directory: Optional[str] = Field(None, examples=["/home/jovyan/work"]) port: Optional[int] = Field( None, description="The TCP port (on any container in the session) where user requests will be routed to from the ingress", @@ -217,78 +322,140 @@ class EnvironmentPatch(BaseAPISpec): description="The arguments that will follow the command, i.e. 
will overwrite the image Dockerfile CMD, equivalent to args in Kubernetes", min_length=1, ) + is_archived: Optional[bool] = Field( + False, + description="Whether this environment is archived and not for use in new projects or not", + ) -class SessionLauncher(BaseAPISpec): +class EnvironmentIdOnlyPatch(BaseAPISpec): + id: Optional[str] = Field( + None, + description="Id of the environment to use", + examples=["01AN4Z79ZS6XX96588FDX0H099"], + min_length=1, + ) + + +class EnvironmentIdOnlyPost(BaseAPISpec): id: str = Field( ..., - description="ULID identifier", - max_length=26, - min_length=26, - pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + description="Id of the environment to use", + examples=["01AN4Z79ZS6XX96588FDX0H099"], + min_length=1, ) - project_id: str = Field( - ..., - description="ULID identifier", - max_length=26, - min_length=26, - pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + + +class BuildParameters(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", ) - name: str = Field( + repository: str = Field(..., description="A git repository URL") + builder_variant: str = Field( ..., - description="Renku session name", - example="My Renku Session :)", + description="Type of virtual environment manager when building custom environments.", max_length=99, min_length=1, ) - creation_date: datetime = Field( - ..., - description="The date and time the resource was created (in UTC and ISO-8601 format)", - example="2023-11-01T17:32:28Z", + frontend_variant: str = Field( + ..., description="User's Frontend Choice.", max_length=99, min_length=1 ) - description: Optional[str] = Field( - None, description="A description for the resource", max_length=500 + repository_revision: Optional[str] = Field( + None, description="A git revision", max_length=500, min_length=1 ) - environment: EnvironmentGetInLauncher - resource_class_id: Optional[int] = Field( - ..., description="The identifier of a resource class" + context_dir: Optional[str] = Field( + None, description="The relative path to a folder", max_length=500, min_length=1 ) -class EnvironmentIdOnlyPatch(BaseAPISpec): - id: Optional[str] = Field( +class BuildParametersPost(BuildParameters): + environment_image_source: EnvironmentImageSourceBuild + + +class BuildParametersPatch(BaseAPISpec): + repository: Optional[str] = Field(None, description="A git repository URL") + builder_variant: Optional[str] = Field( None, - description="Id of the environment to use", - example="01AN4Z79ZS6XX96588FDX0H099", + description="Type of virtual environment manager when building custom environments.", + max_length=99, min_length=1, ) + frontend_variant: Optional[str] = Field( + None, description="User's Frontend Choice.", max_length=99, min_length=1 + ) + repository_revision: Optional[str] = Field( + None, description="A git revision", max_length=500 + ) + context_dir: Optional[str] = Field( + None, description="The relative path to a folder", max_length=500 + ) -class EnvironmentIdOnlyPost(BaseAPISpec): +class BuildCommonPart(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) id: str = Field( ..., - description="Id of the environment to use", - example="01AN4Z79ZS6XX96588FDX0H099", - min_length=1, + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + environment_id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + created_at: datetime = Field( + ..., + description="The date and time the resource was 
created (in UTC and ISO-8601 format)", + examples=["2023-11-01T17:32:28Z"], + ) + error_reason: Optional[str] = Field( + None, + description="The reason why a container image build did not succeed, if available.", + examples=["StepOutOfMemory"], ) +class BuildCompletedPart(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + status: Status1 = Field(..., examples=["succeeded"]) + result: BuildResult + + class EnvironmentList(RootModel[List[Environment]]): root: List[Environment] = Field(..., description="A list of session environments") -class EnvironmentPostInLauncher(EnvironmentPost): +class EnvironmentWithBuildGet(EnvironmentWithoutContainerImage): + container_image: Optional[str] = Field( + None, + description="A container image", + examples=["renku/renkulab-py:3.10-0.18.1"], + max_length=500, + pattern="^[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*(\\/[a-z0-9]+((\\.|_|__|-+)[a-z0-9]+)*)*(:[a-zA-Z0-9_][a-zA-Z0-9._-]{0,127}|@sha256:[a-fA-F0-9]{64}){0,1}$", + ) + build_parameters: BuildParameters + environment_image_source: EnvironmentImageSourceBuild + environment_kind: EnvironmentKind = "custom" + + +class EnvironmentPostInLauncherHelper(EnvironmentPost): environment_kind: EnvironmentKind class EnvironmentPatchInLauncher(EnvironmentPatch): environment_kind: Optional[EnvironmentKind] = None - - -class SessionLaunchersList(RootModel[List[SessionLauncher]]): - root: List[SessionLauncher] = Field( - ..., description="A list of Renku session launchers", min_length=0 - ) + environment_image_source: Optional[ + Union[EnvironmentImageSourceImage, EnvironmentImageSourceBuild] + ] = Field(None, description="Source of the environment's image") + build_parameters: Optional[BuildParametersPatch] = None class SessionLauncherPost(BaseAPISpec): @@ -298,7 +465,7 @@ class SessionLauncherPost(BaseAPISpec): name: str = Field( ..., description="Renku session name", - example="My Renku Session :)", + examples=["My Renku Session :)"], max_length=99, min_length=1, ) @@ -315,7 +482,19 @@ class SessionLauncherPost(BaseAPISpec): resource_class_id: Optional[int] = Field( None, description="The identifier of a resource class" ) - environment: Union[EnvironmentPostInLauncher, EnvironmentIdOnlyPost] + disk_storage: Optional[int] = Field( + None, + description="The size of disk storage for the session, in gigabytes", + examples=[8], + ge=1, + ) + env_variables: Optional[List[EnvVar]] = Field( + None, description="Environment variables for the session pod", max_length=32 + ) + environment: Union[ + EnvironmentIdOnlyPost, + Union[EnvironmentPostInLauncherHelper, BuildParametersPost], + ] class SessionLauncherPatch(BaseAPISpec): @@ -325,7 +504,7 @@ class SessionLauncherPatch(BaseAPISpec): name: Optional[str] = Field( None, description="Renku session name", - example="My Renku Session :)", + examples=["My Renku Session :)"], max_length=99, min_length=1, ) @@ -335,6 +514,79 @@ class SessionLauncherPatch(BaseAPISpec): resource_class_id: Optional[int] = Field( None, description="The identifier of a resource class" ) + disk_storage: Optional[int] = Field(None, ge=1) + env_variables: Optional[List[EnvVar]] = Field( + None, description="Environment variables for the session pod", max_length=32 + ) environment: Optional[Union[EnvironmentPatchInLauncher, EnvironmentIdOnlyPatch]] = ( None ) + + +class Build1(BuildCommonPart): + pass + + +class Build2(BuildNotCompletedPart, Build1): + pass + + +class Build3(BuildCompletedPart, Build1): + pass + + +class BuildList(RootModel[List[Union[Build2, Build3]]]): + root: 
List[Union[Build2, Build3]] = Field( + ..., description="A list of container image builds" + ) + + +class SessionLauncher(BaseAPISpec): + id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + project_id: str = Field( + ..., + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + name: str = Field( + ..., + description="Renku session name", + examples=["My Renku Session :)"], + max_length=99, + min_length=1, + ) + creation_date: datetime = Field( + ..., + description="The date and time the resource was created (in UTC and ISO-8601 format)", + examples=["2023-11-01T17:32:28Z"], + ) + description: Optional[str] = Field( + None, description="A description for the resource", max_length=500 + ) + environment: Union[EnvironmentWithImageGet, EnvironmentWithBuildGet] + resource_class_id: Optional[int] = Field( + ..., description="The identifier of a resource class" + ) + disk_storage: Optional[int] = Field( + None, + description="The size of disk storage for the session, in gigabytes", + examples=[8], + ge=1, + ) + env_variables: Optional[List[EnvVar]] = Field( + None, description="Environment variables for the session pod", max_length=32 + ) + + +class SessionLaunchersList(RootModel[List[SessionLauncher]]): + root: List[SessionLauncher] = Field( + ..., description="A list of Renku session launchers", min_length=0 + ) diff --git a/components/renku_data_services/session/apispec_extras.py b/components/renku_data_services/session/apispec_extras.py new file mode 100644 index 000000000..fd19ae747 --- /dev/null +++ b/components/renku_data_services/session/apispec_extras.py @@ -0,0 +1,13 @@ +"""Extra definitions for the API spec.""" + +from typing import Union + +from pydantic import Field, RootModel + +from renku_data_services.session.apispec import Build2, Build3 + + +class Build(RootModel[Union[Build2, Build3]]): + """A build.""" + + root: Union[Build2, Build3] = Field(...) 
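Why the extra wrapper above: in the OpenAPI spec, Build is an allOf of BuildCommonPart with a oneOf discriminated by status, and datamodel-codegen emits that union as the split helper classes Build2 (not completed) and Build3 (completed). The hand-written Build root model in apispec_extras re-joins them so the handlers below can validate and serialize a single build payload. A minimal sketch of how the union resolves, reusing the example ULIDs from the spec (an assumed payload, not real data):

    from renku_data_services.session.apispec_extras import Build

    # Hypothetical payload for a build that is still running; the IDs reuse
    # the example ULIDs from the spec above.
    payload = {
        "id": "01AN4Z79ZS5XN0F25N3DB94T4R",
        "environment_id": "01AN4Z79ZS6XX96588FDX0H099",
        "created_at": "2023-11-01T17:32:28Z",
        "status": "in_progress",
    }
    build = Build.model_validate(payload)
    # Resolves to Build2: "status" is not "succeeded" and there is no "result".
    # A payload with status "succeeded" and a BuildResult resolves to Build3.
    assert build.root.status.value == "in_progress"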
diff --git a/components/renku_data_services/session/blueprints.py b/components/renku_data_services/session/blueprints.py index 93359fd7f..a0a5e7f72 100644 --- a/components/renku_data_services/session/blueprints.py +++ b/components/renku_data_services/session/blueprints.py @@ -7,14 +7,18 @@ from sanic_ext import validate from ulid import ULID -from renku_data_services import base_models +from renku_data_services import base_models, errors from renku_data_services.base_api.auth import authenticate, only_authenticated from renku_data_services.base_api.blueprint import BlueprintFactoryResponse, CustomBlueprint +from renku_data_services.base_api.misc import validate_query +from renku_data_services.base_models.metrics import MetricsService from renku_data_services.base_models.validation import validated_json -from renku_data_services.session import apispec, models +from renku_data_services.session import apispec, apispec_extras, models from renku_data_services.session.core import ( + validate_build_patch, validate_environment_patch, validate_session_launcher_patch, + validate_unsaved_build, validate_unsaved_environment, validate_unsaved_session_launcher, ) @@ -31,8 +35,9 @@ class EnvironmentsBP(CustomBlueprint): def get_all(self) -> BlueprintFactoryResponse: """List all session environments.""" - async def _get_all(_: Request) -> JSONResponse: - environments = await self.session_repo.get_environments() + @validate_query(query=apispec.GetEnvironmentParams) + async def _get_all(_: Request, query: apispec.GetEnvironmentParams) -> JSONResponse: + environments = await self.session_repo.get_environments(include_archived=query.include_archived) return validated_json(apispec.EnvironmentList, environments) return "/environments", ["GET"], _get_all @@ -94,6 +99,7 @@ class SessionLaunchersBP(CustomBlueprint): session_repo: SessionRepository authenticator: base_models.Authenticator + metrics: MetricsService def get_all(self) -> BlueprintFactoryResponse: """List all session launcher visible to user.""" @@ -122,8 +128,13 @@ def post(self) -> BlueprintFactoryResponse: @only_authenticated @validate(json=apispec.SessionLauncherPost) async def _post(_: Request, user: base_models.APIUser, body: apispec.SessionLauncherPost) -> JSONResponse: - new_launcher = validate_unsaved_session_launcher(body) + new_launcher = validate_unsaved_session_launcher(body, builds_config=self.session_repo.builds_config) launcher = await self.session_repo.insert_launcher(user=user, launcher=new_launcher) + await self.metrics.session_launcher_created( + user, + environment_kind=launcher.environment.environment_kind.value, + environment_image_source=launcher.environment.environment_image_source.value, + ) return validated_json(apispec.SessionLauncher, launcher, status=201) return "/session_launchers", ["POST"], _post @@ -133,13 +144,25 @@ def patch(self) -> BlueprintFactoryResponse: @authenticate(self.authenticator) @only_authenticated - @validate(json=apispec.SessionLauncherPatch) - async def _patch( - _: Request, user: base_models.APIUser, launcher_id: ULID, body: apispec.SessionLauncherPatch - ) -> JSONResponse: + async def _patch(request: Request, user: base_models.APIUser, launcher_id: ULID) -> JSONResponse: async with self.session_repo.session_maker() as session, session.begin(): current_launcher = await self.session_repo.get_launcher(user, launcher_id) - launcher_patch = validate_session_launcher_patch(body, current_launcher) + body = apispec.SessionLauncherPatch.model_validate(request.json) + + # NOTE: This is required to deal with 
the multiple possible types for the environment field: If some + # random fields are passed then the validation chooses the environment type to be EnvironmentIdOnlyPatch + # which might not be the case and would set the session's environment ID to None. + # TODO: Check how validation exactly works for Union types to see if we can do this in a clear way. + if isinstance(body.environment, apispec.EnvironmentIdOnlyPatch) and "id" not in request.json.get( + "environment", {} + ): + raise errors.ValidationError( + message="There are errors in the following fields, id: Input should be a valid string" + ) + + launcher_patch = validate_session_launcher_patch( + body, current_launcher, builds_config=self.session_repo.builds_config + ) launcher = await self.session_repo.update_launcher( user=user, launcher_id=launcher_id, patch=launcher_patch, session=session ) @@ -167,3 +190,72 @@ async def _get_launcher(_: Request, user: base_models.APIUser, project_id: ULID) return validated_json(apispec.SessionLaunchersList, launchers) return "/projects//session_launchers", ["GET"], _get_launcher + + +@dataclass(kw_only=True) +class BuildsBP(CustomBlueprint): + """Handlers for manipulating container image builds.""" + + session_repo: SessionRepository + authenticator: base_models.Authenticator + + def get_one(self) -> BlueprintFactoryResponse: + """Get a specific container image build.""" + + @authenticate(self.authenticator) + async def _get_one(_: Request, user: base_models.APIUser, build_id: ULID) -> JSONResponse: + build = await self.session_repo.get_build(user=user, build_id=build_id) + return validated_json(apispec_extras.Build, build) + + return "/builds/", ["GET"], _get_one + + def post(self) -> BlueprintFactoryResponse: + """Create a new container image build.""" + + @authenticate(self.authenticator) + @only_authenticated + async def _post(_: Request, user: base_models.APIUser, environment_id: ULID) -> JSONResponse: + new_build = validate_unsaved_build(environment_id=environment_id) + build = await self.session_repo.start_build(user=user, build=new_build) + return validated_json(apispec_extras.Build, build, status=201) + + return "/environments//builds", ["POST"], _post + + def patch(self) -> BlueprintFactoryResponse: + """Update a specific container image build.""" + + @authenticate(self.authenticator) + @only_authenticated + @validate(json=apispec.BuildPatch) + async def _patch( + _: Request, user: base_models.APIUser, build_id: ULID, body: apispec.BuildPatch + ) -> JSONResponse: + build_patch = validate_build_patch(body) + build = await self.session_repo.update_build(user=user, build_id=build_id, patch=build_patch) + return validated_json(apispec_extras.Build, build) + + return "/builds/", ["PATCH"], _patch + + def get_environment_builds(self) -> BlueprintFactoryResponse: + """Get all container image builds belonging to a session environment.""" + + @authenticate(self.authenticator) + async def _get_environment_builds(_: Request, user: base_models.APIUser, environment_id: ULID) -> JSONResponse: + builds = await self.session_repo.get_environment_builds(user=user, environment_id=environment_id) + return validated_json(apispec.BuildList, builds) + + return "/environments//builds", ["GET"], _get_environment_builds + + def get_logs(self) -> BlueprintFactoryResponse: + """Get the logs of a container image build.""" + + @authenticate(self.authenticator) + @only_authenticated + @validate(query=apispec.BuildsBuildIdLogsGetParametersQuery) + async def _get_logs( + _: Request, user: base_models.APIUser, 
build_id: ULID, query: apispec.BuildsBuildIdLogsGetParametersQuery + ) -> JSONResponse: + logs = await self.session_repo.get_build_logs(user=user, build_id=build_id, max_log_lines=query.max_lines) + return validated_json(apispec.BuildLogs, logs) + + return "/builds//logs", ["GET"], _get_logs diff --git a/components/renku_data_services/session/config.py b/components/renku_data_services/session/config.py new file mode 100644 index 000000000..92098e2b9 --- /dev/null +++ b/components/renku_data_services/session/config.py @@ -0,0 +1,87 @@ +"""Configuration for session module.""" + +import os +from dataclasses import dataclass +from datetime import timedelta + +from pydantic import ValidationError as PydanticValidationError + +from renku_data_services.app_config import logging +from renku_data_services.session import crs as session_crs + +logger = logging.getLogger(__name__) + + +@dataclass +class BuildsConfig: + """Configuration for container image builds.""" + + enabled: bool = False + build_output_image_prefix: str | None = None + vscodium_python_run_image: str | None = None + build_strategy_name: str | None = None + push_secret_name: str | None = None + buildrun_retention_after_failed: timedelta | None = None + buildrun_retention_after_succeeded: timedelta | None = None + buildrun_build_timeout: timedelta | None = None + node_selector: dict[str, str] | None = None + tolerations: list[session_crs.Toleration] | None = None + + @classmethod + def from_env(cls) -> "BuildsConfig": + """Create a config from environment variables.""" + enabled = os.environ.get("IMAGE_BUILDERS_ENABLED", "false").lower() == "true" + build_output_image_prefix = os.environ.get("BUILD_OUTPUT_IMAGE_PREFIX") + vscodium_python_run_image = os.environ.get("BUILD_VSCODIUM_PYTHON_RUN_IMAGE") + build_strategy_name = os.environ.get("BUILD_STRATEGY_NAME") + push_secret_name = os.environ.get("BUILD_PUSH_SECRET_NAME") + buildrun_retention_after_failed_seconds = int(os.environ.get("BUILD_RUN_RETENTION_AFTER_FAILED_SECONDS") or "0") + buildrun_retention_after_failed = ( + timedelta(seconds=buildrun_retention_after_failed_seconds) + if buildrun_retention_after_failed_seconds > 0 + else None + ) + buildrun_retention_after_succeeded_seconds = int( + os.environ.get("BUILD_RUN_RETENTION_AFTER_SUCCEEDED_SECONDS") or "0" + ) + buildrun_retention_after_succeeded = ( + timedelta(seconds=buildrun_retention_after_succeeded_seconds) + if buildrun_retention_after_succeeded_seconds > 0 + else None + ) + buildrun_build_timeout_seconds = int(os.environ.get("BUILD_RUN_BUILD_TIMEOUT") or "0") + buildrun_build_timeout = ( + timedelta(seconds=buildrun_build_timeout_seconds) if buildrun_build_timeout_seconds > 0 else None + ) + + if os.environ.get("DUMMY_STORES", "false").lower() == "true": + enabled = True # Enable image builds when running tests + + node_selector: dict[str, str] | None = None + node_selector_str = os.environ.get("BUILD_NODE_SELECTOR") + if node_selector_str: + try: + node_selector = session_crs.NodeSelector.model_validate_json(node_selector_str).root + except PydanticValidationError: + logger.error("Could not validate BUILD_NODE_SELECTOR. Will not use node selector for image builds.") + + tolerations: list[session_crs.Toleration] | None = None + tolerations_str = os.environ.get("BUILD_NODE_TOLERATIONS") + if tolerations_str: + try: + tolerations = session_crs.Tolerations.model_validate_json(tolerations_str).root + except PydanticValidationError: + logger.error("Could not validate BUILD_NODE_TOLERATIONS. 
Will not use tolerations for image builds.")
+
+        return cls(
+            enabled=enabled or False,
+            build_output_image_prefix=build_output_image_prefix or None,
+            vscodium_python_run_image=vscodium_python_run_image or None,
+            build_strategy_name=build_strategy_name or None,
+            push_secret_name=push_secret_name or None,
+            buildrun_retention_after_failed=buildrun_retention_after_failed,
+            buildrun_retention_after_succeeded=buildrun_retention_after_succeeded,
+            buildrun_build_timeout=buildrun_build_timeout,
+            node_selector=node_selector,
+            tolerations=tolerations,
+        )
diff --git a/components/renku_data_services/session/constants.py b/components/renku_data_services/session/constants.py
new file mode 100644
index 000000000..a9fdee0c9
--- /dev/null
+++ b/components/renku_data_services/session/constants.py
@@ -0,0 +1,63 @@
+"""Constants for session environments, session launchers and container image builds."""
+
+import re
+from datetime import timedelta
+from pathlib import PurePosixPath
+from re import Pattern
+from typing import Final
+
+from renku_data_services.k8s.models import GVK
+
+BUILD_DEFAULT_OUTPUT_IMAGE_PREFIX: Final[str] = "harbor.dev.renku.ch/renku-builds/"
+"""The default container image prefix for Renku builds."""
+
+BUILD_OUTPUT_IMAGE_NAME: Final[str] = "renku-build"
+"""The container image name created from Renku builds."""
+
+BUILD_BUILDER_IMAGE: Final[str] = "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/selector:0.0.6"
+
+BUILD_RUN_IMAGE: Final[str] = "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/base-image:0.0.6"
+BUILD_MOUNT_DIRECTORY: Final[PurePosixPath] = PurePosixPath("/home/renku/work")
+BUILD_WORKING_DIRECTORY: Final[PurePosixPath] = BUILD_MOUNT_DIRECTORY
+BUILD_UID: Final[int] = 1000
+BUILD_GID: Final[int] = 1000
+BUILD_PORT: Final[int] = 8888
+DEFAULT_URLS: Final[dict[str, str]] = {
+    "vscodium": "/",
+    "jupyterlab": "/lab",
+}
+
+BUILD_DEFAULT_BUILD_STRATEGY_NAME: Final[str] = "renku-buildpacks-v2"
+"""The name of the default build strategy."""
+
+BUILD_DEFAULT_PUSH_SECRET_NAME: Final[str] = "renku-build-secret"
+"""The name of the default secret to use when pushing Renku builds."""
+
+BUILD_RUN_DEFAULT_RETENTION_AFTER_FAILED: Final[timedelta] = timedelta(minutes=5)
+"""The default retention TTL for BuildRuns when in failed state."""
+
+BUILD_RUN_DEFAULT_RETENTION_AFTER_SUCCEEDED: Final[timedelta] = timedelta(minutes=5)
+"""The default retention TTL for BuildRuns when in succeeded state."""
+
+BUILD_RUN_DEFAULT_TIMEOUT: Final[timedelta] = timedelta(hours=1)
+"""The default timeout for builds, after which they get cancelled."""
+
+BUILD_RUN_GVK: Final[GVK] = GVK(group="shipwright.io", version="v1beta1", kind="BuildRun")
+
+TASK_RUN_GVK: Final[GVK] = GVK(group="tekton.dev", version="v1", kind="TaskRun")
+
+DUMMY_TASK_RUN_USER_ID: Final[str] = "DummyTaskRunUser"
+"""The user id to use for TaskRuns in the k8s cache.
+
+Note: we can't currently propagate labels to TaskRuns through shipwright, so we just use a dummy user id for all of them.
+This might change if shipwright SHIP-0034 gets implemented.
+"""
+
+# see https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_235
+ENV_VARIABLE_REGEX: Final[str] = r"^[a-zA-Z_][a-zA-Z0-9_]*$"
+"""The regex to validate environment variable names.
+see Name at https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html#tag_03_235
+"""
+
+ENV_VARIABLE_NAME_MATCHER: Final[Pattern[str]] = re.compile(ENV_VARIABLE_REGEX)
+"""The compiled regex to validate environment variable names."""
diff --git a/components/renku_data_services/session/core.py b/components/renku_data_services/session/core.py
index 32e307677..bfbea2f15 100644
--- a/components/renku_data_services/session/core.py
+++ b/components/renku_data_services/session/core.py
@@ -1,11 +1,14 @@
 """Business logic for sessions."""
 
 from pathlib import PurePosixPath
+from typing import Union, cast
 
 from ulid import ULID
 
+from renku_data_services import errors
 from renku_data_services.base_models.core import RESET, ResetType
 from renku_data_services.session import apispec, models
+from renku_data_services.session.config import BuildsConfig
 
 
 def validate_unsaved_environment(
@@ -25,86 +28,301 @@ def validate_unsaved_environment(
         environment_kind=environment_kind,
         args=environment.args,
         command=environment.command,
+        is_archived=environment.is_archived,
+        environment_image_source=models.EnvironmentImageSource.image,
+    )
+
+
+def validate_unsaved_build_parameters(
+    environment: apispec.BuildParameters | apispec.BuildParametersPatch,
+    builds_config: "BuildsConfig",
+) -> models.UnsavedBuildParameters:
+    """Validate an unsaved build parameters object."""
+    if not builds_config.enabled:
+        raise errors.ValidationError(
+            message="Image builds are not enabled, the field 'environment_image_source' cannot be set to 'build'."
+        )
+    if environment.builder_variant is None:
+        raise errors.ValidationError(message="The field 'builder_variant' is required")
+    if environment.frontend_variant is None:
+        raise errors.ValidationError(message="The field 'frontend_variant' is required")
+    if environment.repository is None:
+        raise errors.ValidationError(message="The field 'repository' is required")
+    if environment.builder_variant not in models.BuilderVariant:
+        raise errors.ValidationError(
+            message=(
+                f"Invalid value for the field 'builder_variant': {environment.builder_variant}: "
+                f"Valid values are {[e.value for e in models.BuilderVariant]}"
+            )
+        )
+    if environment.frontend_variant not in models.FrontendVariant:
+        raise errors.ValidationError(
+            message=(
+                f"Invalid value for the field 'frontend_variant': {environment.frontend_variant}: "
+                f"Valid values are {[e.value for e in models.FrontendVariant]}"
+            )
+        )
+
+    return models.UnsavedBuildParameters(
+        repository=environment.repository,
+        builder_variant=environment.builder_variant,
+        frontend_variant=environment.frontend_variant,
+        repository_revision=environment.repository_revision,
+        context_dir=environment.context_dir,
+    )
+
+
+def validate_build_parameters_patch(environment: apispec.BuildParametersPatch) -> models.BuildParametersPatch:
+    """Validate the update to a build parameters object."""
+    if environment.builder_variant is not None and environment.builder_variant not in models.BuilderVariant:
+        raise errors.ValidationError(
+            message=(
+                f"Invalid value for the field 'builder_variant': {environment.builder_variant}: "
+                f"Valid values are {[e.value for e in models.BuilderVariant]}"
+            )
+        )
+    if environment.frontend_variant is not None and environment.frontend_variant not in models.FrontendVariant:
+        raise errors.ValidationError(
+            message=(
+                f"Invalid value for the field 'frontend_variant': {environment.frontend_variant}: "
+                f"Valid values are {[e.value for e in models.FrontendVariant]}"
+            )
+        )
+
+    return
models.BuildParametersPatch( + repository=environment.repository, + builder_variant=environment.builder_variant, + frontend_variant=environment.frontend_variant, + repository_revision=environment.repository_revision, + context_dir=environment.context_dir, ) def validate_environment_patch(patch: apispec.EnvironmentPatch) -> models.EnvironmentPatch: """Validate the update to a session environment.""" data_dict = patch.model_dump(exclude_unset=True, mode="json") + working_directory: PurePosixPath | ResetType | None + match patch.working_directory: + case "": + working_directory = RESET + case str(): + working_directory = PurePosixPath(patch.working_directory) + case _: + working_directory = None + mount_directory: PurePosixPath | ResetType | None + match patch.mount_directory: + case "": + mount_directory = RESET + case str(): + mount_directory = PurePosixPath(patch.mount_directory) + case _: + mount_directory = None return models.EnvironmentPatch( name=patch.name, description=patch.description, container_image=patch.container_image, default_url=patch.default_url, port=patch.port, - working_directory=PurePosixPath(patch.working_directory) if patch.working_directory else None, - mount_directory=PurePosixPath(patch.mount_directory) if patch.mount_directory else None, + working_directory=working_directory, + mount_directory=mount_directory, uid=patch.uid, gid=patch.gid, args=RESET if "args" in data_dict and data_dict["args"] is None else patch.args, command=RESET if "command" in data_dict and data_dict["command"] is None else patch.command, + is_archived=patch.is_archived, + ) + + +def validate_environment_patch_in_launcher(patch: apispec.EnvironmentPatchInLauncher) -> models.EnvironmentPatch: + """Validate the update to a session environment inside a session launcher.""" + environment_patch = validate_environment_patch(patch) + environment_patch.environment_image_source = ( + None + if patch.environment_image_source is None + else models.EnvironmentImageSource(patch.environment_image_source.value) + ) + environment_patch.build_parameters = ( + None if patch.build_parameters is None else validate_build_parameters_patch(patch.build_parameters) ) + return environment_patch -def validate_unsaved_session_launcher(launcher: apispec.SessionLauncherPost) -> models.UnsavedSessionLauncher: +def validate_unsaved_session_launcher( + launcher: apispec.SessionLauncherPost, builds_config: "BuildsConfig" +) -> models.UnsavedSessionLauncher: """Validate an unsaved session launcher.""" + + environment: Union[str, models.UnsavedBuildParameters, models.UnsavedEnvironment] + if isinstance(launcher.environment, apispec.EnvironmentIdOnlyPost): + environment = launcher.environment.id + elif isinstance(launcher.environment, apispec.BuildParametersPost): + environment = validate_unsaved_build_parameters(launcher.environment, builds_config=builds_config) + elif isinstance(launcher.environment, apispec.EnvironmentPostInLauncherHelper): + environment_helper: apispec.EnvironmentPost = launcher.environment + environment = validate_unsaved_environment(environment_helper, models.EnvironmentKind.CUSTOM) + else: + raise errors.ValidationError(message=f"Unexpected environment type: {type(launcher.environment)}") + return models.UnsavedSessionLauncher( project_id=ULID.from_str(launcher.project_id), name=launcher.name, description=launcher.description, resource_class_id=launcher.resource_class_id, + disk_storage=launcher.disk_storage, + env_variables=models.EnvVar.from_apispec(launcher.env_variables) if launcher.env_variables else 
None, # NOTE: When you create an environment with a launcher the environment can only be custom - environment=validate_unsaved_environment(launcher.environment, models.EnvironmentKind.CUSTOM) - if isinstance(launcher.environment, apispec.EnvironmentPostInLauncher) - else launcher.environment.id, + environment=environment, ) def validate_session_launcher_patch( - patch: apispec.SessionLauncherPatch, current_launcher: models.SessionLauncher + patch: apispec.SessionLauncherPatch, current_launcher: models.SessionLauncher, builds_config: "BuildsConfig" ) -> models.SessionLauncherPatch: """Validate the update to a session launcher.""" data_dict = patch.model_dump(exclude_unset=True, mode="json") - environment: str | models.EnvironmentPatch | models.UnsavedEnvironment | None = None - if ( - isinstance(patch.environment, apispec.EnvironmentPatchInLauncher) - and current_launcher is not None - and current_launcher.environment.environment_kind == models.EnvironmentKind.GLOBAL - and patch.environment.environment_kind == apispec.EnvironmentKind.CUSTOM - ): - # This means that the global environment is being swapped for a custom one, - # so we have to create a brand new environment, but we have to validate here. - validated_env = apispec.EnvironmentPostInLauncher.model_validate(data_dict["environment"]) - environment = models.UnsavedEnvironment( - name=validated_env.name, - description=validated_env.description, - container_image=validated_env.container_image, - default_url=validated_env.default_url, - port=validated_env.port, - working_directory=PurePosixPath(validated_env.working_directory) - if validated_env.working_directory - else None, - mount_directory=PurePosixPath(validated_env.mount_directory) if validated_env.mount_directory else None, - uid=validated_env.uid, - gid=validated_env.gid, - environment_kind=models.EnvironmentKind(validated_env.environment_kind.value), - args=validated_env.args, - command=validated_env.command, - ) - elif isinstance(patch.environment, apispec.EnvironmentPatchInLauncher): - environment = validate_environment_patch(patch.environment) + environment: str | models.EnvironmentPatch | models.UnsavedEnvironment | models.UnsavedBuildParameters | None = None + if isinstance(patch.environment, apispec.EnvironmentPatchInLauncher): # The patch is for a custom environment + match current_launcher.environment.environment_kind, patch.environment.environment_kind: + case models.EnvironmentKind.GLOBAL, apispec.EnvironmentKind.CUSTOM: + # This means that the global environment is being swapped for a custom one, + # so we have to create a brand-new environment, but we have to validate here. + if ( + patch.environment.environment_image_source == apispec.EnvironmentImageSourceImage.image + or patch.environment.environment_image_source is None + ): + # NOTE: The custom environment is being created from an image. 
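+                    # Re-validating the raw payload against the full POST model (rather
+                    # than merging field-by-field) lets pydantic enforce everything a
+                    # brand-new custom environment requires, e.g. 'name' and
+                    # 'container_image', in one place.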
+ validated_env = apispec.EnvironmentPostInLauncherHelper.model_validate(data_dict["environment"]) + environment = models.UnsavedEnvironment( + name=validated_env.name, + description=validated_env.description, + container_image=validated_env.container_image, + default_url=validated_env.default_url, + port=validated_env.port, + working_directory=PurePosixPath(validated_env.working_directory) + if validated_env.working_directory + else None, + mount_directory=PurePosixPath(validated_env.mount_directory) + if validated_env.mount_directory + else None, + uid=validated_env.uid, + gid=validated_env.gid, + environment_kind=models.EnvironmentKind.CUSTOM, + args=validated_env.args, + command=validated_env.command, + environment_image_source=models.EnvironmentImageSource.image, + ) + elif patch.environment.environment_image_source == apispec.EnvironmentImageSourceBuild.build: + # NOTE: The environment type is changed to be built, so, all required fields should be passed (as in + # a POST request). + validated_build_parameters = apispec.BuildParameters.model_validate( + data_dict.get("environment", {}).get("build_parameters", {}) + ) + environment = validate_unsaved_build_parameters( + validated_build_parameters, builds_config=builds_config + ) + case models.EnvironmentKind.GLOBAL, None: + # Trying to patch a global environment with a custom environment patch. + raise errors.ValidationError( + message=( + "There are errors in the following fields, environment.environment_kind: Input should be " + "'custom'" + ) + ) + case _, apispec.EnvironmentKind.GLOBAL: + # This means that the custom environment is being swapped for a global one, but the patch is for a + # custom environment. + raise errors.ValidationError( + message="There are errors in the following fields, environment.id: Input should be a valid string" + ) + case models.EnvironmentKind.CUSTOM, _: + # This means that the custom environment is being updated. + current = current_launcher.environment.environment_image_source.value + new = ( + patch.environment.environment_image_source.value + if patch.environment.environment_image_source + else None + ) + + if ( + new == "image" or (new is None and current == "image") + ) and patch.environment.build_parameters is not None: + raise errors.ValidationError( + message="There are errors in the following fields, environment.build_parameters: Must be null" + ) + elif ( + # TODO: Add a test for new == None/"build" and current == "build" + new == "build" or (new is None and current == "build") + ) and patch.environment.build_parameters is None: + raise errors.ValidationError( + message="There are errors in the following fields, environment.build_parameters: Must be set" + ) + + if current == "image" and new == "build": + # NOTE: We've checked that patch.environment.build_parameters is not None in the previous if block. + build_parameters = cast(apispec.BuildParametersPost, patch.environment.build_parameters) + # NOTE: The environment type is changed to be built, so, all required fields should be passed (as in + # a POST request). No need to get values from the current env, since they will be set by the build. 
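+                    # Once such a build succeeds, the BuildResult carries the produced
+                    # container image, so no image-based fields are carried over here.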
+                    environment = validate_unsaved_build_parameters(build_parameters, builds_config=builds_config)
+                elif current == "build" and new == "image":
+                    environment = data_dict["environment"]
+                    assert isinstance(environment, dict)
+                    if environment.get("name") is None:  # type: ignore
+                        environment["name"] = current_launcher.environment.name
+                    validated_env = apispec.EnvironmentPostInLauncherHelper.model_validate(environment)
+                    environment = models.UnsavedEnvironment(
+                        name=validated_env.name,
+                        description=validated_env.description,
+                        container_image=validated_env.container_image,
+                        default_url=validated_env.default_url,
+                        port=validated_env.port,
+                        working_directory=PurePosixPath(validated_env.working_directory)
+                        if validated_env.working_directory
+                        else None,
+                        mount_directory=PurePosixPath(validated_env.mount_directory)
+                        if validated_env.mount_directory
+                        else None,
+                        uid=validated_env.uid,
+                        gid=validated_env.gid,
+                        environment_kind=models.EnvironmentKind.CUSTOM,
+                        args=validated_env.args,
+                        command=validated_env.command,
+                        environment_image_source=models.EnvironmentImageSource.image,
+                    )
+                else:
+                    environment = validate_environment_patch_in_launcher(patch.environment)
     elif isinstance(patch.environment, apispec.EnvironmentIdOnlyPatch):
         environment = patch.environment.id
-    resource_class_id: int | None | ResetType = None
+    resource_class_id: int | None | ResetType
     if "resource_class_id" in data_dict and data_dict["resource_class_id"] is None:
         # NOTE: This means that the resource class set in the DB should be removed so that the
         # default resource class currently set in the CRC will be used.
         resource_class_id = RESET
     else:
         resource_class_id = patch.resource_class_id
+    disk_storage = RESET if "disk_storage" in data_dict and data_dict["disk_storage"] is None else patch.disk_storage
+    env_variables = (
+        RESET
+        if "env_variables" in data_dict and (data_dict["env_variables"] is None or len(data_dict["env_variables"]) == 0)
+        else models.EnvVar.from_apispec(patch.env_variables)
+        if patch.env_variables
+        else None
+    )
     return models.SessionLauncherPatch(
         name=patch.name,
         description=patch.description,
         environment=environment,
         resource_class_id=resource_class_id,
+        disk_storage=disk_storage,
+        env_variables=env_variables,
     )
+
+
+def validate_unsaved_build(environment_id: ULID) -> models.UnsavedBuild:
+    """Validate an unsaved container image build."""
+    return models.UnsavedBuild(environment_id=environment_id)
+
+
+def validate_build_patch(patch: apispec.BuildPatch) -> models.BuildPatch:
+    """Validate the update to a container image build."""
+    status = models.BuildStatus(patch.status.value) if patch.status else None
+    return models.BuildPatch(status=status)
diff --git a/components/renku_data_services/session/cr_base.py b/components/renku_data_services/session/cr_base.py
new file mode 100644
index 000000000..ec75db447
--- /dev/null
+++ b/components/renku_data_services/session/cr_base.py
@@ -0,0 +1,12 @@
+"""Base models for K8s CRD specifications."""
+
+from pydantic import BaseModel
+
+
+class BaseCRD(BaseModel):
+    """Base CRD specification."""
+
+    class Config:
+        """Do not exclude unknown properties."""
+
+        extra = "allow"
diff --git a/components/renku_data_services/session/cr_shipwright_buildrun.py b/components/renku_data_services/session/cr_shipwright_buildrun.py
new file mode 100644
index 000000000..5cc763bd9
--- /dev/null
+++ b/components/renku_data_services/session/cr_shipwright_buildrun.py
@@ -0,0 +1,3713 @@
+# generated by datamodel-codegen:
+#   filename:
+#   timestamp: 2025-03-03T12:23:11+00:00
+
+from __future__ import annotations + +from datetime import datetime +from enum import Enum +from typing import Any, Dict, List, Optional, Union + +from pydantic import ConfigDict, Field +from renku_data_services.session.cr_base import BaseCRD + + +class ConfigMapKeyRef(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + key: str = Field(..., description="The key to select.") + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + optional: Optional[bool] = Field( + default=None, + description="Specify whether the ConfigMap or its key must be defined", + ) + + +class FieldRef(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + apiVersion: Optional[str] = Field( + default=None, + description='Version of the schema the FieldPath is written in terms of, defaults to "v1".', + ) + fieldPath: str = Field( + ..., description="Path of the field to select in the specified API version." + ) + + +class ResourceFieldRef(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + containerName: Optional[str] = Field( + default=None, + description="Container name: required for volumes, optional for env vars", + ) + divisor: Optional[Union[int, str]] = Field( + default=None, + description='Specifies the output format of the exposed resources, defaults to "1"', + ) + resource: str = Field(..., description="Required: resource to select") + + +class SecretKeyRef(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + key: str = Field( + ..., + description="The key of the secret to select from. Must be a valid secret key.", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + optional: Optional[bool] = Field( + default=None, + description="Specify whether the Secret or its key must be defined", + ) + + +class ValueFrom(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + configMapKeyRef: Optional[ConfigMapKeyRef] = Field( + default=None, description="Selects a key of a ConfigMap." 
+ ) + fieldRef: Optional[FieldRef] = Field( + default=None, + description="Selects a field of the pod: supports metadata.name, metadata.namespace, `metadata.labels['']`, `metadata.annotations['']`,\nspec.nodeName, spec.serviceAccountName, status.hostIP, status.podIP, status.podIPs.", + ) + resourceFieldRef: Optional[ResourceFieldRef] = Field( + default=None, + description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, limits.ephemeral-storage, requests.cpu, requests.memory and requests.ephemeral-storage) are currently supported.", + ) + secretKeyRef: Optional[SecretKeyRef] = Field( + default=None, description="Selects a key of a secret in the pod's namespace" + ) + + +class EnvItem(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + name: str = Field( + ..., description="Name of the environment variable. Must be a C_IDENTIFIER." + ) + value: Optional[str] = Field( + default=None, + description='Variable references $(VAR_NAME) are expanded\nusing the previously defined environment variables in the container and\nany service environment variables. If a variable cannot be resolved,\nthe reference in the input string will be unchanged. Double $$ are reduced\nto a single $, which allows for escaping the $(VAR_NAME) syntax: i.e.\n"$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)".\nEscaped references will never be expanded, regardless of whether the variable\nexists or not.\nDefaults to "".', + ) + valueFrom: Optional[ValueFrom] = Field( + default=None, + description="Source for the environment variable's value. Cannot be used if value is not empty.", + ) + + +class Severity(Enum): + low = "low" + medium = "medium" + high = "high" + + +class Ignore(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + id: Optional[List[str]] = Field( + default=None, + description="ID references the security issues to be ignored in vulnerability scan", + ) + severity: Optional[Severity] = Field( + default=None, + description='Severity denotes the severity levels of security issues to be ignored, valid values are:\n- "low": it will exclude low severity vulnerabilities, displaying only medium, high and critical vulnerabilities\n- "medium": it will exclude low and medium severity vulnerabilities, displaying only high and critical vulnerabilities\n- "high": it will exclude low, medium and high severity vulnerabilities, displaying only the critical vulnerabilities', + ) + unfixed: Optional[bool] = Field( + default=None, + description="Unfixed indicates to ignore vulnerabilities for which no fix exists", + ) + + +class VulnerabilityScan(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + enabled: Optional[bool] = Field( + default=None, + description="Enabled indicates whether to run vulnerability scan for image", + ) + failOnFinding: Optional[bool] = Field( + default=None, + description="FailOnFinding indicates whether to fail the build run if the vulnerability scan results in vulnerabilities", + ) + ignore: Optional[Ignore] = Field( + default=None, + description="Ignore refers to ignore options for vulnerability scan", + ) + + +class Output(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + annotations: Optional[Dict[str, str]] = Field( + default=None, + description="Annotations references the additional annotations to be applied on the image", + ) + image: str = Field(..., description="Image is the reference of the image.") + insecure: Optional[bool] = Field( + default=None, description="Insecure defines whether 
the registry is not secure" + ) + labels: Optional[Dict[str, str]] = Field( + default=None, + description="Labels references the additional labels to be applied on the image", + ) + pushSecret: Optional[str] = Field( + default=None, + description="Describes the secret name for pushing a container image.", + ) + timestamp: Optional[str] = Field( + default=None, + description='Timestamp references the optional image timestamp to be set, valid values are:\n- "Zero", to set 00:00:00 UTC on 1 January 1970\n- "SourceTimestamp", to set the source timestamp dereived from the input source\n- "BuildTimestamp", to set the timestamp of the current build itself\n- Parsable integer number defined as the epoch seconds\n- or nil/empty to not set any specific timestamp', + ) + vulnerabilityScan: Optional[VulnerabilityScan] = Field( + default=None, + description="VulnerabilityScan provides configurations about running a scan for your generated image", + ) + + +class ConfigMapValue(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + format: Optional[str] = Field( + default=None, + description="An optional format to add pre- or suffix to the object value. For example 'KEY=${SECRET_VALUE}' or 'KEY=${CONFIGMAP_VALUE}' depending on the context.", + ) + key: str = Field(..., description="Key inside the object") + name: str = Field(..., description="Name of the object") + + +class SecretValue(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + format: Optional[str] = Field( + default=None, + description="An optional format to add pre- or suffix to the object value. For example 'KEY=${SECRET_VALUE}' or 'KEY=${CONFIGMAP_VALUE}' depending on the context.", + ) + key: str = Field(..., description="Key inside the object") + name: str = Field(..., description="Name of the object") + + +class Value(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + configMapValue: Optional[ConfigMapValue] = Field( + default=None, description="The ConfigMap value of the parameter" + ) + secretValue: Optional[SecretValue] = Field( + default=None, description="The secret value of the parameter" + ) + value: Optional[str] = Field(default=None, description="The value of the parameter") + + +class ParamValue(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + configMapValue: Optional[ConfigMapValue] = Field( + default=None, description="The ConfigMap value of the parameter" + ) + name: str = Field(..., description="Name of the parameter") + secretValue: Optional[SecretValue] = Field( + default=None, description="The secret value of the parameter" + ) + value: Optional[str] = Field(default=None, description="The value of the parameter") + values: Optional[List[Value]] = Field( + default=None, description="Values of an array parameter" + ) + + +class Retention(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + atBuildDeletion: Optional[bool] = Field( + default=None, + description="AtBuildDeletion defines if related BuildRuns should be deleted when deleting the Build.", + ) + failedLimit: Optional[int] = Field( + default=None, + description="FailedLimit defines the maximum number of failed buildruns that should exist.", + ge=1, + le=10000, + ) + succeededLimit: Optional[int] = Field( + default=None, + description="SucceededLimit defines the maximum number of succeeded buildruns that should exist.", + ge=1, + le=10000, + ) + ttlAfterFailed: Optional[str] = Field( + default=None, + description="TTLAfterFailed defines the maximum duration of time the failed buildrun should exist.", + ) + 
ttlAfterSucceeded: Optional[str] = Field( + default=None, + description="TTLAfterSucceeded defines the maximum duration of time the succeeded buildrun should exist.", + ) + + +class Git(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + cloneSecret: Optional[str] = Field( + default=None, + description="CloneSecret references a Secret that contains credentials to access\nthe repository.", + ) + revision: Optional[str] = Field( + default=None, + description="Revision describes the Git revision (e.g., branch, tag, commit SHA,\netc.) to fetch.\n\n\nIf not defined, it will fallback to the repository's default branch.", + ) + url: str = Field(..., description="URL describes the URL of the Git repository.") + + +class Local(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + name: Optional[str] = Field(default=None, description="Name of the local step") + timeout: Optional[str] = Field( + default=None, + description="Timeout how long the BuildSource execution must take.", + ) + + +class OciArtifact(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + image: str = Field(..., description="Image reference, i.e. quay.io/org/image:tag") + prune: Optional[str] = Field( + default=None, + description="Prune specifies whether the image is suppose to be deleted. Allowed\nvalues are 'Never' (no deletion) and `AfterPull` (removal after the\nimage was successfully pulled from the registry).\n\n\nIf not defined, it defaults to 'Never'.", + ) + pullSecret: Optional[str] = Field( + default=None, + description="PullSecret references a Secret that contains credentials to access\nthe repository.", + ) + + +class Source(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + contextDir: Optional[str] = Field( + default=None, + description="ContextDir is a path to subfolder in the repo. Optional.", + ) + git: Optional[Git] = Field( + default=None, description="Git contains the details for the source of type Git" + ) + local: Optional[Local] = Field( + default=None, + description="Local contains the details for the source of type Local", + ) + ociArtifact: Optional[OciArtifact] = Field( + default=None, + description="OCIArtifact contains the details for the source of type OCIArtifact", + ) + type: str = Field( + ..., description="Type is the BuildSource qualifier, the type of the source." + ) + + +class Strategy(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + kind: Optional[str] = Field( + default=None, + description="BuildStrategyKind indicates the kind of the buildstrategy, namespaced or cluster scoped.", + ) + name: str = Field( + ..., + description="Name of the referent; More info: http://kubernetes.io/docs/user-guide/identifiers#names", + ) + + +class Toleration(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + effect: Optional[str] = Field( + default=None, + description="Effect indicates the taint effect to match. Empty means match all taint effects.\nWhen specified, allowed values are NoSchedule, PreferNoSchedule and NoExecute.", + ) + key: Optional[str] = Field( + default=None, + description="Key is the taint key that the toleration applies to. Empty means match all taint keys.\nIf the key is empty, operator must be Exists; this combination means to match all values and all keys.", + ) + operator: Optional[str] = Field( + default=None, + description="Operator represents a key's relationship to the value.\nValid operators are Exists and Equal. 
Defaults to Equal.\nExists is equivalent to wildcard for value, so that a pod can\ntolerate all taints of a particular category.", + ) + tolerationSeconds: Optional[int] = Field( + default=None, + description="TolerationSeconds represents the period of time the toleration (which must be\nof effect NoExecute, otherwise this field is ignored) tolerates the taint. By default,\nit is not set, which means tolerate the taint forever (do not evict). Zero and\nnegative values will be treated as 0 (evict immediately) by the system.", + ) + value: Optional[str] = Field( + default=None, + description="Value is the taint value the toleration matches to.\nIf the operator is Exists, the value should be empty, otherwise just a regular string.", + ) + + +class Github(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + branches: Optional[List[str]] = Field( + default=None, + description="Branches slice of branch names where the event applies.", + ) + events: Optional[List[str]] = Field( + default=None, description="Events GitHub event names.", min_length=1 + ) + + +class Image(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + names: Optional[List[str]] = Field( + default=None, description="Names fully qualified image names." + ) + + +class ObjectRef(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + name: Optional[str] = Field(default=None, description="Name target object name.") + selector: Optional[Dict[str, str]] = Field( + default=None, description="Selector label selector." + ) + status: Optional[List[str]] = Field( + default=None, description="Status object status." + ) + + +class WhenItem(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + github: Optional[Github] = Field( + default=None, + description="GitHub describes how to trigger builds based on GitHub (SCM) events.", + ) + image: Optional[Image] = Field( + default=None, description="Image slice of image names where the event applies." + ) + name: str = Field( + ..., description="Name name or the short description of the trigger condition." + ) + objectRef: Optional[ObjectRef] = Field( + default=None, + description="ObjectRef describes how to match a foreign resource, either using the name or the label\nselector, plus the current resource status.", + ) + type: str = Field(..., description="Type the event type") + + +class Trigger(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + triggerSecret: Optional[str] = Field( + default=None, + description="TriggerSecret points to a local object carrying the secret token to validate webhook request.", + ) + when: Optional[List[WhenItem]] = Field( + default=None, + description="When the list of scenarios when a new build should take place.", + ) + + +class AwsElasticBlockStore(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". 
Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore\nTODO: how do we prevent errors in the filesystem from compromising the machine', + ) + partition: Optional[int] = Field( + default=None, + description='partition is the partition in the volume that you want to mount.\nIf omitted, the default is to mount by volume name.\nExamples: For volume /dev/sda1, you specify the partition as "1".\nSimilarly, the volume partition for /dev/sda is "0" (or you can leave the property empty).', + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly value true will force the readOnly setting in VolumeMounts.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", + ) + volumeID: str = Field( + ..., + description="volumeID is unique ID of the persistent disk resource in AWS (Amazon EBS volume).\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", + ) + + +class AzureDisk(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + cachingMode: Optional[str] = Field( + default=None, + description="cachingMode is the Host Caching mode: None, Read Only, Read Write.", + ) + diskName: str = Field( + ..., description="diskName is the Name of the data disk in the blob storage" + ) + diskURI: str = Field( + ..., description="diskURI is the URI of data disk in the blob storage" + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is Filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.', + ) + kind: Optional[str] = Field( + default=None, + description="kind expected values are Shared: multiple blob disks per storage account Dedicated: single blob disk per storage account Managed: azure managed data disk (only in managed availability set). defaults to shared", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly Defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + + +class AzureFile(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + secretName: str = Field( + ..., + description="secretName is the name of secret that contains Azure Storage Account Name and Key", + ) + shareName: str = Field(..., description="shareName is the azure share Name") + + +class SecretRef(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. 
apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + + +class Cephfs(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + monitors: List[str] = Field( + ..., + description="monitors is Required: Monitors is a collection of Ceph monitors\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + path: Optional[str] = Field( + default=None, + description="path is Optional: Used as the mounted root, rather than the full Ceph tree, default is /", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly is Optional: Defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + secretFile: Optional[str] = Field( + default=None, + description="secretFile is Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is Optional: SecretRef is reference to the authentication secret for User, default is empty.\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + user: Optional[str] = Field( + default=None, + description="user is optional: User is the rados user name, default is admin\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + + +class Cinder(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md', + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly defaults to false (read/write). 
ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is optional: points to a secret object containing parameters used to connect\nto OpenStack.", + ) + volumeID: str = Field( + ..., + description="volumeID used to identify the volume in cinder.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md", + ) + + +class Item(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + key: str = Field(..., description="key is the key to project.") + mode: Optional[int] = Field( + default=None, + description="mode is Optional: mode bits used to set permissions on this file.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="path is the relative path of the file to map the key to.\nMay not be an absolute path.\nMay not contain the path element '..'.\nMay not start with the string '..'.", + ) + + +class ConfigMap(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + defaultMode: Optional[int] = Field( + default=None, + description="defaultMode is optional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDefaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + items: Optional[List[Item]] = Field( + default=None, + description="items if unspecified, each key-value pair in the Data field of the referenced\nConfigMap will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the ConfigMap,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional specify whether the ConfigMap or its keys must be defined", + ) + + +class NodePublishSecretRef(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. 
Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + + +class Csi(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + driver: str = Field( + ..., + description="driver is the name of the CSI driver that handles this volume.\nConsult with your admin for the correct name as registered in the cluster.", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType to mount. Ex. "ext4", "xfs", "ntfs".\nIf not provided, the empty value is passed to the associated CSI driver\nwhich will determine the default filesystem to apply.', + ) + nodePublishSecretRef: Optional[NodePublishSecretRef] = Field( + default=None, + description="nodePublishSecretRef is a reference to the secret object containing\nsensitive information to pass to the CSI driver to complete the CSI\nNodePublishVolume and NodeUnpublishVolume calls.\nThis field is optional, and may be empty if no secret is required. If the\nsecret object contains more than one secret, all secret references are passed.", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly specifies a read-only configuration for the volume.\nDefaults to false (read/write).", + ) + volumeAttributes: Optional[Dict[str, str]] = Field( + default=None, + description="volumeAttributes stores driver-specific properties that are passed to the CSI\ndriver. Consult your driver's documentation for supported values.", + ) + + +class Item1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fieldRef: Optional[FieldRef] = Field( + default=None, + description="Required: Selects a field of the pod: only annotations, labels, name, namespace and uid are supported.", + ) + mode: Optional[int] = Field( + default=None, + description="Optional: mode bits used to set permissions on this file, must be an octal value\nbetween 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'", + ) + resourceFieldRef: Optional[ResourceFieldRef] = Field( + default=None, + description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.", + ) + + +class DownwardAPI(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + defaultMode: Optional[int] = Field( + default=None, + description="Optional: mode bits to use on created files by default. 
Must be a\nOptional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDefaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + items: Optional[List[Item1]] = Field( + default=None, description="Items is a list of downward API volume file" + ) + + +class EmptyDir(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + medium: Optional[str] = Field( + default=None, + description='medium represents what type of storage medium should back this directory.\nThe default is "" which means to use the node\'s default medium.\nMust be an empty string (default) or Memory.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir', + ) + sizeLimit: Optional[Union[int, str]] = Field( + default=None, + description="sizeLimit is the total amount of local storage required for this EmptyDir volume.\nThe size limit is also applicable for memory medium.\nThe maximum usage on memory medium EmptyDir would be the minimum value between\nthe SizeLimit specified here and the sum of memory limits of all containers in a pod.\nThe default is nil which means that the limit is undefined.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir", + ) + + +class DataSource(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + apiGroup: Optional[str] = Field( + default=None, + description="APIGroup is the group for the resource being referenced.\nIf APIGroup is not specified, the specified Kind must be in the core API group.\nFor any other third-party types, APIGroup is required.", + ) + kind: str = Field(..., description="Kind is the type of resource being referenced") + name: str = Field(..., description="Name is the name of resource being referenced") + + +class DataSourceRef(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + apiGroup: Optional[str] = Field( + default=None, + description="APIGroup is the group for the resource being referenced.\nIf APIGroup is not specified, the specified Kind must be in the core API group.\nFor any other third-party types, APIGroup is required.", + ) + kind: str = Field(..., description="Kind is the type of resource being referenced") + name: str = Field(..., description="Name is the name of resource being referenced") + namespace: Optional[str] = Field( + default=None, + description="Namespace is the namespace of resource being referenced\nNote that when a namespace is specified, a gateway.networking.k8s.io/ReferenceGrant object is required in the referent namespace to allow that namespace's owner to accept the reference. 
See the ReferenceGrant documentation for details.\n(Alpha) This field requires the CrossNamespaceVolumeDataSource feature gate to be enabled.", + ) + + +class Resources(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + limits: Optional[Dict[str, Union[int, str]]] = Field( + default=None, + description="Limits describes the maximum amount of compute resources allowed.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", + ) + requests: Optional[Dict[str, Union[int, str]]] = Field( + default=None, + description="Requests describes the minimum amount of compute resources required.\nIf Requests is omitted for a container, it defaults to Limits if that is explicitly specified,\notherwise to an implementation-defined value. Requests cannot exceed Limits.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", + ) + + +class MatchExpression(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + key: str = Field( + ..., description="key is the label key that the selector applies to." + ) + operator: str = Field( + ..., + description="operator represents a key's relationship to a set of values.\nValid operators are In, NotIn, Exists and DoesNotExist.", + ) + values: Optional[List[str]] = Field( + default=None, + description="values is an array of string values. If the operator is In or NotIn,\nthe values array must be non-empty. If the operator is Exists or DoesNotExist,\nthe values array must be empty. This array is replaced during a strategic\nmerge patch.", + ) + + +class Selector(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + matchExpressions: Optional[List[MatchExpression]] = Field( + default=None, + description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", + ) + matchLabels: Optional[Dict[str, str]] = Field( + default=None, + description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', + ) + + +class Spec2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + accessModes: Optional[List[str]] = Field( + default=None, + description="accessModes contains the desired access modes the volume should have.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1", + ) + dataSource: Optional[DataSource] = Field( + default=None, + description="dataSource field can be used to specify either:\n* An existing VolumeSnapshot object (snapshot.storage.k8s.io/VolumeSnapshot)\n* An existing PVC (PersistentVolumeClaim)\nIf the provisioner or an external controller can support the specified data source,\nit will create a new volume based on the contents of the specified data source.\nWhen the AnyVolumeDataSource feature gate is enabled, dataSource contents will be copied to dataSourceRef,\nand dataSourceRef contents will be copied to dataSource when dataSourceRef.namespace is not specified.\nIf the namespace is specified, then dataSourceRef will not be copied to dataSource.", + ) + dataSourceRef: Optional[DataSourceRef] = Field( + default=None, + description="dataSourceRef specifies the object from which to populate the volume with data, if a non-empty\nvolume is desired. 
This may be any object from a non-empty API group (non\ncore object) or a PersistentVolumeClaim object.\nWhen this field is specified, volume binding will only succeed if the type of\nthe specified object matches some installed volume populator or dynamic\nprovisioner.\nThis field will replace the functionality of the dataSource field and as such\nif both fields are non-empty, they must have the same value. For backwards\ncompatibility, when namespace isn't specified in dataSourceRef,\nboth fields (dataSource and dataSourceRef) will be set to the same\nvalue automatically if one of them is empty and the other is non-empty.\nWhen namespace is specified in dataSourceRef,\ndataSource isn't set to the same value and must be empty.\nThere are three important differences between dataSource and dataSourceRef:\n* While dataSource only allows two specific types of objects, dataSourceRef\n allows any non-core object, as well as PersistentVolumeClaim objects.\n* While dataSource ignores disallowed values (dropping them), dataSourceRef\n preserves all values, and generates an error if a disallowed value is\n specified.\n* While dataSource only allows local objects, dataSourceRef allows objects\n in any namespaces.\n(Beta) Using this field requires the AnyVolumeDataSource feature gate to be enabled.\n(Alpha) Using the namespace field of dataSourceRef requires the CrossNamespaceVolumeDataSource feature gate to be enabled.", + ) + resources: Optional[Resources] = Field( + default=None, + description="resources represents the minimum resources the volume should have.\nIf RecoverVolumeExpansionFailure feature is enabled users are allowed to specify resource requirements\nthat are lower than previous value but must still be higher than capacity recorded in the\nstatus field of the claim.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources", + ) + selector: Optional[Selector] = Field( + default=None, + description="selector is a label query over volumes to consider for binding.", + ) + storageClassName: Optional[str] = Field( + default=None, + description="storageClassName is the name of the StorageClass required by the claim.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1", + ) + volumeAttributesClassName: Optional[str] = Field( + default=None, + description="volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim.\nIf specified, the CSI driver will create or update the volume with the attributes defined\nin the corresponding VolumeAttributesClass. This has a different purpose than storageClassName,\nit can be changed after the claim is created. 
An empty string value means that no VolumeAttributesClass\nwill be applied to the claim but it's not allowed to reset this field to empty string once it is set.\nIf unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass\nwill be set by the persistentvolume controller if it exists.\nIf the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be\nset to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource\nexists.\nMore info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/\n(Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled.", + ) + volumeMode: Optional[str] = Field( + default=None, + description="volumeMode defines what type of volume is required by the claim.\nValue of Filesystem is implied when not included in claim spec.", + ) + volumeName: Optional[str] = Field( + default=None, + description="volumeName is the binding reference to the PersistentVolume backing this claim.", + ) + + +class VolumeClaimTemplate(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + metadata: Optional[Dict[str, Any]] = Field( + default=None, + description="May contain labels and annotations that will be copied into the PVC\nwhen creating it. No other fields are allowed and will be rejected during\nvalidation.", + ) + spec: Spec2 = Field( + ..., + description="The specification for the PersistentVolumeClaim. The entire content is\ncopied unchanged into the PVC that gets created from this\ntemplate. The same fields as in a PersistentVolumeClaim\nare also valid here.", + ) + + +class Ephemeral(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + volumeClaimTemplate: Optional[VolumeClaimTemplate] = Field( + default=None, + description="Will be used to create a stand-alone PVC to provision the volume.\nThe pod in which this EphemeralVolumeSource is embedded will be the\nowner of the PVC, i.e. the PVC will be deleted together with the\npod. The name of the PVC will be `-` where\n`` is the name from the `PodSpec.Volumes` array\nentry. Pod validation will reject the pod if the concatenated name\nis not valid for a PVC (for example, too long).\n\n\nAn existing PVC with that name that is not owned by the pod\nwill *not* be used for the pod to avoid using an unrelated\nvolume by mistake. Starting the pod is then blocked until\nthe unrelated PVC is removed. If such a pre-created PVC is\nmeant to be used by the pod, the PVC has to updated with an\nowner reference to the pod once the pod exists. Normally\nthis should not be necessary, but it may be useful when\nmanually reconstructing a broken cluster.\n\n\nThis field is read-only and no changes will be made by Kubernetes\nto the PVC after it has been created.\n\n\nRequired, must not be nil.", + ) + + +class Fc(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nTODO: how do we prevent errors in the filesystem from compromising the machine', + ) + lun: Optional[int] = Field( + default=None, description="lun is Optional: FC target lun number" + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly is Optional: Defaults to false (read/write). 
ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + targetWWNs: Optional[List[str]] = Field( + default=None, + description="targetWWNs is Optional: FC target worldwide names (WWNs)", + ) + wwids: Optional[List[str]] = Field( + default=None, + description="wwids Optional: FC volume world wide identifiers (wwids)\nEither wwids or combination of targetWWNs and lun must be set, but not both simultaneously.", + ) + + +class FlexVolume(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + driver: str = Field( + ..., description="driver is the name of the driver to use for this volume." + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". The default filesystem depends on FlexVolume script.', + ) + options: Optional[Dict[str, str]] = Field( + default=None, + description="options is Optional: this field holds extra command options if any.", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly is Optional: defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is Optional: secretRef is reference to the secret object containing\nsensitive information to pass to the plugin scripts. This may be\nempty if no secret object is specified. If the secret object\ncontains more than one secret, all secrets are passed to the plugin\nscripts.", + ) + + +class Flocker(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + datasetName: Optional[str] = Field( + default=None, + description="datasetName is Name of the dataset stored as metadata -> name on the dataset for Flocker\nshould be considered as deprecated", + ) + datasetUUID: Optional[str] = Field( + default=None, + description="datasetUUID is the UUID of the dataset. This is unique identifier of a Flocker dataset", + ) + + +class GcePersistentDisk(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk\nTODO: how do we prevent errors in the filesystem from compromising the machine', + ) + partition: Optional[int] = Field( + default=None, + description='partition is the partition in the volume that you want to mount.\nIf omitted, the default is to mount by volume name.\nExamples: For volume /dev/sda1, you specify the partition as "1".\nSimilarly, the volume partition for /dev/sda is "0" (or you can leave the property empty).\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk', + ) + pdName: str = Field( + ..., + description="pdName is unique name of the PD resource in GCE. 
Used to identify the disk in GCE.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly here will force the ReadOnly setting in VolumeMounts.\nDefaults to false.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", + ) + + +class GitRepo(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + directory: Optional[str] = Field( + default=None, + description="directory is the target directory name.\nMust not contain or start with '..'. If '.' is supplied, the volume directory will be the\ngit repository. Otherwise, if specified, the volume will contain the git repository in\nthe subdirectory with the given name.", + ) + repository: str = Field(..., description="repository is the URL") + revision: Optional[str] = Field( + default=None, + description="revision is the commit hash for the specified revision.", + ) + + +class Glusterfs(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + endpoints: str = Field( + ..., + description="endpoints is the endpoint name that details Glusterfs topology.\nMore info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod", + ) + path: str = Field( + ..., + description="path is the Glusterfs volume path.\nMore info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly here will force the Glusterfs volume to be mounted with read-only permissions.\nDefaults to false.\nMore info: https://examples.k8s.io/volumes/glusterfs/README.md#create-a-pod", + ) + + +class HostPath(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + path: str = Field( + ..., + description="path of the directory on the host.\nIf the path is a symlink, it will follow the link to the real path.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath", + ) + type: Optional[str] = Field( + default=None, + description='type for HostPath Volume\nDefaults to ""\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath', + ) + + +class Iscsi(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + chapAuthDiscovery: Optional[bool] = Field( + default=None, + description="chapAuthDiscovery defines whether support iSCSI Discovery CHAP authentication", + ) + chapAuthSession: Optional[bool] = Field( + default=None, + description="chapAuthSession defines whether support iSCSI Session CHAP authentication", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". 
Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi\nTODO: how do we prevent errors in the filesystem from compromising the machine', + ) + initiatorName: Optional[str] = Field( + default=None, + description="initiatorName is the custom iSCSI Initiator Name.\nIf initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface\n: will be created for the connection.", + ) + iqn: str = Field(..., description="iqn is the target iSCSI Qualified Name.") + iscsiInterface: Optional[str] = Field( + default=None, + description="iscsiInterface is the interface Name that uses an iSCSI transport.\nDefaults to 'default' (tcp).", + ) + lun: int = Field(..., description="lun represents iSCSI Target Lun number.") + portals: Optional[List[str]] = Field( + default=None, + description="portals is the iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port\nis other than default (typically TCP ports 860 and 3260).", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly here will force the ReadOnly setting in VolumeMounts.\nDefaults to false.", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is the CHAP Secret for iSCSI target and initiator authentication", + ) + targetPortal: str = Field( + ..., + description="targetPortal is iSCSI Target Portal. The Portal is either an IP or ip_addr:port if the port\nis other than default (typically TCP ports 860 and 3260).", + ) + + +class Nfs(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + path: str = Field( + ..., + description="path that is exported by the NFS server.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#nfs", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly here will force the NFS export to be mounted with read-only permissions.\nDefaults to false.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#nfs", + ) + server: str = Field( + ..., + description="server is the hostname or IP address of the NFS server.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#nfs", + ) + + +class PersistentVolumeClaim(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + claimName: str = Field( + ..., + description="claimName is the name of a PersistentVolumeClaim in the same namespace as the pod using this volume.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly Will force the ReadOnly setting in VolumeMounts.\nDefault false.", + ) + + +class PhotonPersistentDisk(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.', + ) + pdID: str = Field( + ..., + description="pdID is the ID that identifies Photon Controller persistent disk", + ) + + +class PortworxVolume(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fSType represents the filesystem type to mount\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs". 
Implicitly inferred to be "ext4" if unspecified.', + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + volumeID: str = Field( + ..., description="volumeID uniquely identifies a Portworx volume" + ) + + +class LabelSelector(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + matchExpressions: Optional[List[MatchExpression]] = Field( + default=None, + description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", + ) + matchLabels: Optional[Dict[str, str]] = Field( + default=None, + description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', + ) + + +class ClusterTrustBundle(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + labelSelector: Optional[LabelSelector] = Field( + default=None, + description='Select all ClusterTrustBundles that match this label selector. Only has\neffect if signerName is set. Mutually-exclusive with name. If unset,\ninterpreted as "match nothing". If set but empty, interpreted as "match\neverything".', + ) + name: Optional[str] = Field( + default=None, + description="Select a single ClusterTrustBundle by object name. Mutually-exclusive\nwith signerName and labelSelector.", + ) + optional: Optional[bool] = Field( + default=None, + description="If true, don't block pod startup if the referenced ClusterTrustBundle(s)\naren't available. If using name, then the named ClusterTrustBundle is\nallowed not to exist. If using signerName, then the combination of\nsignerName and labelSelector is allowed to match zero\nClusterTrustBundles.", + ) + path: str = Field( + ..., description="Relative path from the volume root to write the bundle." + ) + signerName: Optional[str] = Field( + default=None, + description="Select all ClusterTrustBundles that match this signer name.\nMutually-exclusive with name. The contents of all selected\nClusterTrustBundles will be unified and deduplicated.", + ) + + +class Item2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + key: str = Field(..., description="key is the key to project.") + mode: Optional[int] = Field( + default=None, + description="mode is Optional: mode bits used to set permissions on this file.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="path is the relative path of the file to map the key to.\nMay not be an absolute path.\nMay not contain the path element '..'.\nMay not start with the string '..'.", + ) + + +class ConfigMap1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + items: Optional[List[Item2]] = Field( + default=None, + description="items if unspecified, each key-value pair in the Data field of the referenced\nConfigMap will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. 
If a key is specified which is not present in the ConfigMap,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional specify whether the ConfigMap or its keys must be defined", + ) + + +class Item3(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fieldRef: Optional[FieldRef] = Field( + default=None, + description="Required: Selects a field of the pod: only annotations, labels, name, namespace and uid are supported.", + ) + mode: Optional[int] = Field( + default=None, + description="Optional: mode bits used to set permissions on this file, must be an octal value\nbetween 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'", + ) + resourceFieldRef: Optional[ResourceFieldRef] = Field( + default=None, + description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.", + ) + + +class DownwardAPI1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + items: Optional[List[Item3]] = Field( + default=None, description="Items is a list of DownwardAPIVolume file" + ) + + +class Item4(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + key: str = Field(..., description="key is the key to project.") + mode: Optional[int] = Field( + default=None, + description="mode is Optional: mode bits used to set permissions on this file.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="path is the relative path of the file to map the key to.\nMay not be an absolute path.\nMay not contain the path element '..'.\nMay not start with the string '..'.", + ) + + +class Secret(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + items: Optional[List[Item4]] = Field( + default=None, + description="items if unspecified, each key-value pair in the Data field of the referenced\nSecret will be projected into the volume as a file whose name is the\nkey and content is the value. 
If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the Secret,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional field specify whether the Secret or its key must be defined", + ) + + +class ServiceAccountToken(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + audience: Optional[str] = Field( + default=None, + description="audience is the intended audience of the token. A recipient of a token\nmust identify itself with an identifier specified in the audience of the\ntoken, and otherwise should reject the token. The audience defaults to the\nidentifier of the apiserver.", + ) + expirationSeconds: Optional[int] = Field( + default=None, + description="expirationSeconds is the requested duration of validity of the service\naccount token. As the token approaches expiration, the kubelet volume\nplugin will proactively rotate the service account token. The kubelet will\nstart trying to rotate the token if the token is older than 80 percent of\nits time to live or if the token is older than 24 hours.Defaults to 1 hour\nand must be at least 10 minutes.", + ) + path: str = Field( + ..., + description="path is the path relative to the mount point of the file to project the\ntoken into.", + ) + + +class Source1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + clusterTrustBundle: Optional[ClusterTrustBundle] = Field( + default=None, + description="ClusterTrustBundle allows a pod to access the `.spec.trustBundle` field\nof ClusterTrustBundle objects in an auto-updating file.\n\n\nAlpha, gated by the ClusterTrustBundleProjection feature gate.\n\n\nClusterTrustBundle objects can either be selected by name, or by the\ncombination of signer name and a label selector.\n\n\nKubelet performs aggressive normalization of the PEM contents written\ninto the pod filesystem. Esoteric PEM features such as inter-block\ncomments and block headers are stripped. 
Certificates are deduplicated.\nThe ordering of certificates within the file is arbitrary, and Kubelet\nmay change the order over time.",
+    )
+    configMap: Optional[ConfigMap1] = Field(
+        default=None,
+        description="configMap information about the configMap data to project",
+    )
+    downwardAPI: Optional[DownwardAPI1] = Field(
+        default=None,
+        description="downwardAPI information about the downwardAPI data to project",
+    )
+    secret: Optional[Secret] = Field(
+        default=None, description="secret information about the secret data to project"
+    )
+    serviceAccountToken: Optional[ServiceAccountToken] = Field(
+        default=None,
+        description="serviceAccountToken is information about the serviceAccountToken data to project",
+    )
+
+
+class Projected(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    defaultMode: Optional[int] = Field(
+        default=None,
+        description="defaultMode are the mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.",
+    )
+    sources: Optional[List[Source1]] = Field(
+        default=None, description="sources is the list of volume projections"
+    )
+
+
+class Quobyte(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    group: Optional[str] = Field(
+        default=None, description="group to map volume access to\nDefault is no group"
+    )
+    readOnly: Optional[bool] = Field(
+        default=None,
+        description="readOnly here will force the Quobyte volume to be mounted with read-only permissions.\nDefaults to false.",
+    )
+    registry: str = Field(
+        ...,
+        description="registry represents a single or multiple Quobyte Registry services\nspecified as a string as host:port pair (multiple entries are separated with commas)\nwhich acts as the central registry for volumes",
+    )
+    tenant: Optional[str] = Field(
+        default=None,
+        description="tenant owning the given Quobyte volume in the Backend\nUsed with dynamically provisioned Quobyte volumes, value is set by the plugin",
+    )
+    user: Optional[str] = Field(
+        default=None,
+        description="user to map volume access to\nDefaults to serviceaccount user",
+    )
+    volume: str = Field(
+        ...,
+        description="volume is a string that references an already created Quobyte volume by name.",
+    )
+
+
+class Rbd(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    fsType: Optional[str] = Field(
+        default=None,
+        description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs".
Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#rbd\nTODO: how do we prevent errors in the filesystem from compromising the machine', + ) + image: str = Field( + ..., + description="image is the rados image name.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + keyring: Optional[str] = Field( + default=None, + description="keyring is the path to key ring for RBDUser.\nDefault is /etc/ceph/keyring.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + monitors: List[str] = Field( + ..., + description="monitors is a collection of Ceph monitors.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + pool: Optional[str] = Field( + default=None, + description="pool is the rados pool name.\nDefault is rbd.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly here will force the ReadOnly setting in VolumeMounts.\nDefaults to false.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is name of the authentication secret for RBDUser. If provided\noverrides keyring.\nDefault is nil.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + user: Optional[str] = Field( + default=None, + description="user is the rados user name.\nDefault is admin.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + + +class ScaleIO(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs".\nDefault is "xfs".', + ) + gateway: str = Field( + ..., description="gateway is the host address of the ScaleIO API Gateway." + ) + protectionDomain: Optional[str] = Field( + default=None, + description="protectionDomain is the name of the ScaleIO Protection Domain for the configured storage.", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly Defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + secretRef: SecretRef = Field( + ..., + description="secretRef references to the secret for ScaleIO user and other\nsensitive information. 
If this is not provided, Login operation will fail.", + ) + sslEnabled: Optional[bool] = Field( + default=None, + description="sslEnabled Flag enable/disable SSL communication with Gateway, default false", + ) + storageMode: Optional[str] = Field( + default=None, + description="storageMode indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned.\nDefault is ThinProvisioned.", + ) + storagePool: Optional[str] = Field( + default=None, + description="storagePool is the ScaleIO Storage Pool associated with the protection domain.", + ) + system: str = Field( + ..., + description="system is the name of the storage system as configured in ScaleIO.", + ) + volumeName: Optional[str] = Field( + default=None, + description="volumeName is the name of a volume already created in the ScaleIO system\nthat is associated with this volume source.", + ) + + +class Secret1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + defaultMode: Optional[int] = Field( + default=None, + description="defaultMode is Optional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values\nfor mode bits. Defaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + items: Optional[List[Item4]] = Field( + default=None, + description="items If unspecified, each key-value pair in the Data field of the referenced\nSecret will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the Secret,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional field specify whether the Secret or its keys must be defined", + ) + secretName: Optional[str] = Field( + default=None, + description="secretName is the name of the secret in the pod's namespace to use.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#secret", + ) + + +class Storageos(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.', + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef specifies the secret to use for obtaining the StorageOS API\ncredentials. If not specified, default values will be attempted.", + ) + volumeName: Optional[str] = Field( + default=None, + description="volumeName is the human-readable name of the StorageOS volume. Volume\nnames are only unique within a namespace.", + ) + volumeNamespace: Optional[str] = Field( + default=None, + description='volumeNamespace specifies the scope of the volume within StorageOS. 
If no\nnamespace is specified then the Pod\'s namespace will be used. This allows the\nKubernetes name scoping to be mirrored within StorageOS for tighter integration.\nSet VolumeName to any name to override the default behaviour.\nSet to "default" if you are not using namespaces within StorageOS.\nNamespaces that do not pre-exist within StorageOS will be created.', + ) + + +class VsphereVolume(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.', + ) + storagePolicyID: Optional[str] = Field( + default=None, + description="storagePolicyID is the storage Policy Based Management (SPBM) profile ID associated with the StoragePolicyName.", + ) + storagePolicyName: Optional[str] = Field( + default=None, + description="storagePolicyName is the storage Policy Based Management (SPBM) profile name.", + ) + volumePath: str = Field( + ..., description="volumePath is the path that identifies vSphere volume vmdk" + ) + + +class Volume(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + awsElasticBlockStore: Optional[AwsElasticBlockStore] = Field( + default=None, + description="awsElasticBlockStore represents an AWS Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", + ) + azureDisk: Optional[AzureDisk] = Field( + default=None, + description="azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod.", + ) + azureFile: Optional[AzureFile] = Field( + default=None, + description="azureFile represents an Azure File Service mount on the host and bind mount to the pod.", + ) + cephfs: Optional[Cephfs] = Field( + default=None, + description="cephFS represents a Ceph FS mount on the host that shares a pod's lifetime", + ) + cinder: Optional[Cinder] = Field( + default=None, + description="cinder represents a cinder volume attached and mounted on kubelets host machine.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md", + ) + configMap: Optional[ConfigMap] = Field( + default=None, + description="configMap represents a configMap that should populate this volume", + ) + csi: Optional[Csi] = Field( + default=None, + description="csi (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers (Beta feature).", + ) + downwardAPI: Optional[DownwardAPI] = Field( + default=None, + description="downwardAPI represents downward API about the pod that should populate this volume", + ) + emptyDir: Optional[EmptyDir] = Field( + default=None, + description="emptyDir represents a temporary directory that shares a pod's lifetime.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir", + ) + ephemeral: Optional[Ephemeral] = Field( + default=None, + description="ephemeral represents a volume that is handled by a cluster storage driver.\nThe volume's lifecycle is tied to the pod that defines it - it will be created before the pod starts,\nand deleted when the pod is removed.\n\n\nUse this if:\na) the volume is only needed while the pod runs,\nb) features of normal volumes like restoring from snapshot or capacity\n tracking are needed,\nc) the storage driver is specified through a storage class, and\nd) the storage driver supports dynamic volume 
provisioning through\n a PersistentVolumeClaim (see EphemeralVolumeSource for more\n information on the connection between this volume type\n and PersistentVolumeClaim).\n\n\nUse PersistentVolumeClaim or one of the vendor-specific\nAPIs for volumes that persist for longer than the lifecycle\nof an individual pod.\n\n\nUse CSI for light-weight local ephemeral volumes if the CSI driver is meant to\nbe used that way - see the documentation of the driver for\nmore information.\n\n\nA pod can use both types of ephemeral volumes and\npersistent volumes at the same time.", + ) + fc: Optional[Fc] = Field( + default=None, + description="fc represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod.", + ) + flexVolume: Optional[FlexVolume] = Field( + default=None, + description="flexVolume represents a generic volume resource that is\nprovisioned/attached using an exec based plugin.", + ) + flocker: Optional[Flocker] = Field( + default=None, + description="flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running", + ) + gcePersistentDisk: Optional[GcePersistentDisk] = Field( + default=None, + description="gcePersistentDisk represents a GCE Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", + ) + gitRepo: Optional[GitRepo] = Field( + default=None, + description="gitRepo represents a git repository at a particular revision.\nDEPRECATED: GitRepo is deprecated. To provision a container with a git repo, mount an\nEmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir\ninto the Pod's container.", + ) + glusterfs: Optional[Glusterfs] = Field( + default=None, + description="glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime.\nMore info: https://examples.k8s.io/volumes/glusterfs/README.md", + ) + hostPath: Optional[HostPath] = Field( + default=None, + description="hostPath represents a pre-existing file or directory on the host\nmachine that is directly exposed to the container. This is generally\nused for system agents or other privileged things that are allowed\nto see the host machine. 
Most containers will NOT need this.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath\n---\nTODO(jonesdl) We need to restrict who can use host directory mounts and who can/can not\nmount host directories as read/write.", + ) + iscsi: Optional[Iscsi] = Field( + default=None, + description="iscsi represents an ISCSI Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://examples.k8s.io/volumes/iscsi/README.md", + ) + name: str = Field(..., description="Name of the Build Volume") + nfs: Optional[Nfs] = Field( + default=None, + description="nfs represents an NFS mount on the host that shares a pod's lifetime\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#nfs", + ) + persistentVolumeClaim: Optional[PersistentVolumeClaim] = Field( + default=None, + description="persistentVolumeClaimVolumeSource represents a reference to a\nPersistentVolumeClaim in the same namespace.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims", + ) + photonPersistentDisk: Optional[PhotonPersistentDisk] = Field( + default=None, + description="photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine", + ) + portworxVolume: Optional[PortworxVolume] = Field( + default=None, + description="portworxVolume represents a portworx volume attached and mounted on kubelets host machine", + ) + projected: Optional[Projected] = Field( + default=None, + description="projected items for all in one resources secrets, configmaps, and downward API", + ) + quobyte: Optional[Quobyte] = Field( + default=None, + description="quobyte represents a Quobyte mount on the host that shares a pod's lifetime", + ) + rbd: Optional[Rbd] = Field( + default=None, + description="rbd represents a Rados Block Device mount on the host that shares a pod's lifetime.\nMore info: https://examples.k8s.io/volumes/rbd/README.md", + ) + scaleIO: Optional[ScaleIO] = Field( + default=None, + description="scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes.", + ) + secret: Optional[Secret1] = Field( + default=None, + description="secret represents a secret that should populate this volume.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#secret", + ) + storageos: Optional[Storageos] = Field( + default=None, + description="storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes.", + ) + vsphereVolume: Optional[VsphereVolume] = Field( + default=None, + description="vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine", + ) + + +class Spec1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + env: Optional[List[EnvItem]] = Field( + default=None, + description="Env contains additional environment variables that should be passed to the build container", + ) + nodeSelector: Optional[Dict[str, str]] = Field( + default=None, + description="NodeSelector is a selector which must be true for the pod to fit on a node.\nSelector which must match a node's labels for the pod to be scheduled on that node.\nMore info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/", + ) + output: Output = Field( + ..., + description="Output refers to the location where the built image would be pushed.", + ) + paramValues: Optional[List[ParamValue]] = Field( + default=None, + description="Params is a list of key/value that could be used\nto set strategy parameters", + ) + 
retention: Optional[Retention] = Field( + default=None, description="Contains information about retention params" + ) + schedulerName: Optional[str] = Field( + default=None, + description="SchedulerName specifies the scheduler to be used to dispatch the Pod", + ) + source: Optional[Source] = Field( + default=None, + description="Source refers to the location where the source code is,\nthis could be a git repository, a local source or an oci\nartifact", + ) + strategy: Strategy = Field( + ..., + description="Strategy references the BuildStrategy to use to build the container\nimage.", + ) + timeout: Optional[str] = Field( + default=None, + description="Timeout defines the maximum amount of time the Build should take to execute.", + ) + tolerations: Optional[List[Toleration]] = Field( + default=None, description="If specified, the pod's tolerations." + ) + trigger: Optional[Trigger] = Field( + default=None, + description="Trigger defines the scenarios where a new build should be triggered.", + ) + volumes: Optional[List[Volume]] = Field( + default=None, + description="Volumes contains volume Overrides of the BuildStrategy volumes in case those are allowed\nto be overridden. Must only contain volumes that exist in the corresponding BuildStrategy", + ) + + +class Build(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + name: Optional[str] = Field( + default=None, + description="Name of the referent; More info: http://kubernetes.io/docs/user-guide/identifiers#names", + ) + spec: Optional[Spec1] = Field( + default=None, description="Spec refers to an embedded build specification" + ) + + +class ValueFrom1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + configMapKeyRef: Optional[ConfigMapKeyRef] = Field( + default=None, description="Selects a key of a ConfigMap." + ) + fieldRef: Optional[FieldRef] = Field( + default=None, + description="Selects a field of the pod: supports metadata.name, metadata.namespace, `metadata.labels['']`, `metadata.annotations['']`,\nspec.nodeName, spec.serviceAccountName, status.hostIP, status.podIP, status.podIPs.", + ) + resourceFieldRef: Optional[ResourceFieldRef] = Field( + default=None, + description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, limits.ephemeral-storage, requests.cpu, requests.memory and requests.ephemeral-storage) are currently supported.", + ) + secretKeyRef: Optional[SecretKeyRef] = Field( + default=None, description="Selects a key of a secret in the pod's namespace" + ) + + +class EnvItem1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + name: str = Field( + ..., description="Name of the environment variable. Must be a C_IDENTIFIER." + ) + value: Optional[str] = Field( + default=None, + description='Variable references $(VAR_NAME) are expanded\nusing the previously defined environment variables in the container and\nany service environment variables. If a variable cannot be resolved,\nthe reference in the input string will be unchanged. Double $$ are reduced\nto a single $, which allows for escaping the $(VAR_NAME) syntax: i.e.\n"$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)".\nEscaped references will never be expanded, regardless of whether the variable\nexists or not.\nDefaults to "".', + ) + valueFrom: Optional[ValueFrom1] = Field( + default=None, + description="Source for the environment variable's value. 
Cannot be used if value is not empty.", + ) + + +class Ignore1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + id: Optional[List[str]] = Field( + default=None, + description="ID references the security issues to be ignored in vulnerability scan", + ) + severity: Optional[Severity] = Field( + default=None, + description='Severity denotes the severity levels of security issues to be ignored, valid values are:\n- "low": it will exclude low severity vulnerabilities, displaying only medium, high and critical vulnerabilities\n- "medium": it will exclude low and medium severity vulnerabilities, displaying only high and critical vulnerabilities\n- "high": it will exclude low, medium and high severity vulnerabilities, displaying only the critical vulnerabilities', + ) + unfixed: Optional[bool] = Field( + default=None, + description="Unfixed indicates to ignore vulnerabilities for which no fix exists", + ) + + +class VulnerabilityScan1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + enabled: Optional[bool] = Field( + default=None, + description="Enabled indicates whether to run vulnerability scan for image", + ) + failOnFinding: Optional[bool] = Field( + default=None, + description="FailOnFinding indicates whether to fail the build run if the vulnerability scan results in vulnerabilities", + ) + ignore: Optional[Ignore1] = Field( + default=None, + description="Ignore refers to ignore options for vulnerability scan", + ) + + +class Output1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + annotations: Optional[Dict[str, str]] = Field( + default=None, + description="Annotations references the additional annotations to be applied on the image", + ) + image: str = Field(..., description="Image is the reference of the image.") + insecure: Optional[bool] = Field( + default=None, description="Insecure defines whether the registry is not secure" + ) + labels: Optional[Dict[str, str]] = Field( + default=None, + description="Labels references the additional labels to be applied on the image", + ) + pushSecret: Optional[str] = Field( + default=None, + description="Describes the secret name for pushing a container image.", + ) + timestamp: Optional[str] = Field( + default=None, + description='Timestamp references the optional image timestamp to be set, valid values are:\n- "Zero", to set 00:00:00 UTC on 1 January 1970\n- "SourceTimestamp", to set the source timestamp dereived from the input source\n- "BuildTimestamp", to set the timestamp of the current build itself\n- Parsable integer number defined as the epoch seconds\n- or nil/empty to not set any specific timestamp', + ) + vulnerabilityScan: Optional[VulnerabilityScan1] = Field( + default=None, + description="VulnerabilityScan provides configurations about running a scan for your generated image", + ) + + +class Value1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + configMapValue: Optional[ConfigMapValue] = Field( + default=None, description="The ConfigMap value of the parameter" + ) + secretValue: Optional[SecretValue] = Field( + default=None, description="The secret value of the parameter" + ) + value: Optional[str] = Field(default=None, description="The value of the parameter") + + +class ParamValue1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + configMapValue: Optional[ConfigMapValue] = Field( + default=None, description="The ConfigMap value of the parameter" + ) + name: str = Field(..., description="Name of the parameter") + secretValue: Optional[SecretValue] = Field( + 
default=None, description="The secret value of the parameter" + ) + value: Optional[str] = Field(default=None, description="The value of the parameter") + values: Optional[List[Value1]] = Field( + default=None, description="Values of an array parameter" + ) + + +class Retention1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + ttlAfterFailed: Optional[str] = Field( + default=None, + description="TTLAfterFailed defines the maximum duration of time the failed buildrun should exist.", + ) + ttlAfterSucceeded: Optional[str] = Field( + default=None, + description="TTLAfterSucceeded defines the maximum duration of time the succeeded buildrun should exist.", + ) + + +class Source2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + local: Optional[Local] = Field( + default=None, + description="Local contains the details for the source of type Local", + ) + type: str = Field( + ..., + description="Type is the BuildRunSource qualifier, the type of the source.\nOnly Local is supported.", + ) + + +class Cephfs1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + monitors: List[str] = Field( + ..., + description="monitors is Required: Monitors is a collection of Ceph monitors\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + path: Optional[str] = Field( + default=None, + description="path is Optional: Used as the mounted root, rather than the full Ceph tree, default is /", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly is Optional: Defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + secretFile: Optional[str] = Field( + default=None, + description="secretFile is Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is Optional: SecretRef is reference to the authentication secret for User, default is empty.\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + user: Optional[str] = Field( + default=None, + description="user is optional: User is the rados user name, default is admin\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + + +class Cinder1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md', + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly defaults to false (read/write). 
ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is optional: points to a secret object containing parameters used to connect\nto OpenStack.", + ) + volumeID: str = Field( + ..., + description="volumeID used to identify the volume in cinder.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md", + ) + + +class ConfigMap2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + defaultMode: Optional[int] = Field( + default=None, + description="defaultMode is optional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDefaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + items: Optional[List[Item4]] = Field( + default=None, + description="items if unspecified, each key-value pair in the Data field of the referenced\nConfigMap will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the ConfigMap,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional specify whether the ConfigMap or its keys must be defined", + ) + + +class Csi1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + driver: str = Field( + ..., + description="driver is the name of the CSI driver that handles this volume.\nConsult with your admin for the correct name as registered in the cluster.", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType to mount. Ex. "ext4", "xfs", "ntfs".\nIf not provided, the empty value is passed to the associated CSI driver\nwhich will determine the default filesystem to apply.', + ) + nodePublishSecretRef: Optional[NodePublishSecretRef] = Field( + default=None, + description="nodePublishSecretRef is a reference to the secret object containing\nsensitive information to pass to the CSI driver to complete the CSI\nNodePublishVolume and NodeUnpublishVolume calls.\nThis field is optional, and may be empty if no secret is required. 
If the\nsecret object contains more than one secret, all secret references are passed.", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly specifies a read-only configuration for the volume.\nDefaults to false (read/write).", + ) + volumeAttributes: Optional[Dict[str, str]] = Field( + default=None, + description="volumeAttributes stores driver-specific properties that are passed to the CSI\ndriver. Consult your driver's documentation for supported values.", + ) + + +class Item7(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fieldRef: Optional[FieldRef] = Field( + default=None, + description="Required: Selects a field of the pod: only annotations, labels, name, namespace and uid are supported.", + ) + mode: Optional[int] = Field( + default=None, + description="Optional: mode bits used to set permissions on this file, must be an octal value\nbetween 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'", + ) + resourceFieldRef: Optional[ResourceFieldRef] = Field( + default=None, + description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.", + ) + + +class DownwardAPI2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + defaultMode: Optional[int] = Field( + default=None, + description="Optional: mode bits to use on created files by default. Must be a\nOptional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDefaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + items: Optional[List[Item7]] = Field( + default=None, description="Items is a list of downward API volume file" + ) + + +class Selector1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + matchExpressions: Optional[List[MatchExpression]] = Field( + default=None, + description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", + ) + matchLabels: Optional[Dict[str, str]] = Field( + default=None, + description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". 
The requirements are ANDed.',
+    )
+
+
+class Spec3(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    accessModes: Optional[List[str]] = Field(
+        default=None,
+        description="accessModes contains the desired access modes the volume should have.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1",
+    )
+    dataSource: Optional[DataSource] = Field(
+        default=None,
+        description="dataSource field can be used to specify either:\n* An existing VolumeSnapshot object (snapshot.storage.k8s.io/VolumeSnapshot)\n* An existing PVC (PersistentVolumeClaim)\nIf the provisioner or an external controller can support the specified data source,\nit will create a new volume based on the contents of the specified data source.\nWhen the AnyVolumeDataSource feature gate is enabled, dataSource contents will be copied to dataSourceRef,\nand dataSourceRef contents will be copied to dataSource when dataSourceRef.namespace is not specified.\nIf the namespace is specified, then dataSourceRef will not be copied to dataSource.",
+    )
+    dataSourceRef: Optional[DataSourceRef] = Field(
+        default=None,
+        description="dataSourceRef specifies the object from which to populate the volume with data, if a non-empty\nvolume is desired. This may be any object from a non-empty API group (non\ncore object) or a PersistentVolumeClaim object.\nWhen this field is specified, volume binding will only succeed if the type of\nthe specified object matches some installed volume populator or dynamic\nprovisioner.\nThis field will replace the functionality of the dataSource field and as such\nif both fields are non-empty, they must have the same value. For backwards\ncompatibility, when namespace isn't specified in dataSourceRef,\nboth fields (dataSource and dataSourceRef) will be set to the same\nvalue automatically if one of them is empty and the other is non-empty.\nWhen namespace is specified in dataSourceRef,\ndataSource isn't set to the same value and must be empty.\nThere are three important differences between dataSource and dataSourceRef:\n* While dataSource only allows two specific types of objects, dataSourceRef\n allows any non-core object, as well as PersistentVolumeClaim objects.\n* While dataSource ignores disallowed values (dropping them), dataSourceRef\n preserves all values, and generates an error if a disallowed value is\n specified.\n* While dataSource only allows local objects, dataSourceRef allows objects\n in any namespaces.\n(Beta) Using this field requires the AnyVolumeDataSource feature gate to be enabled.\n(Alpha) Using the namespace field of dataSourceRef requires the CrossNamespaceVolumeDataSource feature gate to be enabled.",
+    )
+    resources: Optional[Resources] = Field(
+        default=None,
+        description="resources represents the minimum resources the volume should have.\nIf RecoverVolumeExpansionFailure feature is enabled users are allowed to specify resource requirements\nthat are lower than previous value but must still be higher than capacity recorded in the\nstatus field of the claim.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources",
+    )
+    selector: Optional[Selector1] = Field(
+        default=None,
+        description="selector is a label query over volumes to consider for binding.",
+    )
+    storageClassName: Optional[str] = Field(
+        default=None,
+        description="storageClassName is the name of the StorageClass required by the claim.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1",
+    )
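# --- Editorial sketch, not part of this patch: how these generated models are
# meant to be used. `Selector1` above mirrors the Kubernetes LabelSelector, so
# `matchLabels` entries and `matchExpressions` are ANDed when the claim is
# bound. Assumptions: the import path below is hypothetical, and
# `MatchExpression` (generated earlier in this module) is assumed to expose the
# usual LabelSelectorRequirement fields (key, operator, values).
from renku_data_services.session.crs import MatchExpression, Selector1

selector = Selector1(
    matchLabels={"app": "shipwright-build"},
    matchExpressions=[MatchExpression(key="tier", operator="In", values=["ci"])],
)
# Pydantic v2's model_dump(exclude_none=True) drops the unset optional fields
# and yields the JSON shape expected in a PVC spec's `selector` stanza.
print(selector.model_dump(exclude_none=True))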
+    volumeAttributesClassName: Optional[str] = Field(
+        default=None,
+        description="volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim.\nIf specified, the CSI driver will create or update the volume with the attributes defined\nin the corresponding VolumeAttributesClass. This has a different purpose than storageClassName,\nit can be changed after the claim is created. An empty string value means that no VolumeAttributesClass\nwill be applied to the claim but it's not allowed to reset this field to empty string once it is set.\nIf unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass\nwill be set by the persistentvolume controller if it exists.\nIf the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be\nset to a Pending state, as reflected by the modifyVolumeStatus field, until such a resource\nexists.\nMore info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/\n(Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled.",
+    )
+    volumeMode: Optional[str] = Field(
+        default=None,
+        description="volumeMode defines what type of volume is required by the claim.\nValue of Filesystem is implied when not included in claim spec.",
+    )
+    volumeName: Optional[str] = Field(
+        default=None,
+        description="volumeName is the binding reference to the PersistentVolume backing this claim.",
+    )
+
+
+class VolumeClaimTemplate1(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    metadata: Optional[Dict[str, Any]] = Field(
+        default=None,
+        description="May contain labels and annotations that will be copied into the PVC\nwhen creating it. No other fields are allowed and will be rejected during\nvalidation.",
+    )
+    spec: Spec3 = Field(
+        ...,
+        description="The specification for the PersistentVolumeClaim. The entire content is\ncopied unchanged into the PVC that gets created from this\ntemplate. The same fields as in a PersistentVolumeClaim\nare also valid here.",
+    )
+
+
+class Ephemeral1(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    volumeClaimTemplate: Optional[VolumeClaimTemplate1] = Field(
+        default=None,
+        description="Will be used to create a stand-alone PVC to provision the volume.\nThe pod in which this EphemeralVolumeSource is embedded will be the\nowner of the PVC, i.e. the PVC will be deleted together with the\npod. The name of the PVC will be `<pod name>-<volume name>` where\n`<volume name>` is the name from the `PodSpec.Volumes` array\nentry. Pod validation will reject the pod if the concatenated name\nis not valid for a PVC (for example, too long).\n\n\nAn existing PVC with that name that is not owned by the pod\nwill *not* be used for the pod to avoid using an unrelated\nvolume by mistake. Starting the pod is then blocked until\nthe unrelated PVC is removed. If such a pre-created PVC is\nmeant to be used by the pod, the PVC has to be updated with an\nowner reference to the pod once the pod exists. Normally\nthis should not be necessary, but it may be useful when\nmanually reconstructing a broken cluster.\n\n\nThis field is read-only and no changes will be made by Kubernetes\nto the PVC after it has been created.\n\n\nRequired, must not be nil.",
+    )
+
+
+class FlexVolume1(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    driver: str = Field(
+        ..., description="driver is the name of the driver to use for this volume."
+ ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". The default filesystem depends on FlexVolume script.', + ) + options: Optional[Dict[str, str]] = Field( + default=None, + description="options is Optional: this field holds extra command options if any.", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly is Optional: defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is Optional: secretRef is reference to the secret object containing\nsensitive information to pass to the plugin scripts. This may be\nempty if no secret object is specified. If the secret object\ncontains more than one secret, all secrets are passed to the plugin\nscripts.", + ) + + +class Iscsi1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + chapAuthDiscovery: Optional[bool] = Field( + default=None, + description="chapAuthDiscovery defines whether support iSCSI Discovery CHAP authentication", + ) + chapAuthSession: Optional[bool] = Field( + default=None, + description="chapAuthSession defines whether support iSCSI Session CHAP authentication", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi\nTODO: how do we prevent errors in the filesystem from compromising the machine', + ) + initiatorName: Optional[str] = Field( + default=None, + description="initiatorName is the custom iSCSI Initiator Name.\nIf initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface\n: will be created for the connection.", + ) + iqn: str = Field(..., description="iqn is the target iSCSI Qualified Name.") + iscsiInterface: Optional[str] = Field( + default=None, + description="iscsiInterface is the interface Name that uses an iSCSI transport.\nDefaults to 'default' (tcp).", + ) + lun: int = Field(..., description="lun represents iSCSI Target Lun number.") + portals: Optional[List[str]] = Field( + default=None, + description="portals is the iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port\nis other than default (typically TCP ports 860 and 3260).", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly here will force the ReadOnly setting in VolumeMounts.\nDefaults to false.", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is the CHAP Secret for iSCSI target and initiator authentication", + ) + targetPortal: str = Field( + ..., + description="targetPortal is iSCSI Target Portal. The Portal is either an IP or ip_addr:port if the port\nis other than default (typically TCP ports 860 and 3260).", + ) + + +class LabelSelector1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + matchExpressions: Optional[List[MatchExpression]] = Field( + default=None, + description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", + ) + matchLabels: Optional[Dict[str, str]] = Field( + default=None, + description='matchLabels is a map of {key,value} pairs. 
A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', + ) + + +class ClusterTrustBundle1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + labelSelector: Optional[LabelSelector1] = Field( + default=None, + description='Select all ClusterTrustBundles that match this label selector. Only has\neffect if signerName is set. Mutually-exclusive with name. If unset,\ninterpreted as "match nothing". If set but empty, interpreted as "match\neverything".', + ) + name: Optional[str] = Field( + default=None, + description="Select a single ClusterTrustBundle by object name. Mutually-exclusive\nwith signerName and labelSelector.", + ) + optional: Optional[bool] = Field( + default=None, + description="If true, don't block pod startup if the referenced ClusterTrustBundle(s)\naren't available. If using name, then the named ClusterTrustBundle is\nallowed not to exist. If using signerName, then the combination of\nsignerName and labelSelector is allowed to match zero\nClusterTrustBundles.", + ) + path: str = Field( + ..., description="Relative path from the volume root to write the bundle." + ) + signerName: Optional[str] = Field( + default=None, + description="Select all ClusterTrustBundles that match this signer name.\nMutually-exclusive with name. The contents of all selected\nClusterTrustBundles will be unified and deduplicated.", + ) + + +class Item8(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + key: str = Field(..., description="key is the key to project.") + mode: Optional[int] = Field( + default=None, + description="mode is Optional: mode bits used to set permissions on this file.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="path is the relative path of the file to map the key to.\nMay not be an absolute path.\nMay not contain the path element '..'.\nMay not start with the string '..'.", + ) + + +class ConfigMap3(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + items: Optional[List[Item8]] = Field( + default=None, + description="items if unspecified, each key-value pair in the Data field of the referenced\nConfigMap will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the ConfigMap,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. 
apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional specify whether the ConfigMap or its keys must be defined", + ) + + +class Item9(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fieldRef: Optional[FieldRef] = Field( + default=None, + description="Required: Selects a field of the pod: only annotations, labels, name, namespace and uid are supported.", + ) + mode: Optional[int] = Field( + default=None, + description="Optional: mode bits used to set permissions on this file, must be an octal value\nbetween 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'", + ) + resourceFieldRef: Optional[ResourceFieldRef] = Field( + default=None, + description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.", + ) + + +class DownwardAPI3(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + items: Optional[List[Item9]] = Field( + default=None, description="Items is a list of DownwardAPIVolume file" + ) + + +class Item10(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + key: str = Field(..., description="key is the key to project.") + mode: Optional[int] = Field( + default=None, + description="mode is Optional: mode bits used to set permissions on this file.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="path is the relative path of the file to map the key to.\nMay not be an absolute path.\nMay not contain the path element '..'.\nMay not start with the string '..'.", + ) + + +class Secret2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + items: Optional[List[Item10]] = Field( + default=None, + description="items if unspecified, each key-value pair in the Data field of the referenced\nSecret will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the Secret,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. 
Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional field specify whether the Secret or its key must be defined", + ) + + +class Source3(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + clusterTrustBundle: Optional[ClusterTrustBundle1] = Field( + default=None, + description="ClusterTrustBundle allows a pod to access the `.spec.trustBundle` field\nof ClusterTrustBundle objects in an auto-updating file.\n\n\nAlpha, gated by the ClusterTrustBundleProjection feature gate.\n\n\nClusterTrustBundle objects can either be selected by name, or by the\ncombination of signer name and a label selector.\n\n\nKubelet performs aggressive normalization of the PEM contents written\ninto the pod filesystem. Esoteric PEM features such as inter-block\ncomments and block headers are stripped. Certificates are deduplicated.\nThe ordering of certificates within the file is arbitrary, and Kubelet\nmay change the order over time.", + ) + configMap: Optional[ConfigMap3] = Field( + default=None, + description="configMap information about the configMap data to project", + ) + downwardAPI: Optional[DownwardAPI3] = Field( + default=None, + description="downwardAPI information about the downwardAPI data to project", + ) + secret: Optional[Secret2] = Field( + default=None, description="secret information about the secret data to project" + ) + serviceAccountToken: Optional[ServiceAccountToken] = Field( + default=None, + description="serviceAccountToken is information about the serviceAccountToken data to project", + ) + + +class Projected1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + defaultMode: Optional[int] = Field( + default=None, + description="defaultMode are the mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + sources: Optional[List[Source3]] = Field( + default=None, description="sources is the list of volume projections" + ) + + +class Rbd1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". 
Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#rbd\nTODO: how do we prevent errors in the filesystem from compromising the machine', + ) + image: str = Field( + ..., + description="image is the rados image name.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + keyring: Optional[str] = Field( + default=None, + description="keyring is the path to key ring for RBDUser.\nDefault is /etc/ceph/keyring.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + monitors: List[str] = Field( + ..., + description="monitors is a collection of Ceph monitors.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + pool: Optional[str] = Field( + default=None, + description="pool is the rados pool name.\nDefault is rbd.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly here will force the ReadOnly setting in VolumeMounts.\nDefaults to false.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is name of the authentication secret for RBDUser. If provided\noverrides keyring.\nDefault is nil.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + user: Optional[str] = Field( + default=None, + description="user is the rados user name.\nDefault is admin.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + + +class ScaleIO1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs".\nDefault is "xfs".', + ) + gateway: str = Field( + ..., description="gateway is the host address of the ScaleIO API Gateway." + ) + protectionDomain: Optional[str] = Field( + default=None, + description="protectionDomain is the name of the ScaleIO Protection Domain for the configured storage.", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly Defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + secretRef: SecretRef = Field( + ..., + description="secretRef references to the secret for ScaleIO user and other\nsensitive information. 
If this is not provided, Login operation will fail.", + ) + sslEnabled: Optional[bool] = Field( + default=None, + description="sslEnabled Flag enable/disable SSL communication with Gateway, default false", + ) + storageMode: Optional[str] = Field( + default=None, + description="storageMode indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned.\nDefault is ThinProvisioned.", + ) + storagePool: Optional[str] = Field( + default=None, + description="storagePool is the ScaleIO Storage Pool associated with the protection domain.", + ) + system: str = Field( + ..., + description="system is the name of the storage system as configured in ScaleIO.", + ) + volumeName: Optional[str] = Field( + default=None, + description="volumeName is the name of a volume already created in the ScaleIO system\nthat is associated with this volume source.", + ) + + +class Secret3(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + defaultMode: Optional[int] = Field( + default=None, + description="defaultMode is Optional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values\nfor mode bits. Defaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + items: Optional[List[Item10]] = Field( + default=None, + description="items If unspecified, each key-value pair in the Data field of the referenced\nSecret will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the Secret,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional field specify whether the Secret or its keys must be defined", + ) + secretName: Optional[str] = Field( + default=None, + description="secretName is the name of the secret in the pod's namespace to use.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#secret", + ) + + +class Storageos1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.', + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef specifies the secret to use for obtaining the StorageOS API\ncredentials. If not specified, default values will be attempted.", + ) + volumeName: Optional[str] = Field( + default=None, + description="volumeName is the human-readable name of the StorageOS volume. Volume\nnames are only unique within a namespace.", + ) + volumeNamespace: Optional[str] = Field( + default=None, + description='volumeNamespace specifies the scope of the volume within StorageOS. 
If no\nnamespace is specified then the Pod\'s namespace will be used. This allows the\nKubernetes name scoping to be mirrored within StorageOS for tighter integration.\nSet VolumeName to any name to override the default behaviour.\nSet to "default" if you are not using namespaces within StorageOS.\nNamespaces that do not pre-exist within StorageOS will be created.', + ) + + +class Volume1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + awsElasticBlockStore: Optional[AwsElasticBlockStore] = Field( + default=None, + description="awsElasticBlockStore represents an AWS Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", + ) + azureDisk: Optional[AzureDisk] = Field( + default=None, + description="azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod.", + ) + azureFile: Optional[AzureFile] = Field( + default=None, + description="azureFile represents an Azure File Service mount on the host and bind mount to the pod.", + ) + cephfs: Optional[Cephfs1] = Field( + default=None, + description="cephFS represents a Ceph FS mount on the host that shares a pod's lifetime", + ) + cinder: Optional[Cinder1] = Field( + default=None, + description="cinder represents a cinder volume attached and mounted on kubelets host machine.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md", + ) + configMap: Optional[ConfigMap2] = Field( + default=None, + description="configMap represents a configMap that should populate this volume", + ) + csi: Optional[Csi1] = Field( + default=None, + description="csi (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers (Beta feature).", + ) + downwardAPI: Optional[DownwardAPI2] = Field( + default=None, + description="downwardAPI represents downward API about the pod that should populate this volume", + ) + emptyDir: Optional[EmptyDir] = Field( + default=None, + description="emptyDir represents a temporary directory that shares a pod's lifetime.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir", + ) + ephemeral: Optional[Ephemeral1] = Field( + default=None, + description="ephemeral represents a volume that is handled by a cluster storage driver.\nThe volume's lifecycle is tied to the pod that defines it - it will be created before the pod starts,\nand deleted when the pod is removed.\n\n\nUse this if:\na) the volume is only needed while the pod runs,\nb) features of normal volumes like restoring from snapshot or capacity\n tracking are needed,\nc) the storage driver is specified through a storage class, and\nd) the storage driver supports dynamic volume provisioning through\n a PersistentVolumeClaim (see EphemeralVolumeSource for more\n information on the connection between this volume type\n and PersistentVolumeClaim).\n\n\nUse PersistentVolumeClaim or one of the vendor-specific\nAPIs for volumes that persist for longer than the lifecycle\nof an individual pod.\n\n\nUse CSI for light-weight local ephemeral volumes if the CSI driver is meant to\nbe used that way - see the documentation of the driver for\nmore information.\n\n\nA pod can use both types of ephemeral volumes and\npersistent volumes at the same time.", + ) + fc: Optional[Fc] = Field( + default=None, + description="fc represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod.", + ) + flexVolume: Optional[FlexVolume1] = 
Field( + default=None, + description="flexVolume represents a generic volume resource that is\nprovisioned/attached using an exec based plugin.", + ) + flocker: Optional[Flocker] = Field( + default=None, + description="flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running", + ) + gcePersistentDisk: Optional[GcePersistentDisk] = Field( + default=None, + description="gcePersistentDisk represents a GCE Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", + ) + gitRepo: Optional[GitRepo] = Field( + default=None, + description="gitRepo represents a git repository at a particular revision.\nDEPRECATED: GitRepo is deprecated. To provision a container with a git repo, mount an\nEmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir\ninto the Pod's container.", + ) + glusterfs: Optional[Glusterfs] = Field( + default=None, + description="glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime.\nMore info: https://examples.k8s.io/volumes/glusterfs/README.md", + ) + hostPath: Optional[HostPath] = Field( + default=None, + description="hostPath represents a pre-existing file or directory on the host\nmachine that is directly exposed to the container. This is generally\nused for system agents or other privileged things that are allowed\nto see the host machine. Most containers will NOT need this.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath\n---\nTODO(jonesdl) We need to restrict who can use host directory mounts and who can/can not\nmount host directories as read/write.", + ) + iscsi: Optional[Iscsi1] = Field( + default=None, + description="iscsi represents an ISCSI Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://examples.k8s.io/volumes/iscsi/README.md", + ) + name: str = Field(..., description="Name of the Build Volume") + nfs: Optional[Nfs] = Field( + default=None, + description="nfs represents an NFS mount on the host that shares a pod's lifetime\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#nfs", + ) + persistentVolumeClaim: Optional[PersistentVolumeClaim] = Field( + default=None, + description="persistentVolumeClaimVolumeSource represents a reference to a\nPersistentVolumeClaim in the same namespace.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims", + ) + photonPersistentDisk: Optional[PhotonPersistentDisk] = Field( + default=None, + description="photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine", + ) + portworxVolume: Optional[PortworxVolume] = Field( + default=None, + description="portworxVolume represents a portworx volume attached and mounted on kubelets host machine", + ) + projected: Optional[Projected1] = Field( + default=None, + description="projected items for all in one resources secrets, configmaps, and downward API", + ) + quobyte: Optional[Quobyte] = Field( + default=None, + description="quobyte represents a Quobyte mount on the host that shares a pod's lifetime", + ) + rbd: Optional[Rbd1] = Field( + default=None, + description="rbd represents a Rados Block Device mount on the host that shares a pod's lifetime.\nMore info: https://examples.k8s.io/volumes/rbd/README.md", + ) + scaleIO: Optional[ScaleIO1] = Field( + 
default=None, + description="scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes.", + ) + secret: Optional[Secret3] = Field( + default=None, + description="secret represents a secret that should populate this volume.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#secret", + ) + storageos: Optional[Storageos1] = Field( + default=None, + description="storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes.", + ) + vsphereVolume: Optional[VsphereVolume] = Field( + default=None, + description="vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine", + ) + + +class Spec(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + build: Build = Field( + ..., + description="Build refers to an embedded build specification\nThis field is mandatory", + ) + env: Optional[List[EnvItem1]] = Field( + default=None, + description="Env contains additional environment variables that should be passed to the build container", + ) + nodeSelector: Optional[Dict[str, str]] = Field( + default=None, + description="NodeSelector is a selector which must be true for the pod to fit on a node.\nSelector which must match a node's labels for the pod to be scheduled on that node.\nMore info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/", + ) + output: Optional[Output1] = Field( + default=None, + description="Output refers to the location where the generated\nimage would be pushed to. It will overwrite the output image in build spec", + ) + paramValues: Optional[List[ParamValue1]] = Field( + default=None, + description="Params is a list of key/value that could be used\nto set strategy parameters", + ) + retention: Optional[Retention1] = Field( + default=None, description="Contains information about retention params" + ) + schedulerName: Optional[str] = Field( + default=None, + description="SchedulerName specifies the scheduler to be used to dispatch the Pod", + ) + serviceAccount: Optional[str] = Field( + default=None, + description="ServiceAccount refers to the kubernetes serviceaccount\nwhich is used for resource control.\nDefault serviceaccount will be set if it is empty", + ) + source: Optional[Source2] = Field( + default=None, + description="Source refers to the location where the source code is,\nthis could only be a local source", + ) + state: Optional[str] = Field( + default=None, + description="State is used for canceling a buildrun (and maybe more later on).", + ) + timeout: Optional[str] = Field( + default=None, + description="Timeout defines the maximum run time of this BuildRun.", + ) + tolerations: Optional[List[Toleration]] = Field( + default=None, description="If specified, the pod's tolerations." + ) + volumes: Optional[List[Volume1]] = Field( + default=None, + description="Volumes contains volume Overrides of the BuildStrategy volumes in case those are allowed\nto be overridden. Must only contain volumes that exist in the corresponding BuildStrategy", + ) + + +class ValueFrom2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + configMapKeyRef: Optional[ConfigMapKeyRef] = Field( + default=None, description="Selects a key of a ConfigMap." 
+ ) + fieldRef: Optional[FieldRef] = Field( + default=None, + description="Selects a field of the pod: supports metadata.name, metadata.namespace, `metadata.labels['']`, `metadata.annotations['']`,\nspec.nodeName, spec.serviceAccountName, status.hostIP, status.podIP, status.podIPs.", + ) + resourceFieldRef: Optional[ResourceFieldRef] = Field( + default=None, + description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, limits.ephemeral-storage, requests.cpu, requests.memory and requests.ephemeral-storage) are currently supported.", + ) + secretKeyRef: Optional[SecretKeyRef] = Field( + default=None, description="Selects a key of a secret in the pod's namespace" + ) + + +class EnvItem2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + name: str = Field( + ..., description="Name of the environment variable. Must be a C_IDENTIFIER." + ) + value: Optional[str] = Field( + default=None, + description='Variable references $(VAR_NAME) are expanded\nusing the previously defined environment variables in the container and\nany service environment variables. If a variable cannot be resolved,\nthe reference in the input string will be unchanged. Double $$ are reduced\nto a single $, which allows for escaping the $(VAR_NAME) syntax: i.e.\n"$$(VAR_NAME)" will produce the string literal "$(VAR_NAME)".\nEscaped references will never be expanded, regardless of whether the variable\nexists or not.\nDefaults to "".', + ) + valueFrom: Optional[ValueFrom2] = Field( + default=None, + description="Source for the environment variable's value. Cannot be used if value is not empty.", + ) + + +class Ignore2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + id: Optional[List[str]] = Field( + default=None, + description="ID references the security issues to be ignored in vulnerability scan", + ) + severity: Optional[Severity] = Field( + default=None, + description='Severity denotes the severity levels of security issues to be ignored, valid values are:\n- "low": it will exclude low severity vulnerabilities, displaying only medium, high and critical vulnerabilities\n- "medium": it will exclude low and medium severity vulnerabilities, displaying only high and critical vulnerabilities\n- "high": it will exclude low, medium and high severity vulnerabilities, displaying only the critical vulnerabilities', + ) + unfixed: Optional[bool] = Field( + default=None, + description="Unfixed indicates to ignore vulnerabilities for which no fix exists", + ) + + +class VulnerabilityScan2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + enabled: Optional[bool] = Field( + default=None, + description="Enabled indicates whether to run vulnerability scan for image", + ) + failOnFinding: Optional[bool] = Field( + default=None, + description="FailOnFinding indicates whether to fail the build run if the vulnerability scan results in vulnerabilities", + ) + ignore: Optional[Ignore2] = Field( + default=None, + description="Ignore refers to ignore options for vulnerability scan", + ) + + +class Output2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + annotations: Optional[Dict[str, str]] = Field( + default=None, + description="Annotations references the additional annotations to be applied on the image", + ) + image: str = Field(..., description="Image is the reference of the image.") + insecure: Optional[bool] = Field( + default=None, description="Insecure defines whether the registry is not secure" + ) + labels: Optional[Dict[str, str]] = 
Field(
+        default=None,
+        description="Labels references the additional labels to be applied on the image",
+    )
+    pushSecret: Optional[str] = Field(
+        default=None,
+        description="Describes the secret name for pushing a container image.",
+    )
+    timestamp: Optional[str] = Field(
+        default=None,
+        description='Timestamp references the optional image timestamp to be set, valid values are:\n- "Zero", to set 00:00:00 UTC on 1 January 1970\n- "SourceTimestamp", to set the source timestamp derived from the input source\n- "BuildTimestamp", to set the timestamp of the current build itself\n- Parsable integer number defined as the epoch seconds\n- or nil/empty to not set any specific timestamp',
+    )
+    vulnerabilityScan: Optional[VulnerabilityScan2] = Field(
+        default=None,
+        description="VulnerabilityScan provides configurations about running a scan for your generated image",
+    )
+
+
+class Value2(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    configMapValue: Optional[ConfigMapValue] = Field(
+        default=None, description="The ConfigMap value of the parameter"
+    )
+    secretValue: Optional[SecretValue] = Field(
+        default=None, description="The secret value of the parameter"
+    )
+    value: Optional[str] = Field(default=None, description="The value of the parameter")
+
+
+class ParamValue2(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    configMapValue: Optional[ConfigMapValue] = Field(
+        default=None, description="The ConfigMap value of the parameter"
+    )
+    name: str = Field(..., description="Name of the parameter")
+    secretValue: Optional[SecretValue] = Field(
+        default=None, description="The secret value of the parameter"
+    )
+    value: Optional[str] = Field(default=None, description="The value of the parameter")
+    values: Optional[List[Value2]] = Field(
+        default=None, description="Values of an array parameter"
+    )
+
+
+class Retention2(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    atBuildDeletion: Optional[bool] = Field(
+        default=None,
+        description="AtBuildDeletion defines if related BuildRuns should be deleted when deleting the Build.",
+    )
+    failedLimit: Optional[int] = Field(
+        default=None,
+        description="FailedLimit defines the maximum number of failed buildruns that should exist.",
+        ge=1,
+        le=10000,
+    )
+    succeededLimit: Optional[int] = Field(
+        default=None,
+        description="SucceededLimit defines the maximum number of succeeded buildruns that should exist.",
+        ge=1,
+        le=10000,
+    )
+    ttlAfterFailed: Optional[str] = Field(
+        default=None,
+        description="TTLAfterFailed defines the maximum duration of time the failed buildrun should exist.",
+    )
+    ttlAfterSucceeded: Optional[str] = Field(
+        default=None,
+        description="TTLAfterSucceeded defines the maximum duration of time the succeeded buildrun should exist.",
+    )
+
+
+class Source4(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    contextDir: Optional[str] = Field(
+        default=None,
+        description="ContextDir is a path to subfolder in the repo. Optional.",
+    )
+    git: Optional[Git] = Field(
+        default=None, description="Git contains the details for the source of type Git"
+    )
+    local: Optional[Local] = Field(
+        default=None,
+        description="Local contains the details for the source of type Local",
+    )
+    ociArtifact: Optional[OciArtifact] = Field(
+        default=None,
+        description="OCIArtifact contains the details for the source of type OCIArtifact",
+    )
+    type: str = Field(
+        ..., description="Type is the BuildSource qualifier, the type of the source."
+ ) + + +class WhenItem1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + github: Optional[Github] = Field( + default=None, + description="GitHub describes how to trigger builds based on GitHub (SCM) events.", + ) + image: Optional[Image] = Field( + default=None, description="Image slice of image names where the event applies." + ) + name: str = Field( + ..., description="Name name or the short description of the trigger condition." + ) + objectRef: Optional[ObjectRef] = Field( + default=None, + description="ObjectRef describes how to match a foreign resource, either using the name or the label\nselector, plus the current resource status.", + ) + type: str = Field(..., description="Type the event type") + + +class Trigger1(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + triggerSecret: Optional[str] = Field( + default=None, + description="TriggerSecret points to a local object carrying the secret token to validate webhook request.", + ) + when: Optional[List[WhenItem1]] = Field( + default=None, + description="When the list of scenarios when a new build should take place.", + ) + + +class Cephfs2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + monitors: List[str] = Field( + ..., + description="monitors is Required: Monitors is a collection of Ceph monitors\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + path: Optional[str] = Field( + default=None, + description="path is Optional: Used as the mounted root, rather than the full Ceph tree, default is /", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly is Optional: Defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + secretFile: Optional[str] = Field( + default=None, + description="secretFile is Optional: SecretFile is the path to key ring for User, default is /etc/ceph/user.secret\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is Optional: SecretRef is reference to the authentication secret for User, default is empty.\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + user: Optional[str] = Field( + default=None, + description="user is optional: User is the rados user name, default is admin\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", + ) + + +class Cinder2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md', + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly defaults to false (read/write). 
ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is optional: points to a secret object containing parameters used to connect\nto OpenStack.", + ) + volumeID: str = Field( + ..., + description="volumeID used to identify the volume in cinder.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md", + ) + + +class ConfigMap4(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + defaultMode: Optional[int] = Field( + default=None, + description="defaultMode is optional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDefaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + items: Optional[List[Item10]] = Field( + default=None, + description="items if unspecified, each key-value pair in the Data field of the referenced\nConfigMap will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the ConfigMap,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional specify whether the ConfigMap or its keys must be defined", + ) + + +class Csi2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + driver: str = Field( + ..., + description="driver is the name of the CSI driver that handles this volume.\nConsult with your admin for the correct name as registered in the cluster.", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType to mount. Ex. "ext4", "xfs", "ntfs".\nIf not provided, the empty value is passed to the associated CSI driver\nwhich will determine the default filesystem to apply.', + ) + nodePublishSecretRef: Optional[NodePublishSecretRef] = Field( + default=None, + description="nodePublishSecretRef is a reference to the secret object containing\nsensitive information to pass to the CSI driver to complete the CSI\nNodePublishVolume and NodeUnpublishVolume calls.\nThis field is optional, and may be empty if no secret is required. 
If the\nsecret object contains more than one secret, all secret references are passed.",
+    )
+    readOnly: Optional[bool] = Field(
+        default=None,
+        description="readOnly specifies a read-only configuration for the volume.\nDefaults to false (read/write).",
+    )
+    volumeAttributes: Optional[Dict[str, str]] = Field(
+        default=None,
+        description="volumeAttributes stores driver-specific properties that are passed to the CSI\ndriver. Consult your driver's documentation for supported values.",
+    )
+
+
+class Item13(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    fieldRef: Optional[FieldRef] = Field(
+        default=None,
+        description="Required: Selects a field of the pod: only annotations, labels, name, namespace and uid are supported.",
+    )
+    mode: Optional[int] = Field(
+        default=None,
+        description="Optional: mode bits used to set permissions on this file, must be an octal value\nbetween 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.",
+    )
+    path: str = Field(
+        ...,
+        description="Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'",
+    )
+    resourceFieldRef: Optional[ResourceFieldRef] = Field(
+        default=None,
+        description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.",
+    )
+
+
+class DownwardAPI4(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    defaultMode: Optional[int] = Field(
+        default=None,
+        description="Optional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDefaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.",
+    )
+    items: Optional[List[Item13]] = Field(
+        default=None, description="Items is a list of downward API volume file"
+    )
+
+
+class Selector2(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    matchExpressions: Optional[List[MatchExpression]] = Field(
+        default=None,
+        description="matchExpressions is a list of label selector requirements. The requirements are ANDed.",
+    )
+    matchLabels: Optional[Dict[str, str]] = Field(
+        default=None,
+        description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value".
The requirements are ANDed.', + ) + + +class Spec4(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + accessModes: Optional[List[str]] = Field( + default=None, + description="accessModes contains the desired access modes the volume should have.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1", + ) + dataSource: Optional[DataSource] = Field( + default=None, + description="dataSource field can be used to specify either:\n* An existing VolumeSnapshot object (snapshot.storage.k8s.io/VolumeSnapshot)\n* An existing PVC (PersistentVolumeClaim)\nIf the provisioner or an external controller can support the specified data source,\nit will create a new volume based on the contents of the specified data source.\nWhen the AnyVolumeDataSource feature gate is enabled, dataSource contents will be copied to dataSourceRef,\nand dataSourceRef contents will be copied to dataSource when dataSourceRef.namespace is not specified.\nIf the namespace is specified, then dataSourceRef will not be copied to dataSource.", + ) + dataSourceRef: Optional[DataSourceRef] = Field( + default=None, + description="dataSourceRef specifies the object from which to populate the volume with data, if a non-empty\nvolume is desired. This may be any object from a non-empty API group (non\ncore object) or a PersistentVolumeClaim object.\nWhen this field is specified, volume binding will only succeed if the type of\nthe specified object matches some installed volume populator or dynamic\nprovisioner.\nThis field will replace the functionality of the dataSource field and as such\nif both fields are non-empty, they must have the same value. For backwards\ncompatibility, when namespace isn't specified in dataSourceRef,\nboth fields (dataSource and dataSourceRef) will be set to the same\nvalue automatically if one of them is empty and the other is non-empty.\nWhen namespace is specified in dataSourceRef,\ndataSource isn't set to the same value and must be empty.\nThere are three important differences between dataSource and dataSourceRef:\n* While dataSource only allows two specific types of objects, dataSourceRef\n allows any non-core object, as well as PersistentVolumeClaim objects.\n* While dataSource ignores disallowed values (dropping them), dataSourceRef\n preserves all values, and generates an error if a disallowed value is\n specified.\n* While dataSource only allows local objects, dataSourceRef allows objects\n in any namespaces.\n(Beta) Using this field requires the AnyVolumeDataSource feature gate to be enabled.\n(Alpha) Using the namespace field of dataSourceRef requires the CrossNamespaceVolumeDataSource feature gate to be enabled.", + ) + resources: Optional[Resources] = Field( + default=None, + description="resources represents the minimum resources the volume should have.\nIf RecoverVolumeExpansionFailure feature is enabled users are allowed to specify resource requirements\nthat are lower than previous value but must still be higher than capacity recorded in the\nstatus field of the claim.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#resources", + ) + selector: Optional[Selector2] = Field( + default=None, + description="selector is a label query over volumes to consider for binding.", + ) + storageClassName: Optional[str] = Field( + default=None, + description="storageClassName is the name of the StorageClass required by the claim.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#class-1", + ) + 
volumeAttributesClassName: Optional[str] = Field( + default=None, + description="volumeAttributesClassName may be used to set the VolumeAttributesClass used by this claim.\nIf specified, the CSI driver will create or update the volume with the attributes defined\nin the corresponding VolumeAttributesClass. This has a different purpose than storageClassName,\nit can be changed after the claim is created. An empty string value means that no VolumeAttributesClass\nwill be applied to the claim but it's not allowed to reset this field to empty string once it is set.\nIf unspecified and the PersistentVolumeClaim is unbound, the default VolumeAttributesClass\nwill be set by the persistentvolume controller if it exists.\nIf the resource referred to by volumeAttributesClass does not exist, this PersistentVolumeClaim will be\nset to a Pending state, as reflected by the modifyVolumeStatus field, until such as a resource\nexists.\nMore info: https://kubernetes.io/docs/concepts/storage/volume-attributes-classes/\n(Alpha) Using this field requires the VolumeAttributesClass feature gate to be enabled.", + ) + volumeMode: Optional[str] = Field( + default=None, + description="volumeMode defines what type of volume is required by the claim.\nValue of Filesystem is implied when not included in claim spec.", + ) + volumeName: Optional[str] = Field( + default=None, + description="volumeName is the binding reference to the PersistentVolume backing this claim.", + ) + + +class VolumeClaimTemplate2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + metadata: Optional[Dict[str, Any]] = Field( + default=None, + description="May contain labels and annotations that will be copied into the PVC\nwhen creating it. No other fields are allowed and will be rejected during\nvalidation.", + ) + spec: Spec4 = Field( + ..., + description="The specification for the PersistentVolumeClaim. The entire content is\ncopied unchanged into the PVC that gets created from this\ntemplate. The same fields as in a PersistentVolumeClaim\nare also valid here.", + ) + + +class Ephemeral2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + volumeClaimTemplate: Optional[VolumeClaimTemplate2] = Field( + default=None, + description="Will be used to create a stand-alone PVC to provision the volume.\nThe pod in which this EphemeralVolumeSource is embedded will be the\nowner of the PVC, i.e. the PVC will be deleted together with the\npod. The name of the PVC will be `-` where\n`` is the name from the `PodSpec.Volumes` array\nentry. Pod validation will reject the pod if the concatenated name\nis not valid for a PVC (for example, too long).\n\n\nAn existing PVC with that name that is not owned by the pod\nwill *not* be used for the pod to avoid using an unrelated\nvolume by mistake. Starting the pod is then blocked until\nthe unrelated PVC is removed. If such a pre-created PVC is\nmeant to be used by the pod, the PVC has to updated with an\nowner reference to the pod once the pod exists. Normally\nthis should not be necessary, but it may be useful when\nmanually reconstructing a broken cluster.\n\n\nThis field is read-only and no changes will be made by Kubernetes\nto the PVC after it has been created.\n\n\nRequired, must not be nil.", + ) + + +class FlexVolume2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + driver: str = Field( + ..., description="driver is the name of the driver to use for this volume." 
+ ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". The default filesystem depends on FlexVolume script.', + ) + options: Optional[Dict[str, str]] = Field( + default=None, + description="options is Optional: this field holds extra command options if any.", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly is Optional: defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is Optional: secretRef is reference to the secret object containing\nsensitive information to pass to the plugin scripts. This may be\nempty if no secret object is specified. If the secret object\ncontains more than one secret, all secrets are passed to the plugin\nscripts.", + ) + + +class Iscsi2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + chapAuthDiscovery: Optional[bool] = Field( + default=None, + description="chapAuthDiscovery defines whether support iSCSI Discovery CHAP authentication", + ) + chapAuthSession: Optional[bool] = Field( + default=None, + description="chapAuthSession defines whether support iSCSI Session CHAP authentication", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#iscsi\nTODO: how do we prevent errors in the filesystem from compromising the machine', + ) + initiatorName: Optional[str] = Field( + default=None, + description="initiatorName is the custom iSCSI Initiator Name.\nIf initiatorName is specified with iscsiInterface simultaneously, new iSCSI interface\n: will be created for the connection.", + ) + iqn: str = Field(..., description="iqn is the target iSCSI Qualified Name.") + iscsiInterface: Optional[str] = Field( + default=None, + description="iscsiInterface is the interface Name that uses an iSCSI transport.\nDefaults to 'default' (tcp).", + ) + lun: int = Field(..., description="lun represents iSCSI Target Lun number.") + portals: Optional[List[str]] = Field( + default=None, + description="portals is the iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port\nis other than default (typically TCP ports 860 and 3260).", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly here will force the ReadOnly setting in VolumeMounts.\nDefaults to false.", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is the CHAP Secret for iSCSI target and initiator authentication", + ) + targetPortal: str = Field( + ..., + description="targetPortal is iSCSI Target Portal. The Portal is either an IP or ip_addr:port if the port\nis other than default (typically TCP ports 860 and 3260).", + ) + + +class LabelSelector2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + matchExpressions: Optional[List[MatchExpression]] = Field( + default=None, + description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", + ) + matchLabels: Optional[Dict[str, str]] = Field( + default=None, + description='matchLabels is a map of {key,value} pairs. 
A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', + ) + + +class ClusterTrustBundle2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + labelSelector: Optional[LabelSelector2] = Field( + default=None, + description='Select all ClusterTrustBundles that match this label selector. Only has\neffect if signerName is set. Mutually-exclusive with name. If unset,\ninterpreted as "match nothing". If set but empty, interpreted as "match\neverything".', + ) + name: Optional[str] = Field( + default=None, + description="Select a single ClusterTrustBundle by object name. Mutually-exclusive\nwith signerName and labelSelector.", + ) + optional: Optional[bool] = Field( + default=None, + description="If true, don't block pod startup if the referenced ClusterTrustBundle(s)\naren't available. If using name, then the named ClusterTrustBundle is\nallowed not to exist. If using signerName, then the combination of\nsignerName and labelSelector is allowed to match zero\nClusterTrustBundles.", + ) + path: str = Field( + ..., description="Relative path from the volume root to write the bundle." + ) + signerName: Optional[str] = Field( + default=None, + description="Select all ClusterTrustBundles that match this signer name.\nMutually-exclusive with name. The contents of all selected\nClusterTrustBundles will be unified and deduplicated.", + ) + + +class Item14(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + key: str = Field(..., description="key is the key to project.") + mode: Optional[int] = Field( + default=None, + description="mode is Optional: mode bits used to set permissions on this file.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="path is the relative path of the file to map the key to.\nMay not be an absolute path.\nMay not contain the path element '..'.\nMay not start with the string '..'.", + ) + + +class ConfigMap5(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + items: Optional[List[Item14]] = Field( + default=None, + description="items if unspecified, each key-value pair in the Data field of the referenced\nConfigMap will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the ConfigMap,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. 
apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional specify whether the ConfigMap or its keys must be defined", + ) + + +class Item15(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fieldRef: Optional[FieldRef] = Field( + default=None, + description="Required: Selects a field of the pod: only annotations, labels, name, namespace and uid are supported.", + ) + mode: Optional[int] = Field( + default=None, + description="Optional: mode bits used to set permissions on this file, must be an octal value\nbetween 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="Required: Path is the relative path name of the file to be created. Must not be absolute or contain the '..' path. Must be utf-8 encoded. The first item of the relative path must not start with '..'", + ) + resourceFieldRef: Optional[ResourceFieldRef] = Field( + default=None, + description="Selects a resource of the container: only resources limits and requests\n(limits.cpu, limits.memory, requests.cpu and requests.memory) are currently supported.", + ) + + +class DownwardAPI5(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + items: Optional[List[Item15]] = Field( + default=None, description="Items is a list of DownwardAPIVolume file" + ) + + +class Item16(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + key: str = Field(..., description="key is the key to project.") + mode: Optional[int] = Field( + default=None, + description="mode is Optional: mode bits used to set permissions on this file.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nIf not specified, the volume defaultMode will be used.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + path: str = Field( + ..., + description="path is the relative path of the file to map the key to.\nMay not be an absolute path.\nMay not contain the path element '..'.\nMay not start with the string '..'.", + ) + + +class Secret4(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + items: Optional[List[Item16]] = Field( + default=None, + description="items if unspecified, each key-value pair in the Data field of the referenced\nSecret will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the Secret,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + name: str = Field( + default="", + description="Name of the referent.\nThis field is effectively required, but due to backwards compatibility is\nallowed to be empty. 
Instances of this type with an empty value here are\nalmost certainly wrong.\nTODO: Add other useful fields. apiVersion, kind, uid?\nMore info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names\nTODO: Drop `kubebuilder:default` when controller-gen doesn't need it https://github.com/kubernetes-sigs/kubebuilder/issues/3896.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional field specify whether the Secret or its key must be defined", + ) + + +class Source5(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + clusterTrustBundle: Optional[ClusterTrustBundle2] = Field( + default=None, + description="ClusterTrustBundle allows a pod to access the `.spec.trustBundle` field\nof ClusterTrustBundle objects in an auto-updating file.\n\n\nAlpha, gated by the ClusterTrustBundleProjection feature gate.\n\n\nClusterTrustBundle objects can either be selected by name, or by the\ncombination of signer name and a label selector.\n\n\nKubelet performs aggressive normalization of the PEM contents written\ninto the pod filesystem. Esoteric PEM features such as inter-block\ncomments and block headers are stripped. Certificates are deduplicated.\nThe ordering of certificates within the file is arbitrary, and Kubelet\nmay change the order over time.", + ) + configMap: Optional[ConfigMap5] = Field( + default=None, + description="configMap information about the configMap data to project", + ) + downwardAPI: Optional[DownwardAPI5] = Field( + default=None, + description="downwardAPI information about the downwardAPI data to project", + ) + secret: Optional[Secret4] = Field( + default=None, description="secret information about the secret data to project" + ) + serviceAccountToken: Optional[ServiceAccountToken] = Field( + default=None, + description="serviceAccountToken is information about the serviceAccountToken data to project", + ) + + +class Projected2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + defaultMode: Optional[int] = Field( + default=None, + description="defaultMode are the mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + sources: Optional[List[Source5]] = Field( + default=None, description="sources is the list of volume projections" + ) + + +class Rbd2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type of the volume that you want to mount.\nTip: Ensure that the filesystem type is supported by the host operating system.\nExamples: "ext4", "xfs", "ntfs". 
Implicitly inferred to be "ext4" if unspecified.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#rbd\nTODO: how do we prevent errors in the filesystem from compromising the machine', + ) + image: str = Field( + ..., + description="image is the rados image name.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + keyring: Optional[str] = Field( + default=None, + description="keyring is the path to key ring for RBDUser.\nDefault is /etc/ceph/keyring.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + monitors: List[str] = Field( + ..., + description="monitors is a collection of Ceph monitors.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + pool: Optional[str] = Field( + default=None, + description="pool is the rados pool name.\nDefault is rbd.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly here will force the ReadOnly setting in VolumeMounts.\nDefaults to false.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef is name of the authentication secret for RBDUser. If provided\noverrides keyring.\nDefault is nil.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + user: Optional[str] = Field( + default=None, + description="user is the rados user name.\nDefault is admin.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", + ) + + +class ScaleIO2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs".\nDefault is "xfs".', + ) + gateway: str = Field( + ..., description="gateway is the host address of the ScaleIO API Gateway." + ) + protectionDomain: Optional[str] = Field( + default=None, + description="protectionDomain is the name of the ScaleIO Protection Domain for the configured storage.", + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly Defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + secretRef: SecretRef = Field( + ..., + description="secretRef references to the secret for ScaleIO user and other\nsensitive information. 
If this is not provided, Login operation will fail.", + ) + sslEnabled: Optional[bool] = Field( + default=None, + description="sslEnabled Flag enable/disable SSL communication with Gateway, default false", + ) + storageMode: Optional[str] = Field( + default=None, + description="storageMode indicates whether the storage for a volume should be ThickProvisioned or ThinProvisioned.\nDefault is ThinProvisioned.", + ) + storagePool: Optional[str] = Field( + default=None, + description="storagePool is the ScaleIO Storage Pool associated with the protection domain.", + ) + system: str = Field( + ..., + description="system is the name of the storage system as configured in ScaleIO.", + ) + volumeName: Optional[str] = Field( + default=None, + description="volumeName is the name of a volume already created in the ScaleIO system\nthat is associated with this volume source.", + ) + + +class Secret5(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + defaultMode: Optional[int] = Field( + default=None, + description="defaultMode is Optional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values\nfor mode bits. Defaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", + ) + items: Optional[List[Item16]] = Field( + default=None, + description="items If unspecified, each key-value pair in the Data field of the referenced\nSecret will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the Secret,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", + ) + optional: Optional[bool] = Field( + default=None, + description="optional field specify whether the Secret or its keys must be defined", + ) + secretName: Optional[str] = Field( + default=None, + description="secretName is the name of the secret in the pod's namespace to use.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#secret", + ) + + +class Storageos2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + fsType: Optional[str] = Field( + default=None, + description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". Implicitly inferred to be "ext4" if unspecified.', + ) + readOnly: Optional[bool] = Field( + default=None, + description="readOnly defaults to false (read/write). ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", + ) + secretRef: Optional[SecretRef] = Field( + default=None, + description="secretRef specifies the secret to use for obtaining the StorageOS API\ncredentials. If not specified, default values will be attempted.", + ) + volumeName: Optional[str] = Field( + default=None, + description="volumeName is the human-readable name of the StorageOS volume. Volume\nnames are only unique within a namespace.", + ) + volumeNamespace: Optional[str] = Field( + default=None, + description='volumeNamespace specifies the scope of the volume within StorageOS. 
If no\nnamespace is specified then the Pod\'s namespace will be used. This allows the\nKubernetes name scoping to be mirrored within StorageOS for tighter integration.\nSet VolumeName to any name to override the default behaviour.\nSet to "default" if you are not using namespaces within StorageOS.\nNamespaces that do not pre-exist within StorageOS will be created.', + ) + + +class Volume2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + awsElasticBlockStore: Optional[AwsElasticBlockStore] = Field( + default=None, + description="awsElasticBlockStore represents an AWS Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#awselasticblockstore", + ) + azureDisk: Optional[AzureDisk] = Field( + default=None, + description="azureDisk represents an Azure Data Disk mount on the host and bind mount to the pod.", + ) + azureFile: Optional[AzureFile] = Field( + default=None, + description="azureFile represents an Azure File Service mount on the host and bind mount to the pod.", + ) + cephfs: Optional[Cephfs2] = Field( + default=None, + description="cephFS represents a Ceph FS mount on the host that shares a pod's lifetime", + ) + cinder: Optional[Cinder2] = Field( + default=None, + description="cinder represents a cinder volume attached and mounted on kubelets host machine.\nMore info: https://examples.k8s.io/mysql-cinder-pd/README.md", + ) + configMap: Optional[ConfigMap4] = Field( + default=None, + description="configMap represents a configMap that should populate this volume", + ) + csi: Optional[Csi2] = Field( + default=None, + description="csi (Container Storage Interface) represents ephemeral storage that is handled by certain external CSI drivers (Beta feature).", + ) + downwardAPI: Optional[DownwardAPI4] = Field( + default=None, + description="downwardAPI represents downward API about the pod that should populate this volume", + ) + emptyDir: Optional[EmptyDir] = Field( + default=None, + description="emptyDir represents a temporary directory that shares a pod's lifetime.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#emptydir", + ) + ephemeral: Optional[Ephemeral2] = Field( + default=None, + description="ephemeral represents a volume that is handled by a cluster storage driver.\nThe volume's lifecycle is tied to the pod that defines it - it will be created before the pod starts,\nand deleted when the pod is removed.\n\n\nUse this if:\na) the volume is only needed while the pod runs,\nb) features of normal volumes like restoring from snapshot or capacity\n tracking are needed,\nc) the storage driver is specified through a storage class, and\nd) the storage driver supports dynamic volume provisioning through\n a PersistentVolumeClaim (see EphemeralVolumeSource for more\n information on the connection between this volume type\n and PersistentVolumeClaim).\n\n\nUse PersistentVolumeClaim or one of the vendor-specific\nAPIs for volumes that persist for longer than the lifecycle\nof an individual pod.\n\n\nUse CSI for light-weight local ephemeral volumes if the CSI driver is meant to\nbe used that way - see the documentation of the driver for\nmore information.\n\n\nA pod can use both types of ephemeral volumes and\npersistent volumes at the same time.", + ) + fc: Optional[Fc] = Field( + default=None, + description="fc represents a Fibre Channel resource that is attached to a kubelet's host machine and then exposed to the pod.", + ) + flexVolume: Optional[FlexVolume2] = 
Field( + default=None, + description="flexVolume represents a generic volume resource that is\nprovisioned/attached using an exec based plugin.", + ) + flocker: Optional[Flocker] = Field( + default=None, + description="flocker represents a Flocker volume attached to a kubelet's host machine. This depends on the Flocker control service being running", + ) + gcePersistentDisk: Optional[GcePersistentDisk] = Field( + default=None, + description="gcePersistentDisk represents a GCE Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#gcepersistentdisk", + ) + gitRepo: Optional[GitRepo] = Field( + default=None, + description="gitRepo represents a git repository at a particular revision.\nDEPRECATED: GitRepo is deprecated. To provision a container with a git repo, mount an\nEmptyDir into an InitContainer that clones the repo using git, then mount the EmptyDir\ninto the Pod's container.", + ) + glusterfs: Optional[Glusterfs] = Field( + default=None, + description="glusterfs represents a Glusterfs mount on the host that shares a pod's lifetime.\nMore info: https://examples.k8s.io/volumes/glusterfs/README.md", + ) + hostPath: Optional[HostPath] = Field( + default=None, + description="hostPath represents a pre-existing file or directory on the host\nmachine that is directly exposed to the container. This is generally\nused for system agents or other privileged things that are allowed\nto see the host machine. Most containers will NOT need this.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#hostpath\n---\nTODO(jonesdl) We need to restrict who can use host directory mounts and who can/can not\nmount host directories as read/write.", + ) + iscsi: Optional[Iscsi2] = Field( + default=None, + description="iscsi represents an ISCSI Disk resource that is attached to a\nkubelet's host machine and then exposed to the pod.\nMore info: https://examples.k8s.io/volumes/iscsi/README.md", + ) + name: str = Field(..., description="Name of the Build Volume") + nfs: Optional[Nfs] = Field( + default=None, + description="nfs represents an NFS mount on the host that shares a pod's lifetime\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#nfs", + ) + persistentVolumeClaim: Optional[PersistentVolumeClaim] = Field( + default=None, + description="persistentVolumeClaimVolumeSource represents a reference to a\nPersistentVolumeClaim in the same namespace.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#persistentvolumeclaims", + ) + photonPersistentDisk: Optional[PhotonPersistentDisk] = Field( + default=None, + description="photonPersistentDisk represents a PhotonController persistent disk attached and mounted on kubelets host machine", + ) + portworxVolume: Optional[PortworxVolume] = Field( + default=None, + description="portworxVolume represents a portworx volume attached and mounted on kubelets host machine", + ) + projected: Optional[Projected2] = Field( + default=None, + description="projected items for all in one resources secrets, configmaps, and downward API", + ) + quobyte: Optional[Quobyte] = Field( + default=None, + description="quobyte represents a Quobyte mount on the host that shares a pod's lifetime", + ) + rbd: Optional[Rbd2] = Field( + default=None, + description="rbd represents a Rados Block Device mount on the host that shares a pod's lifetime.\nMore info: https://examples.k8s.io/volumes/rbd/README.md", + ) + scaleIO: Optional[ScaleIO2] = Field( + 
default=None, + description="scaleIO represents a ScaleIO persistent volume attached and mounted on Kubernetes nodes.", + ) + secret: Optional[Secret5] = Field( + default=None, + description="secret represents a secret that should populate this volume.\nMore info: https://kubernetes.io/docs/concepts/storage/volumes#secret", + ) + storageos: Optional[Storageos2] = Field( + default=None, + description="storageOS represents a StorageOS volume attached and mounted on Kubernetes nodes.", + ) + vsphereVolume: Optional[VsphereVolume] = Field( + default=None, + description="vsphereVolume represents a vSphere volume attached and mounted on kubelets host machine", + ) + + +class BuildSpec(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + env: Optional[List[EnvItem2]] = Field( + default=None, + description="Env contains additional environment variables that should be passed to the build container", + ) + nodeSelector: Optional[Dict[str, str]] = Field( + default=None, + description="NodeSelector is a selector which must be true for the pod to fit on a node.\nSelector which must match a node's labels for the pod to be scheduled on that node.\nMore info: https://kubernetes.io/docs/concepts/configuration/assign-pod-node/", + ) + output: Output2 = Field( + ..., + description="Output refers to the location where the built image would be pushed.", + ) + paramValues: Optional[List[ParamValue2]] = Field( + default=None, + description="Params is a list of key/value that could be used\nto set strategy parameters", + ) + retention: Optional[Retention2] = Field( + default=None, description="Contains information about retention params" + ) + schedulerName: Optional[str] = Field( + default=None, + description="SchedulerName specifies the scheduler to be used to dispatch the Pod", + ) + source: Optional[Source4] = Field( + default=None, + description="Source refers to the location where the source code is,\nthis could be a git repository, a local source or an oci\nartifact", + ) + strategy: Strategy = Field( + ..., + description="Strategy references the BuildStrategy to use to build the container\nimage.", + ) + timeout: Optional[str] = Field( + default=None, + description="Timeout defines the maximum amount of time the Build should take to execute.", + ) + tolerations: Optional[List[Toleration]] = Field( + default=None, description="If specified, the pod's tolerations." + ) + trigger: Optional[Trigger1] = Field( + default=None, + description="Trigger defines the scenarios where a new build should be triggered.", + ) + volumes: Optional[List[Volume2]] = Field( + default=None, + description="Volumes contains volume Overrides of the BuildStrategy volumes in case those are allowed\nto be overridden. Must only contain volumes that exist in the corresponding BuildStrategy", + ) + + +class Condition(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + lastTransitionTime: datetime = Field( + ..., + description="LastTransitionTime last time the condition transit from one status to another.", + ) + message: str = Field( + ..., + description="A human readable message indicating details about the transition.", + ) + reason: str = Field( + ..., description="The reason for the condition last transition." + ) + status: str = Field( + ..., description="Status of the condition, one of True, False, Unknown." 
+ ) + type: str = Field(..., description="Type of condition") + + +class Location(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + container: Optional[str] = None + pod: Optional[str] = None + + +class FailureDetails(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + location: Optional[Location] = Field( + default=None, + description="Location describes the location where the failure happened", + ) + message: Optional[str] = None + reason: Optional[str] = None + + +class Vulnerability(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + id: Optional[str] = None + severity: Optional[str] = Field( + default=None, + description="VulnerabilitySeverity is an enum for the possible values for severity of a vulnerability", + ) + + +class Output3(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + digest: Optional[str] = Field( + default=None, description="Digest holds the digest of output image" + ) + size: Optional[int] = Field( + default=None, description="Size holds the compressed size of output image" + ) + vulnerabilities: Optional[List[Vulnerability]] = Field( + default=None, + description="Vulnerabilities holds the list of vulnerabilities detected in the image", + ) + + +class Git2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + branchName: Optional[str] = Field( + default=None, + description="BranchName holds the default branch name of the git source\nthis will be set only when revision is not specified in Build object", + ) + commitAuthor: Optional[str] = Field( + default=None, description="CommitAuthor holds the commit author of a git source" + ) + commitSha: Optional[str] = Field( + default=None, description="CommitSha holds the commit sha of git source" + ) + + +class OciArtifact2(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + digest: Optional[str] = Field( + default=None, description="Digest hold the image digest result" + ) + + +class Source6(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + git: Optional[Git2] = Field( + default=None, + description="Git holds the results emitted from the\nsource step of type git", + ) + ociArtifact: Optional[OciArtifact2] = Field( + default=None, + description="OciArtifact holds the results emitted from\nthe source step of type ociArtifact", + ) + timestamp: Optional[datetime] = Field( + default=None, + description="Timestamp holds the timestamp of the source, which\ndepends on the actual source type and could range from\nbeing the commit timestamp or the fileystem timestamp\nof the most recent source file in the working directory", + ) + + +class Status(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + buildSpec: Optional[BuildSpec] = Field( + default=None, description="BuildSpec is the Build Spec of this BuildRun." + ) + completionTime: Optional[datetime] = Field( + default=None, description="CompletionTime is the time the build completed." 
+    )
+    conditions: Optional[List[Condition]] = Field(
+        default=None,
+        description="Conditions holds the latest available observations of a resource's current state.",
+    )
+    failureDetails: Optional[FailureDetails] = Field(
+        default=None,
+        description="FailureDetails contains error details that are collected and surfaced from TaskRun",
+    )
+    output: Optional[Output3] = Field(
+        default=None,
+        description="Output holds the results emitted from step definition of an output",
+    )
+    source: Optional[Source6] = Field(
+        default=None,
+        description="Source holds the results emitted from the source step",
+    )
+    startTime: Optional[datetime] = Field(
+        default=None, description="StartTime is the time the build is actually started."
+    )
+    taskRunName: Optional[str] = Field(
+        default=None,
+        description="TaskRunName is the name of the TaskRun responsible for executing this BuildRun.",
+    )
+
+
+class Model(BaseCRD):
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    apiVersion: Optional[str] = Field(
+        default=None,
+        description="APIVersion defines the versioned schema of this representation of an object.\nServers should convert recognized schemas to the latest internal value, and\nmay reject unrecognized values.\nMore info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources",
+    )
+    kind: Optional[str] = Field(
+        default=None,
+        description="Kind is a string value representing the REST resource this object represents.\nServers may infer this from the endpoint the client submits requests to.\nCannot be updated.\nIn CamelCase.\nMore info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds",
+    )
+    metadata: Optional[Dict[str, Any]] = None
+    spec: Spec = Field(
+        ..., description="BuildRunSpec defines the desired state of BuildRun"
+    )
+    status: Optional[Status] = Field(
+        default=None,
+        description="BuildRunStatus defines the observed state of BuildRun",
+    )
diff --git a/components/renku_data_services/session/cr_tekton_taskrun.py b/components/renku_data_services/session/cr_tekton_taskrun.py
new file mode 100644
index 000000000..73202ca30
--- /dev/null
+++ b/components/renku_data_services/session/cr_tekton_taskrun.py
@@ -0,0 +1,23 @@
+"""Models for Tekton TaskRuns."""
+
+from pydantic import ConfigDict
+
+from renku_data_services.session.cr_base import BaseCRD
+
+
+class TaskRunStatus(BaseCRD):
+    """The status field of a TaskRun."""
+
+    podName: str | None = None
+
+
+class TaskRunBase(BaseCRD):
+    """Base model for a TaskRun."""
+
+    model_config = ConfigDict(
+        extra="allow",
+    )
+    kind: str = "TaskRun"
+    apiVersion: str = "tekton.dev/v1"
+
+    status: TaskRunStatus | None = None
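Note that `TaskRunStatus` deliberately types only the single field the builds feature reads (`status.podName`), while `extra="allow"` keeps the rest of the manifest intact. Both optional fields default to `None` so that a freshly created TaskRun, which has no status yet, still validates. A minimal sketch of the intended use (the manifest values here are made up, not from a real cluster):

```python
from renku_data_services.session.cr_tekton_taskrun import TaskRunBase

# Hypothetical manifest as the k8s API would return it; unknown keys such
# as "metadata" are preserved thanks to extra="allow".
manifest = {
    "kind": "TaskRun",
    "apiVersion": "tekton.dev/v1",
    "metadata": {"name": "buildrun-xyz"},
    "status": {"podName": "buildrun-xyz-pod"},
}
task_run = TaskRunBase.model_validate(manifest)
assert task_run.status is not None
assert task_run.status.podName == "buildrun-xyz-pod"
```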
diff --git a/components/renku_data_services/session/crs.py b/components/renku_data_services/session/crs.py
new file mode 100644
index 000000000..f1c74470d
--- /dev/null
+++ b/components/renku_data_services/session/crs.py
@@ -0,0 +1,65 @@
+"""Custom resource definition with proper names from the autogenerated code."""
+
+from datetime import datetime
+
+from pydantic import BaseModel, Field, RootModel
+
+from renku_data_services.session.cr_shipwright_buildrun import Build, Git, ParamValue, Strategy, Toleration
+from renku_data_services.session.cr_shipwright_buildrun import Model as _BuildRun
+from renku_data_services.session.cr_shipwright_buildrun import Output as BuildOutput
+from renku_data_services.session.cr_shipwright_buildrun import Retention1 as Retention
+from renku_data_services.session.cr_shipwright_buildrun import Source as BuildSource
+from renku_data_services.session.cr_shipwright_buildrun import Spec as BuildRunSpec
+from renku_data_services.session.cr_shipwright_buildrun import Spec1 as BuildSpec
+from renku_data_services.session.cr_tekton_taskrun import TaskRunBase as _TaskRunBase
+
+
+class Metadata(BaseModel):
+    """Basic k8s metadata spec."""
+
+    class Config:
+        """Do not exclude unknown properties."""
+
+        extra = "allow"
+
+    name: str
+    namespace: str | None = None
+    labels: dict[str, str] = Field(default_factory=dict)
+    annotations: dict[str, str] = Field(default_factory=dict)
+    uid: str | None = None
+    creationTimestamp: datetime | None = None
+    deletionTimestamp: datetime | None = None
+
+
+class BuildRun(_BuildRun):
+    """Shipwright BuildRun."""
+
+    kind: str = "BuildRun"
+    apiVersion: str = "shipwright.io/v1beta1"
+    # Here we overwrite the default from ASModel because it is too weakly typed
+    metadata: Metadata  # type: ignore[assignment]
+
+
+class GitSource(BuildSource):
+    """Git repository as a source for builds."""
+
+    type: str = "Git"
+    git: Git
+
+
+class TaskRun(_TaskRunBase):
+    """Tekton TaskRun."""
+
+    metadata: Metadata
+
+
+class NodeSelector(RootModel[dict[str, str] | None]):
+    """A k8s node selector."""
+
+    root: dict[str, str] | None = None
+
+
+class Tolerations(RootModel[list[Toleration] | None]):
+    """A list of k8s tolerations."""
+
+    root: list[Toleration] | None = None
diff --git a/components/renku_data_services/session/db.py b/components/renku_data_services/session/db.py
index a001148e3..71463a655 100644
--- a/components/renku_data_services/session/db.py
+++ b/components/renku_data_services/session/db.py
@@ -5,6 +5,8 @@
 from collections.abc import Callable
 from contextlib import AbstractAsyncContextManager, nullcontext
 from datetime import UTC, datetime
+from pathlib import PurePosixPath
+from typing import TYPE_CHECKING
 
 from sqlalchemy import select
 from sqlalchemy.ext.asyncio import AsyncSession
@@ -12,32 +14,47 @@
 import renku_data_services.base_models as base_models
 from renku_data_services import errors
+from renku_data_services.app_config import logging
 from renku_data_services.authz.authz import Authz, ResourceType
 from renku_data_services.authz.models import Scope
 from renku_data_services.base_models.core import RESET
 from renku_data_services.crc.db import ResourcePoolRepository
-from renku_data_services.session import models
+from renku_data_services.session import constants, models
 from renku_data_services.session import orm as schemas
+from renku_data_services.session.k8s_client import ShipwrightClient
+
+logger = logging.getLogger(__name__)
+
+if TYPE_CHECKING:
+    from renku_data_services.session.config import BuildsConfig
 
 
 class SessionRepository:
     """Repository for sessions."""
 
     def __init__(
-        self, session_maker: Callable[..., AsyncSession], project_authz: Authz, resource_pools: ResourcePoolRepository
+        self,
+        session_maker: Callable[..., AsyncSession],
+        project_authz: Authz,
+        resource_pools: ResourcePoolRepository,
+        shipwright_client: ShipwrightClient | None,
+        builds_config: BuildsConfig,
     ) -> None:
         self.session_maker = session_maker
         self.project_authz: Authz = project_authz
         self.resource_pools: ResourcePoolRepository = resource_pools
+        self.shipwright_client = shipwright_client
+        self.builds_config = builds_config
 
-    async def get_environments(self) -> list[models.Environment]:
+    async def get_environments(self, include_archived: bool = False) -> list[models.Environment]:
        """Get all global session environments from the database."""
         async with self.session_maker() as session:
res = await session.scalars( - select(schemas.EnvironmentORM).where( - schemas.EnvironmentORM.environment_kind == models.EnvironmentKind.GLOBAL.value - ) + statement = select(schemas.EnvironmentORM).where( + schemas.EnvironmentORM.environment_kind == models.EnvironmentKind.GLOBAL.value ) + if not include_archived: + statement = statement.where(schemas.EnvironmentORM.is_archived.is_(False)) + res = await session.scalars(statement) environments = res.all() return [e.dump() for e in environments] @@ -78,14 +95,105 @@ def __insert_environment( uid=new_environment.uid, gid=new_environment.gid, environment_kind=new_environment.environment_kind, + environment_image_source=new_environment.environment_image_source, command=new_environment.command, args=new_environment.args, creation_date=datetime.now(UTC).replace(microsecond=0), + is_archived=new_environment.is_archived, ) session.add(environment) return environment + def __copy_environment( + self, + user: base_models.APIUser, + session: AsyncSession, + environment: models.Environment, + ) -> schemas.EnvironmentORM: + if user.id is None: + raise errors.UnauthorizedError( + message="You have to be authenticated to insert an environment in the DB.", quiet=True + ) + new_environment = schemas.EnvironmentORM( + name=environment.name, + created_by_id=user.id, + description=environment.description, + container_image=environment.container_image, + default_url=environment.default_url, + port=environment.port, + working_directory=environment.working_directory, + mount_directory=environment.mount_directory, + uid=environment.uid, + gid=environment.gid, + environment_kind=environment.environment_kind, + command=environment.command, + args=environment.args, + creation_date=datetime.now(UTC).replace(microsecond=0), + is_archived=environment.is_archived, + environment_image_source=environment.environment_image_source, + ) + + if environment.environment_image_source == models.EnvironmentImageSource.build: + if not environment.build_parameters: + raise errors.ProgrammingError(message="Environment has no build parameters.") + new_build_parameters = schemas.BuildParametersORM( + builder_variant=environment.build_parameters.builder_variant, + frontend_variant=environment.build_parameters.frontend_variant, + repository=environment.build_parameters.repository, + repository_revision=environment.build_parameters.repository_revision, + context_dir=environment.build_parameters.context_dir, + ) + session.add(new_build_parameters) + + new_environment.build_parameters_id = new_build_parameters.id + new_environment.build_parameters = new_build_parameters + + session.add(new_environment) + return new_environment + + def __insert_build_parameters_environment( + self, + user: base_models.APIUser, + session: AsyncSession, + launcher: schemas.SessionLauncherORM, + new_build_parameters_environment: models.UnsavedBuildParameters, + ) -> schemas.EnvironmentORM: + if user.id is None: + raise errors.UnauthorizedError( + message="You have to be authenticated to insert an environment in the DB.", quiet=True + ) + build_parameters_orm = schemas.BuildParametersORM( + builder_variant=new_build_parameters_environment.builder_variant, + frontend_variant=new_build_parameters_environment.frontend_variant, + repository=new_build_parameters_environment.repository, + repository_revision=new_build_parameters_environment.repository_revision, + context_dir=new_build_parameters_environment.context_dir, + ) + session.add(build_parameters_orm) + + environment_orm = schemas.EnvironmentORM( + 
name=launcher.name, + created_by_id=user.id, + description=f"Generated environment for {launcher.name}", + container_image="image:unknown-at-the-moment", # TODO: This should come from the build + default_url="/lab", # TODO: This should come from the build + port=8888, # TODO: This should come from the build + working_directory=None, # TODO: This should come from the build + mount_directory=None, # TODO: This should come from the build + uid=1000, # TODO: This should come from the build + gid=1000, # TODO: This should come from the build + environment_kind=models.EnvironmentKind.CUSTOM, + command=None, # TODO: This should come from the build + args=None, # TODO: This should come from the build + creation_date=datetime.now(UTC).replace(microsecond=0), + environment_image_source=models.EnvironmentImageSource.build, + build_parameters_id=build_parameters_orm.id, + build_parameters=build_parameters_orm, + ) + session.add(environment_orm) + return environment_orm + async def insert_environment( self, user: base_models.APIUser, environment: models.UnsavedEnvironment ) -> models.Environment: @@ -119,9 +227,13 @@ def __update_environment( environment.default_url = update.default_url if update.port is not None: environment.port = update.port - if update.working_directory is not None: + if update.working_directory is not None and update.working_directory is RESET: + environment.working_directory = None + elif update.working_directory is not None and isinstance(update.working_directory, PurePosixPath): environment.working_directory = update.working_directory - if update.mount_directory is not None: + if update.mount_directory is not None and update.mount_directory is RESET: + environment.mount_directory = None + elif update.mount_directory is not None and isinstance(update.mount_directory, PurePosixPath): environment.mount_directory = update.mount_directory if update.uid is not None: environment.uid = update.uid @@ -136,6 +248,33 @@ def __update_environment( elif isinstance(update.command, list): environment.command = update.command + if update.is_archived is not None: + environment.is_archived = update.is_archived + + async def __update_environment_build_parameters( + self, environment: schemas.EnvironmentORM, update: models.EnvironmentPatch + ) -> None: + # TODO: For now, we don't allow updating other fields of a session environment + if not update.build_parameters: + return + + build_parameters = update.build_parameters + + if build_parameters.repository is not None: + environment.build_parameters.repository = build_parameters.repository + if build_parameters.builder_variant is not None: + environment.build_parameters.builder_variant = build_parameters.builder_variant + if build_parameters.frontend_variant is not None: + environment.build_parameters.frontend_variant = build_parameters.frontend_variant + if build_parameters.repository_revision == "": + environment.build_parameters.repository_revision = None + elif build_parameters.repository_revision: + environment.build_parameters.repository_revision = build_parameters.repository_revision + if build_parameters.context_dir == "": + environment.build_parameters.context_dir = None + elif build_parameters.context_dir: + environment.build_parameters.context_dir = build_parameters.context_dir + async def update_environment( self, user: base_models.APIUser, environment_id: ULID, patch: models.EnvironmentPatch ) -> models.Environment: @@ -242,6 +381,8 @@ async def insert_launcher( message=f"Project with id '{project_id}' does not exist or you do not have 
access to it." ) + start_build = False + async with self.session_maker() as session, session.begin(): res = await session.scalars(select(schemas.ProjectORM).where(schemas.ProjectORM.id == project_id)) project = res.one_or_none() @@ -269,8 +410,41 @@ async def insert_launcher( command=launcher.environment.command, args=launcher.environment.args, creation_date=datetime.now(UTC).replace(microsecond=0), + environment_image_source=models.EnvironmentImageSource.image, + ) + session.add(environment_orm) + elif isinstance(launcher.environment, models.UnsavedBuildParameters): + build_parameters_orm = schemas.BuildParametersORM( + builder_variant=launcher.environment.builder_variant, + frontend_variant=launcher.environment.frontend_variant, + repository=launcher.environment.repository, + repository_revision=launcher.environment.repository_revision, + context_dir=launcher.environment.context_dir, + ) + session.add(build_parameters_orm) + + environment_orm = schemas.EnvironmentORM( + name=launcher.name, + created_by_id=user.id, + description=f"Generated environment for {launcher.name}", + container_image="image:unknown-at-the-moment", # TODO: This should come from the build + default_url=constants.DEFAULT_URLS.get(launcher.environment.frontend_variant, "/"), + port=constants.BUILD_PORT, # TODO: This should come from the build + working_directory=constants.BUILD_WORKING_DIRECTORY, # TODO: This should come from the build + mount_directory=constants.BUILD_MOUNT_DIRECTORY, # TODO: This should come from the build + uid=constants.BUILD_UID, # TODO: This should come from the build + gid=constants.BUILD_GID, # TODO: This should come from the build + environment_kind=models.EnvironmentKind.CUSTOM, + command=None, # TODO: This should come from the build + args=None, # TODO: This should come from the build + creation_date=datetime.now(UTC).replace(microsecond=0), + environment_image_source=models.EnvironmentImageSource.build, + build_parameters_id=build_parameters_orm.id, + build_parameters=build_parameters_orm, ) session.add(environment_orm) + + start_build = True else: environment_id = ULID.from_str(launcher.environment) res_env = await session.scalars( @@ -283,6 +457,10 @@ async def insert_launcher( raise errors.MissingResourceError( message=f"Session environment with id '{environment_id}' does not exist or you do not have access to it." # noqa: E501 ) + if environment_orm.is_archived: + raise errors.ValidationError( + message="Cannot create a new session launcher with an archived environment." + ) environment = environment_orm.dump() environment_id = environment.id @@ -311,13 +489,20 @@ async def insert_launcher( description=launcher.description if launcher.description else None, environment_id=environment_id, resource_class_id=launcher.resource_class_id, + disk_storage=launcher.disk_storage, + env_variables=models.EnvVar.to_dict(launcher.env_variables) if launcher.env_variables else None, created_by_id=user.id, creation_date=datetime.now(UTC).replace(microsecond=0), ) session.add(launcher_orm) await session.flush() await session.refresh(launcher_orm) - return launcher_orm.dump() + + if start_build: + build = models.UnsavedBuild(environment_id=environment_id) + await self.start_build(user, build) + + return launcher_orm.dump() async def copy_launcher( self, user: base_models.APIUser, project_id: ULID, launcher: models.SessionLauncher @@ -340,12 +525,20 @@ async def copy_launcher( message=f"Project with id '{project_id}' does not exist or you do not have access to it." 
) + if launcher.environment.environment_kind == models.EnvironmentKind.CUSTOM: + environment = self.__copy_environment(user, session, launcher.environment) + environment_id = environment.id + else: + environment_id = launcher.environment.id + launcher_orm = schemas.SessionLauncherORM( name=launcher.name, project_id=project_id, description=launcher.description, - environment_id=launcher.environment.id, + environment_id=environment_id, resource_class_id=launcher.resource_class_id, + disk_storage=launcher.disk_storage, + env_variables=models.EnvVar.to_dict(launcher.env_variables) if launcher.env_variables else None, created_by_id=user.id, creation_date=datetime.now(UTC).replace(microsecond=0), ) @@ -380,8 +573,7 @@ async def update_launcher( launcher = res.one_or_none() if launcher is None: raise errors.MissingResourceError( - message=f"Session launcher with id '{launcher_id}' does not " - "exist or you do not have access to it." + message=f"Session launcher with id '{launcher_id}' does not exist or you do not have access to it." ) authorized = await self.project_authz.has_permission( @@ -420,6 +612,14 @@ async def update_launcher( launcher.resource_class_id = patch.resource_class_id elif patch.resource_class_id is RESET: launcher.resource_class_id = None + if isinstance(patch.disk_storage, int): + launcher.disk_storage = patch.disk_storage + elif patch.disk_storage is RESET: + launcher.disk_storage = None + if isinstance(patch.env_variables, list): + launcher.env_variables = models.EnvVar.to_dict(patch.env_variables) + elif patch.env_variables is RESET: + launcher.env_variables = None if patch.environment is None: return launcher.dump() @@ -434,13 +634,13 @@ async def __update_launcher_environment( user: base_models.APIUser, launcher: schemas.SessionLauncherORM, session: AsyncSession, - update: models.EnvironmentPatch | models.UnsavedEnvironment | str, + update: models.EnvironmentPatch | models.UnsavedEnvironment | models.UnsavedBuildParameters | str, ) -> None: current_env_kind = launcher.environment.environment_kind match update, current_env_kind: case str() as env_id, _: # The environment in the launcher is set via ID, the new ID has to refer - # to an environment that is GLOBAL. + # to an environment that is global. old_environment = launcher.environment new_environment_id = ULID.from_str(env_id) res_env = await session.scalars( @@ -461,19 +661,80 @@ async def __update_launcher_environment( launcher.environment_id = new_environment_id launcher.environment = new_environment if old_environment.environment_kind == models.EnvironmentKind.CUSTOM: - # A custom environment exists but it is being updated to a global one + # A custom environment exists, but it is being updated to a global one # We remove the custom environment to avoid accumulating custom environments that are not associated # with any launchers. 
                 await session.delete(old_environment)
             case models.EnvironmentPatch(), models.EnvironmentKind.CUSTOM:
-                # Custom environment being updated
-                self.__update_environment(launcher.environment, update)
-            case models.UnsavedEnvironment() as new_custom_environment, models.EnvironmentKind.GLOBAL if (
-                new_custom_environment.environment_kind == models.EnvironmentKind.CUSTOM
-            ):
+                # The custom environment is updated without changing the image source
+                if launcher.environment.environment_image_source == models.EnvironmentImageSource.build:
+                    await self.__update_environment_build_parameters(launcher.environment, update)
+                else:
+                    self.__update_environment(launcher.environment, update)
+            case models.UnsavedEnvironment() as new_custom_environment, models.EnvironmentKind.GLOBAL:
                 # Global environment replaced by a custom one
                 new_env = self.__insert_environment(user, session, new_custom_environment)
+                launcher.environment_id = new_env.id
+                launcher.environment = new_env
+                await session.flush()
+            case models.UnsavedEnvironment() as new_custom_environment, models.EnvironmentKind.CUSTOM:
+                # A custom environment (image- or build-based) is replaced by a custom environment with image
+                build_parameters = launcher.environment.build_parameters
+
+                launcher.environment.name = update.name
+                launcher.environment.description = update.description
+                launcher.environment.container_image = update.container_image
+                launcher.environment.default_url = update.default_url
+                launcher.environment.port = update.port
+                launcher.environment.working_directory = update.working_directory
+                launcher.environment.mount_directory = update.mount_directory
+                launcher.environment.uid = update.uid
+                launcher.environment.gid = update.gid
+                launcher.environment.environment_kind = models.EnvironmentKind.CUSTOM
+                launcher.environment.command = update.command
+                launcher.environment.args = update.args
+                launcher.environment.environment_image_source = models.EnvironmentImageSource.image
+                launcher.environment.build_parameters_id = None
+
+                # NOTE: Delete any old build parameters since they are not used by any other environment
+                if build_parameters is not None:
+                    await session.delete(build_parameters)
+
+                await session.flush()
+            case models.UnsavedBuildParameters() as new_custom_built_environment, models.EnvironmentKind.GLOBAL:
+                # Global environment replaced by a custom one which will be built
+                new_env = self.__insert_build_parameters_environment(
+                    user, session, launcher, new_custom_built_environment
+                )
+                launcher.environment_id = new_env.id
+                launcher.environment = new_env
+                await session.flush()
+            case models.UnsavedBuildParameters() as new_custom_built_environment, models.EnvironmentKind.CUSTOM:
+                # Custom environment with image is replaced by a custom environment with build
+                build_parameters_orm = schemas.BuildParametersORM(
+                    builder_variant=new_custom_built_environment.builder_variant,
+                    frontend_variant=new_custom_built_environment.frontend_variant,
+                    repository=new_custom_built_environment.repository,
+                    repository_revision=new_custom_built_environment.repository_revision,
+                    context_dir=new_custom_built_environment.context_dir,
+                )
+                session.add(build_parameters_orm)
+
+                launcher.environment.container_image = (
+                    "image:unknown-at-the-moment"  # TODO: This should come from the build
+                )
+                launcher.environment.default_url = "/lab"  # TODO: This should come from the build
+                launcher.environment.port = 8888  # TODO: This should come from the build
+                launcher.environment.working_directory = None  # TODO: This should come from the build
+                launcher.environment.mount_directory = None  # TODO: This should come from the build
+                launcher.environment.uid = 1000  # TODO: This should come from the build
+                launcher.environment.gid = 1000  # TODO: This should come from the build
+                launcher.environment.environment_kind = models.EnvironmentKind.CUSTOM
+                launcher.environment.command = None  # TODO: This should come from the build
+                launcher.environment.args = None  # TODO: This should come from the build
+                launcher.environment.environment_image_source = models.EnvironmentImageSource.build
+                launcher.environment.build_parameters_id = build_parameters_orm.id
+                launcher.environment.build_parameters = build_parameters_orm
+                await session.flush()
+            case _:
+                raise errors.ValidationError(
@@ -506,3 +767,344 @@ async def delete_launcher(self, user: base_models.APIUser, launcher_id: ULID) ->
             await session.delete(launcher)
             if launcher.environment.environment_kind == models.EnvironmentKind.CUSTOM:
                 await session.delete(launcher.environment)
+
+    async def get_build(self, user: base_models.APIUser, build_id: ULID) -> models.Build:
+        """Get a specific build."""
+        async with self.session_maker() as session, session.begin():
+            stmt = select(schemas.BuildORM).where(schemas.BuildORM.id == build_id)
+            result = await session.scalars(stmt)
+            build = result.one_or_none()
+
+            not_found_message = f"Build with id '{build_id}' does not exist or you do not have access to it."
+            if build is None:
+                raise errors.MissingResourceError(message=not_found_message)
+
+            authorized = await self._get_environment_authorization(
+                session=session, user=user, environment=build.environment, scope=Scope.READ
+            )
+            if not authorized:
+                raise errors.MissingResourceError(message=not_found_message)
+
+            # Check and refresh the status of in-progress builds
+            if user.id is not None:
+                await self._refresh_build(build=build, session=session, user_id=user.id)
+
+            return build.dump()
+
+    async def get_environment_builds(self, user: base_models.APIUser, environment_id: ULID) -> list[models.Build]:
+        """Get all builds from a session environment."""
+
+        if not user.is_authenticated or user.id is None:
+            raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.")
+
+        async with self.session_maker() as session, session.begin():
+            environment = await session.scalar(
+                select(schemas.EnvironmentORM).where(schemas.EnvironmentORM.id == environment_id)
+            )
+
+            not_found_message = (
+                f"Session environment with id '{environment_id}' does not exist or you do not have access to it."
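The match above is the core of environment switching on a launcher update. A rough summary of the dispatch (illustrative only; the tuple keys paraphrase the case patterns and are not real identifiers):

```python
# (incoming update, current environment kind) -> action taken by the match above
DISPATCH_SUMMARY = {
    ("environment id str", "any"): "re-point the launcher to an existing GLOBAL environment, deleting an orphaned CUSTOM one",
    ("EnvironmentPatch", "CUSTOM"): "patch in place; build-backed environments get their build parameters patched instead",
    ("UnsavedEnvironment", "GLOBAL"): "insert a new CUSTOM image-based environment and attach it",
    ("UnsavedEnvironment", "CUSTOM"): "overwrite the CUSTOM environment in place with image-based fields",
    ("UnsavedBuildParameters", "GLOBAL"): "insert a new CUSTOM build-backed environment and attach it",
    ("UnsavedBuildParameters", "CUSTOM"): "switch the CUSTOM environment to build-backed, with placeholder image fields",
}
# Anything else falls through to the ValidationError below.
```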
+ ) + if environment is None: + raise errors.MissingResourceError(message=not_found_message) + + authorized = await self._get_environment_authorization( + session=session, user=user, environment=environment, scope=Scope.READ + ) + if not authorized: + raise errors.MissingResourceError(message=not_found_message) + + stmt = ( + select(schemas.BuildORM) + .where(schemas.BuildORM.environment_id == environment_id) + .order_by(schemas.BuildORM.id.desc()) + ) + result = await session.scalars(stmt) + builds = result.all() + + # Check and refresh the status of in-progress builds + for build in builds: + await self._refresh_build(build=build, session=session, user_id=user.id) + + return [build.dump() for build in builds] + + async def start_build(self, user: base_models.APIUser, build: models.UnsavedBuild) -> models.Build: + """Insert a new build.""" + if not user.is_authenticated or user.id is None: + raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + + async with self.session_maker() as session, session.begin(): + environment = await session.scalar( + select(schemas.EnvironmentORM).where(schemas.EnvironmentORM.id == build.environment_id) + ) + + not_found_message = ( + f"Session environment with id '{build.environment_id}' does not exist or you do not have access to it." + ) + if environment is None: + raise errors.MissingResourceError(message=not_found_message) + + authorized = await self._get_environment_authorization( + session=session, user=user, environment=environment, scope=Scope.READ + ) + if not authorized: + raise errors.MissingResourceError(message=not_found_message) + + if environment.environment_kind == models.EnvironmentKind.GLOBAL: + launcher_orm = None + else: + launcher_orm = await session.scalar( + select(schemas.SessionLauncherORM).where( + schemas.SessionLauncherORM.environment_id == build.environment_id + ) + ) + + build_parameters = environment.build_parameters.dump() + + # We check if there is any in-progress build + in_progress_builds = await session.stream_scalars( + select(schemas.BuildORM) + .where(schemas.BuildORM.environment_id == build.environment_id) + .where(schemas.BuildORM.status == models.BuildStatus.in_progress) + .order_by(schemas.BuildORM.id.desc()) + ) + async for item in in_progress_builds: + await self._refresh_build(build=item, session=session, user_id=user.id) + if item.status == models.BuildStatus.in_progress: + raise errors.ConflictError( + message=f"Session environment with id '{build.environment_id}' already has a build in progress." 
+ ) + + build_orm = schemas.BuildORM( + environment_id=build.environment_id, + status=models.BuildStatus.in_progress, + ) + session.add(build_orm) + await session.flush() + await session.refresh(build_orm) + + result = build_orm.dump() + launcher = launcher_orm.dump() if launcher_orm is not None else None + + if self.shipwright_client is not None: + params = self._get_buildrun_params( + user=user, build=result, build_parameters=build_parameters, launcher=launcher + ) + await self.shipwright_client.create_image_build(params=params, user_id=user.id) + else: + logger.error("Shipwright client is None") + + return result + + async def update_build(self, user: base_models.APIUser, build_id: ULID, patch: models.BuildPatch) -> models.Build: + """Update a build entry.""" + if not user.is_authenticated or user.id is None: + raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + + async with self.session_maker() as session, session.begin(): + stmt = select(schemas.BuildORM).where(schemas.BuildORM.id == build_id) + result = await session.scalars(stmt) + build = result.one_or_none() + + not_found_message = f"Build with id '{build_id}' does not exist or you do not have access to it." + if build is None: + raise errors.MissingResourceError(message=not_found_message) + + authorized = await self._get_environment_authorization( + session=session, user=user, environment=build.environment, scope=Scope.WRITE + ) + if not authorized: + raise errors.MissingResourceError(message=not_found_message) + + # Check and refresh the status of in-progress builds + await self._refresh_build(build=build, session=session, user_id=user.id) + + if build.status == models.BuildStatus.succeeded or build.status == models.BuildStatus.failed: + raise errors.ValidationError( + message=f"Cannot update build with id '{build_id}': the build has status {build.status}." + ) + + # Only accept build cancellations + if patch.status == models.BuildStatus.cancelled: + build.status = patch.status + + await session.flush() + await session.refresh(build) + + build_model = build.dump() + + if self.shipwright_client is not None: + await self.shipwright_client.cancel_build_run(name=build_model.k8s_name, user_id=user.id) + else: + logger.error("Shipwright client is None") + + return build_model + + async def get_build_logs( + self, user: base_models.APIUser, build_id: ULID, max_log_lines: int | None = None + ) -> dict[str, str]: + """Get the logs of a build by querying Shipwright.""" + if not user.is_authenticated or user.id is None: + raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + + async with self.session_maker() as session, session.begin(): + stmt = select(schemas.BuildORM).where(schemas.BuildORM.id == build_id) + result = await session.scalars(stmt) + build = result.one_or_none() + + if build is None: + raise errors.MissingResourceError( + message=f"Build with id '{build_id}' does not exist or you do not have access to it." 
+ ) + + if build.environment.environment_kind == models.EnvironmentKind.GLOBAL: + authorized = True + else: + launcher = await session.scalar( + select(schemas.SessionLauncherORM).where( + schemas.SessionLauncherORM.environment_id == build.environment_id + ) + ) + if launcher is None: + authorized = False + else: + authorized = await self.project_authz.has_permission( + user, ResourceType.project, launcher.project_id, Scope.WRITE + ) + if not authorized: + raise errors.MissingResourceError( + message=f"Build with id '{build_id}' does not exist or you do not have access to it." + ) + + build_model = build.dump() + + if self.shipwright_client is None: + raise errors.MissingResourceError(message=f"Build with id '{build_id}' does not have logs.") + + return await self.shipwright_client.get_image_build_logs( + buildrun_name=build_model.k8s_name, user_id=user.id, max_log_lines=max_log_lines + ) + + async def _refresh_build(self, build: schemas.BuildORM, session: AsyncSession, user_id: str) -> None: + """Refresh the status of a build by querying Shipwright.""" + if build.status != models.BuildStatus.in_progress: + return + + # Note: We can't get an update about the build if there is no client for Shipwright. + if self.shipwright_client is None: + logger.error("Shipwright client is None") + return + + # TODO: consider how we can parallelize calls to `shipwright_client` for refreshes. + status_update = await self.shipwright_client.update_image_build_status( + buildrun_name=build.dump().k8s_name, user_id=user_id + ) + + if status_update.update is None: + return + + update = status_update.update + if update is not None and update.status == models.BuildStatus.failed: + build.status = models.BuildStatus.failed + build.completed_at = update.completed_at + build.error_reason = update.error_reason + elif update is not None and update.status == models.BuildStatus.succeeded and update.result is not None: + build.status = models.BuildStatus.succeeded + build.completed_at = update.completed_at + build.result_image = update.result.image + build.result_repository_url = update.result.repository_url + build.result_repository_git_commit_sha = update.result.repository_git_commit_sha + # Also update the session environment here + # TODO: move this to its own method where build parameters determine args + environment = build.environment + environment.container_image = build.result_image + # An older version was hardcoding the values but we can and should + # rely on the defaults for args and command + if environment.command is not None: + environment.command = None + if environment.args is not None: + environment.args = None + + await session.flush() + await session.refresh(build) + + def _get_buildrun_params( + self, + user: base_models.APIUser, + build: models.Build, + build_parameters: models.BuildParameters, + launcher: models.SessionLauncher | None, + ) -> models.ShipwrightBuildRunParams: + """Derive the Shipwright BuildRun params from a Build instance and a BuildParameters instance.""" + if not user.is_authenticated or user.id is None: + raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.") + + git_repository = build_parameters.repository + git_repository_revision = build_parameters.repository_revision + context_dir = build_parameters.context_dir + + output_image_prefix = ( + self.builds_config.build_output_image_prefix or constants.BUILD_DEFAULT_OUTPUT_IMAGE_PREFIX + ) + output_image_name = constants.BUILD_OUTPUT_IMAGE_NAME + output_image_tag = build.k8s_name + 
output_image = f"{output_image_prefix}{output_image_name}:{output_image_tag}" + + # TODO: define the build strategy from `build_parameters` + build_strategy_name = self.builds_config.build_strategy_name or constants.BUILD_DEFAULT_BUILD_STRATEGY_NAME + push_secret_name = self.builds_config.push_secret_name or constants.BUILD_DEFAULT_PUSH_SECRET_NAME + + retention_after_failed = ( + self.builds_config.buildrun_retention_after_failed or constants.BUILD_RUN_DEFAULT_RETENTION_AFTER_FAILED + ) + retention_after_succeeded = ( + self.builds_config.buildrun_retention_after_succeeded + or constants.BUILD_RUN_DEFAULT_RETENTION_AFTER_SUCCEEDED + ) + build_timeout = self.builds_config.buildrun_build_timeout or constants.BUILD_RUN_DEFAULT_TIMEOUT + + labels: dict[str, str] = { + "renku.io/safe-username": user.id, + } + annotations: dict[str, str] = { + "renku.io/build_id": str(build.id), + "renku.io/environment_id": str(build.environment_id), + } + if launcher: + annotations["renku.io/launcher_id"] = str(launcher.id) + annotations["renku.io/project_id"] = str(launcher.project_id) + + return models.ShipwrightBuildRunParams( + name=build.k8s_name, + git_repository=git_repository, + build_image=constants.BUILD_BUILDER_IMAGE, + run_image=constants.BUILD_RUN_IMAGE, + output_image=output_image, + build_strategy_name=build_strategy_name, + push_secret_name=push_secret_name, + retention_after_failed=retention_after_failed, + retention_after_succeeded=retention_after_succeeded, + build_timeout=build_timeout, + node_selector=self.builds_config.node_selector, + tolerations=self.builds_config.tolerations, + labels=labels, + annotations=annotations, + frontend=build_parameters.frontend_variant, + git_repository_revision=git_repository_revision, + context_dir=context_dir, + ) + + async def _get_environment_authorization( + self, session: AsyncSession, user: base_models.APIUser, environment: schemas.EnvironmentORM, scope: Scope + ) -> bool: + """Checks whether the provided user has a specific permission on a session environment.""" + if environment.environment_kind == models.EnvironmentKind.GLOBAL: + return scope == Scope.READ or user.is_admin + + launcher = await session.scalar( + select(schemas.SessionLauncherORM).where(schemas.SessionLauncherORM.environment_id == environment.id) + ) + authorized = False + if launcher: + authorized = await self.project_authz.has_permission(user, ResourceType.project, launcher.project_id, scope) + return authorized diff --git a/components/renku_data_services/session/k8s_client.py b/components/renku_data_services/session/k8s_client.py new file mode 100644 index 000000000..1341f5a6d --- /dev/null +++ b/components/renku_data_services/session/k8s_client.py @@ -0,0 +1,317 @@ +"""An abstraction over the kr8s kubernetes client and the k8s-watcher.""" + +from collections.abc import AsyncIterable +from typing import TYPE_CHECKING + +import httpx +from kr8s import NotFoundError, ServerError +from kr8s.asyncio.objects import APIObject, Pod + +from renku_data_services import errors +from renku_data_services.errors.errors import CannotStartBuildError +from renku_data_services.k8s.constants import ClusterId +from renku_data_services.k8s.models import GVK, K8sObjectFilter, K8sObjectMeta +from renku_data_services.notebooks.api.classes.k8s_client import DEFAULT_K8S_CLUSTER +from renku_data_services.notebooks.util.retries import retry_with_exponential_backoff_async +from renku_data_services.session import crs, models +from renku_data_services.session.constants import ( + BUILD_RUN_GVK, + 
DUMMY_TASK_RUN_USER_ID, + TASK_RUN_GVK, +) +from renku_data_services.session.crs import BuildRun, TaskRun + +if TYPE_CHECKING: + from renku_data_services.k8s.clients import K8sClusterClientsPool + + +# NOTE The type ignore below is because the kr8s library has no type stubs, they claim pyright better handles type hints +class ShipwrightBuildRunV1Beta1Kr8s(APIObject): + """Spec for Shipwright BuildRuns used by the k8s client.""" + + kind: str = BUILD_RUN_GVK.kind + version: str = BUILD_RUN_GVK.group_version + namespaced: bool = True + plural: str = "buildruns" + singular: str = "buildrun" + scalable: bool = False + endpoint: str = "buildruns" + + +# NOTE The type ignore below is because the kr8s library has no type stubs, they claim pyright better handles type hints +class TektonTaskRunV1Kr8s(APIObject): + """Spec for Tekton TaskRuns used by the k8s client.""" + + kind: str = TASK_RUN_GVK.kind + version: str = TASK_RUN_GVK.group_version + namespaced: bool = True + plural: str = "taskruns" + singular: str = "taskrun" + scalable: bool = False + endpoint: str = "taskruns" + + +class ShipwrightClient: + """The K8s client that combines a base client and a cache. + + No authentication or authorization is performed - this is the responsibility of the caller. + """ + + def __init__( + self, + client: "K8sClusterClientsPool", + namespace: str, + ) -> None: + self.client = client + self.namespace = namespace + + @staticmethod + def cluster_id() -> ClusterId: + """Cluster id of the main cluster.""" + return DEFAULT_K8S_CLUSTER + + async def list_build_runs(self, user_id: str) -> AsyncIterable[BuildRun]: + """Get a list of Shipwright BuildRuns.""" + builds = self.client.list(K8sObjectFilter(namespace=self.namespace, gvk=BUILD_RUN_GVK, user_id=user_id)) + async for build in builds: + yield BuildRun.model_validate(build.manifest.to_dict()) + return + + async def get_build_run(self, name: str, user_id: str) -> BuildRun | None: + """Get a Shipwright BuildRun.""" + result = await self.client.get( + K8sObjectMeta( + name=name, + namespace=self.namespace, + cluster=self.cluster_id(), + gvk=BUILD_RUN_GVK, + user_id=user_id, + ) + ) + if result is None: + return None + + return BuildRun.model_validate(result.manifest.to_dict()) + + async def create_build_run(self, manifest: BuildRun, user_id: str) -> BuildRun: + """Create a new Shipwright BuildRun.""" + manifest.metadata.namespace = self.namespace + build_run_name = manifest.metadata.name + await self.client.create( + K8sObjectMeta( + name=build_run_name, + namespace=self.namespace, + cluster=self.cluster_id(), + gvk=BUILD_RUN_GVK, + user_id=user_id, + ).with_manifest(manifest=manifest.model_dump(exclude_none=True, mode="json")) + ) + build_resource = await retry_with_exponential_backoff_async(lambda x: x is None)(self.get_build_run)( + build_run_name, user_id + ) + if build_resource is None: + raise CannotStartBuildError(message=f"Cannot create the image build {build_run_name}") + return build_resource + + async def delete_build_run(self, name: str, user_id: str) -> None: + """Delete a Shipwright BuildRun.""" + return await self.client.delete( + K8sObjectMeta( + name=name, + namespace=self.namespace, + cluster=self.cluster_id(), + gvk=BUILD_RUN_GVK, + user_id=user_id, + ) + ) + + async def cancel_build_run(self, name: str, user_id: str) -> BuildRun: + """Cancel a Shipwright BuildRun.""" + build = await self.client.patch( + K8sObjectMeta( + name=name, + namespace=self.namespace, + cluster=self.cluster_id(), + gvk=BUILD_RUN_GVK, + user_id=user_id, + ), + 
+            patch={"spec": {"state": "BuildRunCanceled"}},
+        )
+        return BuildRun.model_validate(build.manifest.to_dict())
+
+    async def get_task_run(self, name: str) -> TaskRun | None:
+        """Get a Tekton TaskRun.
+
+        Note: since we can't store custom labels on tekton task runs, we use a hard-coded, fixed user id in the cache db.
+        """
+        task = await self.client.get(
+            K8sObjectMeta(
+                name=name,
+                namespace=self.namespace,
+                cluster=self.cluster_id(),
+                gvk=TASK_RUN_GVK,
+                user_id=DUMMY_TASK_RUN_USER_ID,
+            )
+        )
+        if task is None:
+            return task
+        return TaskRun.model_validate(task.manifest.to_dict())
+
+    async def create_image_build(self, params: models.ShipwrightBuildRunParams, user_id: str) -> None:
+        """Create a new BuildRun in Shipwright to support a newly created build."""
+        metadata = crs.Metadata(name=params.name)
+        if params.annotations:
+            metadata.annotations = params.annotations
+        if params.labels:
+            metadata.labels = params.labels
+
+        retention: crs.Retention | None = None
+        if params.retention_after_failed or params.retention_after_succeeded:
+            retention_after_failed = (
+                int(params.retention_after_failed.total_seconds()) if params.retention_after_failed else None
+            )
+            retention_after_succeeded = (
+                int(params.retention_after_succeeded.total_seconds()) if params.retention_after_succeeded else None
+            )
+            retention = crs.Retention(
+                ttlAfterFailed=f"{retention_after_failed}s" if retention_after_failed else None,
+                ttlAfterSucceeded=f"{retention_after_succeeded}s" if retention_after_succeeded else None,
+            )
+
+        build_run = BuildRun(
+            metadata=metadata,
+            spec=crs.BuildRunSpec(
+                build=crs.Build(
+                    spec=crs.BuildSpec(
+                        source=crs.GitSource(
+                            git=crs.Git(url=params.git_repository, revision=params.git_repository_revision),
+                            contextDir=params.context_dir,
+                        ),
+                        strategy=crs.Strategy(kind="BuildStrategy", name=params.build_strategy_name),
+                        paramValues=[
+                            crs.ParamValue(name="frontend", value=params.frontend),
+                            crs.ParamValue(name="run-image", value=params.run_image),
+                            crs.ParamValue(name="builder-image", value=params.build_image),
+                        ],
+                        output=crs.BuildOutput(
+                            image=params.output_image,
+                            pushSecret=params.push_secret_name,
+                        ),
+                        timeout=f"{params.build_timeout.total_seconds()}s" if params.build_timeout else None,
+                        nodeSelector=params.node_selector,
+                        tolerations=params.tolerations,
+                    )
+                ),
+                retention=retention,
+            ),
+        )
+        await self.create_build_run(build_run, user_id)
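For orientation, the BuildRun that `create_image_build` assembles serializes to roughly the following manifest (a sketch with made-up values; the strategy, secret, and image names come from `BuildsConfig` and `constants`, not from this snippet):

```python
# Approximate wire format of the submitted BuildRun (illustrative values only).
buildrun_manifest = {
    "apiVersion": "shipwright.io/v1beta1",
    "kind": "BuildRun",
    "metadata": {"name": "example-build-run", "labels": {"renku.io/safe-username": "user-1"}},
    "spec": {
        "build": {
            "spec": {
                "source": {"type": "Git", "git": {"url": "https://example.com/repo.git"}},
                "strategy": {"kind": "BuildStrategy", "name": "example-strategy"},
                "paramValues": [{"name": "frontend", "value": "jupyterlab"}],
                "output": {"image": "registry.example.com/builds:example-tag", "pushSecret": "example-push-secret"},
                "timeout": "3600.0s",
            }
        },
        "retention": {"ttlAfterFailed": "86400s", "ttlAfterSucceeded": "86400s"},
    },
}
```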
+
+    async def update_image_build_status(self, buildrun_name: str, user_id: str) -> models.ShipwrightBuildStatusUpdate:
+        """Update the status of a build by pulling the corresponding BuildRun from Shipwright."""
+        k8s_build = await self.get_build_run(name=buildrun_name, user_id=user_id)
+
+        if k8s_build is None:
+            return models.ShipwrightBuildStatusUpdate(
+                update=models.ShipwrightBuildStatusUpdateContent(status=models.BuildStatus.failed)
+            )
+
+        k8s_build_status = k8s_build.status
+        completion_time = k8s_build_status.completionTime if k8s_build_status else None
+
+        if k8s_build_status is None or completion_time is None:
+            return models.ShipwrightBuildStatusUpdate(update=None)
+
+        conditions = k8s_build_status.conditions
+        condition = next(filter(lambda c: c.type == "Succeeded", conditions or []), None)
+
+        buildSpec = k8s_build_status.buildSpec
+        output = buildSpec.output if buildSpec else None
+        result_image = output.image if output else "unknown"
+
+        source = buildSpec.source if buildSpec else None
+        git_obj = source.git if source else None
+        result_repository_url = git_obj.url if git_obj else "unknown"
+
+        source_2 = k8s_build_status.source
+        git_obj_2 = source_2.git if source_2 else None
+        result_repository_git_commit_sha = git_obj_2.commitSha if git_obj_2 else None
+        result_repository_git_commit_sha = result_repository_git_commit_sha or "unknown"
+
+        if condition is not None and condition.status == "True":
+            return models.ShipwrightBuildStatusUpdate(
+                update=models.ShipwrightBuildStatusUpdateContent(
+                    status=models.BuildStatus.succeeded,
+                    completed_at=completion_time,
+                    result=models.BuildResult(
+                        completed_at=completion_time,
+                        image=result_image,
+                        repository_url=result_repository_url,
+                        repository_git_commit_sha=result_repository_git_commit_sha,
+                    ),
+                )
+            )
+        else:
+            return models.ShipwrightBuildStatusUpdate(
+                update=models.ShipwrightBuildStatusUpdateContent(
+                    status=models.BuildStatus.failed,
+                    completed_at=completion_time,
+                    error_reason=condition.reason if condition is not None else None,
+                )
+            )
+
+    async def get_image_build_logs(
+        self, buildrun_name: str, user_id: str, max_log_lines: int | None = None
+    ) -> dict[str, str]:
+        """Get the logs from a Shipwright BuildRun."""
+        buildrun = await self.get_build_run(name=buildrun_name, user_id=user_id)
+        if not buildrun:
+            raise errors.MissingResourceError(message=f"Cannot find buildrun {buildrun_name} to retrieve logs.")
+        status = buildrun.status
+        task_run_name = status.taskRunName if status else None
+        if not task_run_name:
+            raise errors.MissingResourceError(
+                message=f"The buildrun {buildrun_name} has no taskrun to retrieve logs from."
+            )
+        taskrun = await self.get_task_run(name=task_run_name)
+        if not taskrun:
+            raise errors.MissingResourceError(
+                message=f"Cannot find taskrun from buildrun {buildrun_name} to retrieve logs."
+            )
+        pod_name = taskrun.status.podName if taskrun.status else None
+        if not pod_name:
+            raise errors.MissingResourceError(message=f"The buildrun {buildrun_name} has no pod to retrieve logs from.")
+        return await self._get_pod_logs(name=pod_name, max_log_lines=max_log_lines)
+
+    async def _get_pod_logs(self, name: str, max_log_lines: int | None = None) -> dict[str, str]:
+        """Get the logs of all containers in a given pod."""
+        result = await self.client.get(
+            K8sObjectMeta(
+                name=name, namespace=self.namespace, cluster=self.cluster_id(), gvk=GVK(kind="Pod", version="v1")
+            )
+        )
+        logs: dict[str, str] = {}
+        if result is None:
+            return logs
+        cluster = self.client.cluster_by_id(result.cluster)
+
+        obj = result.to_api_object(cluster.api)
+        result = Pod(resource=obj, namespace=obj.namespace, api=cluster.api)
+
+        containers = [container.name for container in result.spec.containers + result.spec.get("initContainers", [])]
+        for container in containers:
+            try:
+                # NOTE: calling pod.logs without a container name set crashes the library
+                clogs: list[str] = [clog async for clog in result.logs(container=container, tail_lines=max_log_lines)]
+            except httpx.ResponseNotRead:
+                # NOTE: This occurs when the container is still starting, but we try to read its logs
+                continue
+            except NotFoundError as err:
+                raise errors.MissingResourceError(message=f"The pod {name} does not exist.") from err
+            except ServerError as err:
+                if err.response is not None and err.response.status_code == 404:
+                    raise errors.MissingResourceError(message=f"The pod {name} does not exist.") from err
+                raise
+            else:
+                logs[container] = "\n".join(clogs)
+        return logs
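`get_image_build_logs` has to hop across three objects before it can read anything. Compressed, the chain it follows looks like this (a sketch with error handling elided; the names come from the methods above, the values are illustrative):

```python
# BuildRun -> TaskRun -> Pod -> per-container logs
buildrun = await shipwright_client.get_build_run(name="example-build-run", user_id="user-1")
taskrun = await shipwright_client.get_task_run(name=buildrun.status.taskRunName)
logs = await shipwright_client._get_pod_logs(name=taskrun.status.podName, max_log_lines=200)
for container, text in logs.items():
    print(container, text.splitlines()[:3])
```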
a/components/renku_data_services/session/models.py +++ b/components/renku_data_services/session/models.py @@ -1,14 +1,22 @@ """Models for sessions.""" +import typing from dataclasses import dataclass -from datetime import datetime +from datetime import datetime, timedelta from enum import StrEnum from pathlib import PurePosixPath +from typing import TYPE_CHECKING from ulid import ULID from renku_data_services import errors from renku_data_services.base_models.core import ResetType +from renku_data_services.session import crs + +if TYPE_CHECKING: + from renku_data_services.session import apispec + +from .constants import ENV_VARIABLE_NAME_MATCHER, ENV_VARIABLE_REGEX @dataclass(frozen=True, eq=True, kw_only=True) @@ -21,8 +29,46 @@ class Member: class EnvironmentKind(StrEnum): """The type of environment.""" - GLOBAL: str = "GLOBAL" - CUSTOM: str = "CUSTOM" + GLOBAL = "GLOBAL" + CUSTOM = "CUSTOM" + + +class EnvironmentImageSource(StrEnum): + """The source of the environment image.""" + + image = "image" + build = "build" + + +class BuilderVariant(StrEnum): + """The type of environment builder.""" + + python = "python" + + +class FrontendVariant(StrEnum): + """The environment frontend choice.""" + + vscodium = "vscodium" + jupyterlab = "jupyterlab" + + +@dataclass(kw_only=True, frozen=True, eq=True) +class UnsavedBuildParameters: + """The parameters of a build.""" + + repository: str + builder_variant: str + frontend_variant: str + repository_revision: str | None = None + context_dir: str | None = None + + +@dataclass(kw_only=True, frozen=True, eq=True) +class BuildParameters(UnsavedBuildParameters): + """BuildParameters saved in the database.""" + + id: ULID @dataclass(kw_only=True, frozen=True, eq=True) @@ -39,8 +85,10 @@ class UnsavedEnvironment: uid: int = 1000 gid: int = 1000 environment_kind: EnvironmentKind + environment_image_source: EnvironmentImageSource args: list[str] | None = None command: list[str] | None = None + is_archived: bool = False def __post_init__(self) -> None: if self.working_directory and not self.working_directory.is_absolute(): @@ -71,26 +119,22 @@ class Environment(UnsavedEnvironment): mount_directory: PurePosixPath | None uid: int gid: int + build_parameters: BuildParameters | None + build_parameters_id: ULID | None @dataclass(kw_only=True, frozen=True, eq=True) -class EnvironmentUpdate: - """Model for the update of some or all parts of an environment.""" +class BuildParametersPatch: + """Patch for parameters of a build.""" - name: str | None = None - description: str | None = None - container_image: str | None = None - default_url: str | None = None - port: int | None = None - working_directory: PurePosixPath | None = None - mount_directory: PurePosixPath | None = None - uid: int | None = None - gid: int | None = None - args: list[str] | None | ResetType = None - command: list[str] | None | ResetType = None + repository: str | None = None + builder_variant: str | None = None + frontend_variant: str | None = None + repository_revision: str | None = None + context_dir: str | None = None -@dataclass(frozen=True, eq=True, kw_only=True) +@dataclass(eq=True, kw_only=True) class EnvironmentPatch: """Model for changes requested on a session environment.""" @@ -99,12 +143,64 @@ class EnvironmentPatch: container_image: str | None = None default_url: str | None = None port: int | None = None - working_directory: PurePosixPath | None = None - mount_directory: PurePosixPath | None = None + working_directory: PurePosixPath | ResetType | None = None + mount_directory: 
PurePosixPath | ResetType | None = None uid: int | None = None gid: int | None = None args: list[str] | None | ResetType = None command: list[str] | None | ResetType = None + is_archived: bool | None = None + build_parameters: BuildParametersPatch | None = None + environment_image_source: EnvironmentImageSource | None = None + + +# TODO: Verify that these limits are compatible with k8s +MAX_NUMBER_ENV_VARIABLES: typing.Final[int] = 32 +MAX_LENGTH_ENV_VARIABLES_NAME: typing.Final[int] = 256 +MAX_LENGTH_ENV_VARIABLES_VALUE: typing.Final[int] = 1000 + + +@dataclass(frozen=True, eq=True, kw_only=True) +class EnvVar: + """Model for an environment variable.""" + + name: str + value: str | None = None + + @classmethod + def from_dict(cls, env_dict: dict[str, str | None]) -> list["EnvVar"]: + """Create a list of EnvVar instances from a dictionary.""" + return [cls(name=name, value=value) for name, value in env_dict.items()] + + @classmethod + def from_apispec(cls, env_variables: list["apispec.EnvVar"]) -> list["EnvVar"]: + """Create a list of EnvVar instances from apispec objects.""" + return [cls(name=env_var.name, value=env_var.value) for env_var in env_variables] + + @classmethod + def to_dict(cls, env_variables: list["EnvVar"]) -> dict[str, str | None]: + """Convert to dict.""" + return {var.name: var.value for var in env_variables} + + def __post_init__(self) -> None: + error_msgs: list[str] = [] + if len(self.name) > MAX_LENGTH_ENV_VARIABLES_NAME: + error_msgs.append( + f"Env variable name '{self.name}' is longer than {MAX_LENGTH_ENV_VARIABLES_NAME} characters." + ) + if self.name.upper().startswith("RENKU"): + error_msgs.append(f"Env variable name '{self.name}' should not start with 'RENKU'.") + if ENV_VARIABLE_NAME_MATCHER.match(self.name) is None: + error_msgs.append(f"Env variable name '{self.name}' must match the regex '{ENV_VARIABLE_REGEX}'.") + if self.value and len(self.value) > MAX_LENGTH_ENV_VARIABLES_VALUE: + error_msgs.append( + f"Env variable value for '{self.name}' is longer than {MAX_LENGTH_ENV_VARIABLES_VALUE} characters." + ) + + if error_msgs: + if len(error_msgs) == 1: + raise errors.ValidationError(message=error_msgs[0]) + raise errors.ValidationError(message="\n".join(error_msgs)) @dataclass(frozen=True, eq=True, kw_only=True) @@ -115,7 +211,9 @@ class UnsavedSessionLauncher: name: str description: str | None resource_class_id: int | None - environment: str | UnsavedEnvironment + disk_storage: int | None + env_variables: list[EnvVar] | None + environment: str | UnsavedEnvironment | UnsavedBuildParameters """When a string is passed for the environment it should be the ID of an existing environment.""" @@ -135,7 +233,103 @@ class SessionLauncherPatch: name: str | None = None description: str | None = None - # NOTE: When unsaved environment is used it means a brand new environment should be created for the + # NOTE: When unsaved environment is used it means a brand-new environment should be created for the # launcher with the update of the launcher. 
- environment: str | EnvironmentPatch | UnsavedEnvironment | None = None + environment: str | EnvironmentPatch | UnsavedEnvironment | UnsavedBuildParameters | None = None resource_class_id: int | None | ResetType = None + disk_storage: int | None | ResetType = None + env_variables: list[EnvVar] | None | ResetType = None + + +@dataclass(frozen=True, eq=True, kw_only=True) +class BuildResult: + """Model to represent the result of a build of a container image.""" + + image: str + completed_at: datetime + repository_url: str + repository_git_commit_sha: str + + +class BuildStatus(StrEnum): + """The status of a build.""" + + in_progress = "in_progress" + failed = "failed" + cancelled = "cancelled" + succeeded = "succeeded" + + +@dataclass(frozen=True, eq=True, kw_only=True) +class Build: + """Model to represent the build of a container image.""" + + id: ULID + environment_id: ULID + created_at: datetime + status: BuildStatus + result: BuildResult | None = None + error_reason: str | None = None + + @property + def k8s_name(self) -> str: + """Returns the name of the corresponding Shipwright BuildRun.""" + name = f"renku-{self.id}" + return name.lower() + + +@dataclass(frozen=True, eq=True, kw_only=True) +class UnsavedBuild: + """Model to represent a requested container image build.""" + + environment_id: ULID + + +@dataclass(frozen=True, eq=True, kw_only=True) +class BuildPatch: + """Model to represent the requested update to a container image build.""" + + status: BuildStatus | None = None + + +@dataclass(frozen=True, eq=True, kw_only=True) +class ShipwrightBuildRunParams: + """Model to represent the parameters used to create a new Shipwright BuildRun.""" + + name: str + git_repository: str + run_image: str + output_image: str + build_strategy_name: str + push_secret_name: str + retention_after_failed: timedelta | None = None + retention_after_succeeded: timedelta | None = None + build_timeout: timedelta | None = None + node_selector: dict[str, str] | None = None + tolerations: list[crs.Toleration] | None = None + labels: dict[str, str] | None = None + annotations: dict[str, str] | None = None + frontend: str = FrontendVariant.vscodium.value + build_image: str | None = None + git_repository_revision: str | None = None + context_dir: str | None = None + + +@dataclass(frozen=True, eq=True, kw_only=True) +class ShipwrightBuildStatusUpdateContent: + """Model to represent an update about a build from Shipwright.""" + + status: BuildStatus + result: BuildResult | None = None + completed_at: datetime | None = None + error_reason: str | None = None + + +@dataclass(frozen=True, eq=True, kw_only=True) +class ShipwrightBuildStatusUpdate: + """Model to represent an update about a build from Shipwright.""" + + update: ShipwrightBuildStatusUpdateContent | None + """The update about a build. 
+ + None represents "no update".""" diff --git a/components/renku_data_services/session/orm.py b/components/renku_data_services/session/orm.py index 3b9442180..f3cee6859 100644 --- a/components/renku_data_services/session/orm.py +++ b/components/renku_data_services/session/orm.py @@ -3,12 +3,13 @@ from datetime import datetime from pathlib import PurePosixPath -from sqlalchemy import JSON, DateTime, MetaData, String, func +from sqlalchemy import JSON, BigInteger, Boolean, DateTime, MetaData, String, false, func from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column, relationship from sqlalchemy.schema import ForeignKey from ulid import ULID +from renku_data_services import errors from renku_data_services.crc.orm import ResourceClassORM from renku_data_services.project.orm import ProjectORM from renku_data_services.session import models @@ -55,6 +56,10 @@ class EnvironmentORM(BaseORM): uid: Mapped[int] = mapped_column("uid") gid: Mapped[int] = mapped_column("gid") environment_kind: Mapped[models.EnvironmentKind] = mapped_column("environment_kind") + environment_image_source: Mapped[models.EnvironmentImageSource] = mapped_column( + "environment_image_source", server_default="image", nullable=False + ) + args: Mapped[list[str] | None] = mapped_column("args", JSONVariant, nullable=True) command: Mapped[list[str] | None] = mapped_column("command", JSONVariant, nullable=True) @@ -63,6 +68,19 @@ class EnvironmentORM(BaseORM): ) """Creation date and time.""" + is_archived: Mapped[bool] = mapped_column( + "is_archived", Boolean(), default=False, server_default=false(), nullable=False + ) + + build_parameters_id: Mapped[ULID | None] = mapped_column( + "build_parameters_id", + ForeignKey("build_parameters.id", ondelete="CASCADE", name="environments_build_parameters_id_fk"), + nullable=True, + server_default=None, + default=None, + ) + build_parameters: Mapped["BuildParametersORM"] = relationship(lazy="joined", default=None) + def dump(self) -> models.Environment: """Create a session environment model from the EnvironmentORM.""" return models.Environment( @@ -81,6 +99,10 @@ def dump(self) -> models.Environment: port=self.port, args=self.args, command=self.command, + is_archived=self.is_archived, + environment_image_source=self.environment_image_source, + build_parameters=self.build_parameters.dump() if self.build_parameters else None, + build_parameters_id=self.build_parameters_id, ) @@ -128,6 +150,14 @@ class SessionLauncherORM(BaseORM): ) """Id of the resource class.""" + disk_storage: Mapped[int | None] = mapped_column("disk_storage", BigInteger, default=None, nullable=True) + """Default value for requested disk storage.""" + + env_variables: Mapped[dict[str, str | None] | None] = mapped_column( + "env_variables", JSONVariant, default=None, nullable=True + ) + """Environment variables to set in the session.""" + @classmethod def load(cls, launcher: models.SessionLauncher) -> "SessionLauncherORM": """Create SessionLauncherORM from the session launcher model.""" @@ -139,6 +169,8 @@ def load(cls, launcher: models.SessionLauncher) -> "SessionLauncherORM": project_id=launcher.project_id, environment_id=launcher.environment.id, resource_class_id=launcher.resource_class_id, + disk_storage=launcher.disk_storage, + env_variables=models.EnvVar.to_dict(launcher.env_variables) if launcher.env_variables else None, ) def dump(self) -> models.SessionLauncher: @@ -151,5 +183,98 @@ def dump(self) -> models.SessionLauncher: 
creation_date=self.creation_date, description=self.description, resource_class_id=self.resource_class_id, + disk_storage=self.disk_storage, + env_variables=models.EnvVar.from_dict(self.env_variables) if self.env_variables else None, environment=self.environment.dump(), ) + + +class BuildParametersORM(BaseORM): + """A Renku 2.0 session build parameters.""" + + __tablename__ = "build_parameters" + + id: Mapped[ULID] = mapped_column("id", ULIDType, primary_key=True, default_factory=lambda: str(ULID()), init=False) + """Id of this session build parameters object.""" + + repository: Mapped[str] = mapped_column("repository", String(500)) + + builder_variant: Mapped[str] = mapped_column("builder_variant", String(99)) + + frontend_variant: Mapped[str] = mapped_column("frontend_variant", String(99)) + + repository_revision: Mapped[str | None] = mapped_column( + "repository_revision", String(500), nullable=True, default=None + ) + + context_dir: Mapped[str | None] = mapped_column("context_dir", String(500), nullable=True, default=None) + + def dump(self) -> models.BuildParameters: + """Create a session build parameters model from the BuildParametersORM.""" + return models.BuildParameters( + id=self.id, + repository=self.repository, + builder_variant=self.builder_variant, + frontend_variant=self.frontend_variant, + repository_revision=self.repository_revision, + context_dir=self.context_dir, + ) + + +class BuildORM(BaseORM): + """A build of a container image.""" + + __tablename__ = "builds" + + id: Mapped[ULID] = mapped_column("id", ULIDType, primary_key=True, default_factory=lambda: str(ULID()), init=False) + """ID of this container image build.""" + + environment_id: Mapped[ULID] = mapped_column("environment_id", ForeignKey(EnvironmentORM.id, ondelete="CASCADE")) + environment: Mapped[EnvironmentORM] = relationship(init=False, repr=False, lazy="selectin") + + status: Mapped[models.BuildStatus] = mapped_column("status") + + created_at: Mapped[datetime] = mapped_column( + "created_at", DateTime(timezone=True), default=func.now(), nullable=False + ) + + result_image: Mapped[str | None] = mapped_column("result_image", String(500), default=None) + + completed_at: Mapped[datetime | None] = mapped_column("completed_at", DateTime(timezone=True), default=None) + + result_repository_url: Mapped[str | None] = mapped_column("result_repository_url", String(500), default=None) + + result_repository_git_commit_sha: Mapped[str | None] = mapped_column( + "result_repository_git_commit_sha", String(100), default=None + ) + + error_reason: Mapped[str | None] = mapped_column("error_reason", String(500), default=None) + + def dump(self) -> models.Build: + """Create a build object from the ORM object.""" + result = self._dump_result() + return models.Build( + id=self.id, + environment_id=self.environment_id, + created_at=self.created_at, + status=self.status, + result=result, + error_reason=self.error_reason, + ) + + def _dump_result(self) -> models.BuildResult | None: + if self.status != models.BuildStatus.succeeded: + return None + if ( + self.result_image is None + or self.completed_at is None + or self.result_repository_url is None + or self.result_repository_git_commit_sha is None + ): + raise errors.ProgrammingError(message=f"Build with id '{self.id}' is invalid.") + return models.BuildResult( + image=self.result_image, + completed_at=self.completed_at, + repository_url=self.result_repository_url, + repository_git_commit_sha=self.result_repository_git_commit_sha, + ) diff --git 
a/components/renku_data_services/solr/__init__.py b/components/renku_data_services/solr/__init__.py new file mode 100644 index 000000000..2a43de7d3 --- /dev/null +++ b/components/renku_data_services/solr/__init__.py @@ -0,0 +1 @@ +"""Client library to solr and mapping for renku entities.""" diff --git a/components/renku_data_services/solr/entity_documents.py b/components/renku_data_services/solr/entity_documents.py new file mode 100644 index 000000000..cf56f53b0 --- /dev/null +++ b/components/renku_data_services/solr/entity_documents.py @@ -0,0 +1,313 @@ +"""Defines the entity documents used with Solr.""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from datetime import UTC, datetime +from enum import StrEnum +from typing import Annotated, Any, Literal, Self + +from pydantic import ( + AliasChoices, + BaseModel, + BeforeValidator, + Field, + errors, + field_serializer, + field_validator, +) +from ulid import ULID + +from renku_data_services.authz.models import Visibility +from renku_data_services.base_models.core import ( + DataConnectorSlug, + NamespacePath, + NamespaceSlug, + ProjectPath, + ProjectSlug, + ResourceType, + Slug, +) +from renku_data_services.solr.entity_schema import Fields +from renku_data_services.solr.solr_client import DocVersion, DocVersions, ResponseBody + + +def _str_to_slug(value: Any) -> Slug: + if isinstance(value, str): + return Slug.from_name(value) + elif isinstance(value, Slug): + return value + raise errors.ValidationError(message="converting to slug in solr documents was not successful") + + +def _str_to_visibility_public(value: Any) -> Literal[Visibility.PUBLIC]: + if isinstance(value, str) and value.lower() == "public": + return Visibility.PUBLIC + else: + raise ValueError(f"Expected visibility public, got: {value}") + + +class EntityType(StrEnum): + """The different type of entities available from search.""" + + project = "Project" + user = "User" + group = "Group" + dataconnector = "DataConnector" + + @property + def to_resource_type(self) -> ResourceType: + """Map this entity-type to the core resource type.""" + match self: + case EntityType.project: + return ResourceType.project + case EntityType.user: + return ResourceType.user + case EntityType.group: + return ResourceType.group + case EntityType.dataconnector: + return ResourceType.data_connector + + +class EntityDoc(BaseModel, ABC, frozen=True): + """Base class for an entity.""" + + path: str + slug: Annotated[Slug, BeforeValidator(_str_to_slug)] + version: DocVersion = Field( + serialization_alias="_version_", + validation_alias=AliasChoices("version", "_version_"), + default_factory=DocVersions.not_exists, + ) + score: float | None = None + + @property + @abstractmethod + def entity_type(self) -> EntityType: + """Return the type of this entity.""" + ... 
+ + @field_serializer("slug", when_used="always") + def __serialize_slug(self, slug: Slug) -> str: + return slug.value + + def to_dict(self) -> dict[str, Any]: + """Return the dict of this group.""" + dict = self.model_dump(by_alias=True, exclude_none=True, mode="json") + # note: _kind=fullentity is for being backwards compatible, it might not be needed in the future + dict.update(_type=self.entity_type.value, _kind="fullentity") + return dict + + def reset_solr_fields(self) -> Self: + """Resets fields that are filled by solr when querying.""" + return self.model_copy(update={"version": DocVersions.not_exists(), "score": None}) + + +class User(EntityDoc, frozen=True): + """Represents a renku user in SOLR.""" + + id: str + firstName: str | None = None + lastName: str | None = None + visibility: Annotated[Literal[Visibility.PUBLIC], BeforeValidator(_str_to_visibility_public)] = Visibility.PUBLIC + isNamespace: Annotated[Literal[True], BeforeValidator(lambda e: True)] = True + + @property + def entity_type(self) -> EntityType: + """Return the type of this entity.""" + return EntityType.user + + @classmethod + def of(cls, id: str, slug: Slug, firstName: str | None = None, lastName: str | None = None) -> User: + """Create a new user from the given data.""" + return User(path=slug.value, slug=slug, id=id, firstName=firstName, lastName=lastName) + + @classmethod + def from_dict(cls, d: dict[str, Any]) -> User: + """Create a User from a dictionary.""" + return User.model_validate(d) + + +class Group(EntityDoc, frozen=True): + """Represents a renku user in SOLR.""" + + id: ULID + name: str + description: str | None = None + visibility: Annotated[Literal[Visibility.PUBLIC], BeforeValidator(_str_to_visibility_public)] = Visibility.PUBLIC + isNamespace: Annotated[Literal[True], BeforeValidator(lambda e: True)] = True + + @property + def entity_type(self) -> EntityType: + """Return the type of this entity.""" + return EntityType.group + + @field_serializer("id", when_used="always") + def __serialize_id(self, id: ULID) -> str: + return str(id) + + @classmethod + def of(cls, id: ULID, slug: Slug, name: str, description: str | None = None) -> Group: + """Create a new group from the given data.""" + return Group(path=slug.value, slug=slug, id=id, description=description, name=name) + + @classmethod + def from_dict(cls, d: dict[str, Any]) -> Group: + """Create a Group from a dictionary.""" + return Group.model_validate(d) + + +class Project(EntityDoc, frozen=True): + """Represents a renku project in SOLR.""" + + id: ULID + name: str + visibility: Visibility + namespace_path: str = Field( + serialization_alias="namespacePath", + validation_alias=AliasChoices("namespace_path", "namespacePath"), + ) + createdBy: str + creationDate: datetime + repositories: list[str] = Field(default_factory=list) + description: str | None = None + keywords: list[str] = Field(default_factory=list) + isNamespace: Annotated[Literal[True], BeforeValidator(lambda e: True)] = True + namespaceDetails: ResponseBody | None = None + creatorDetails: ResponseBody | None = None + + @property + def entity_type(self) -> EntityType: + """Return the type of this entity.""" + return EntityType.project + + @field_validator("keywords") + @classmethod + def _sort_keywords(cls, v: list[str]) -> list[str]: + v.sort() + return v + + @field_serializer("id", when_used="always") + def __serialize_id(self, id: ULID) -> str: + return str(id) + + @field_serializer("visibility", when_used="always") + def __serialize_visibilty(self, visibility: Visibility) -> 
str: + return visibility.value + + @field_serializer("creationDate", when_used="always") + def __serialize_creation_date(self, creationDate: datetime) -> str: + return creationDate.strftime("%Y-%m-%dT%H:%M:%SZ") + + @field_validator("creationDate") + @classmethod + def _add_tzinfo(cls, v: datetime) -> datetime: + return v.replace(tzinfo=UTC) + + def in_namespace(self, ns: Group | User) -> Project: + """Set the namespace as given, returning a new object.""" + p_slug = ProjectSlug(self.slug.value) + parent = NamespacePath(NamespaceSlug(ns.slug.value)) + path = (parent / p_slug).serialize() + return self.model_copy(update={"path": path, "namespace_path": ns.path}) + + @classmethod + def from_dict(cls, d: dict[str, Any]) -> Project: + """Create a Project from a dictionary.""" + return Project.model_validate(d) + + +class DataConnector(EntityDoc, frozen=True): + """Represents a global or non-global renku data connector in SOLR.""" + + id: ULID + namespace_path: str | None = Field( + serialization_alias="namespacePath", + validation_alias=AliasChoices("namespace_path", "namespacePath"), + default=None, + ) + name: str + storageType: str + readonly: bool + visibility: Visibility + createdBy: str + creationDate: datetime + description: str | None = None + keywords: list[str] = Field(default_factory=list) + isNamespace: Annotated[Literal[False], BeforeValidator(lambda e: False)] = False + namespaceDetails: ResponseBody | None = None + creatorDetails: ResponseBody | None = None + + @property + def entity_type(self) -> EntityType: + """Return the type of this entity.""" + return EntityType.dataconnector + + @field_validator("keywords") + @classmethod + def _sort_keywords(cls, v: list[str]) -> list[str]: + v.sort() + return v + + @field_serializer("id", when_used="always") + def __serialize_id(self, id: ULID) -> str: + return str(id) + + @field_serializer("visibility", when_used="always") + def __serialize_visibilty(self, visibility: Visibility) -> str: + return visibility.value + + @field_serializer("creationDate", when_used="always") + def __serialize_creation_date(self, creationDate: datetime) -> str: + return creationDate.strftime("%Y-%m-%dT%H:%M:%SZ") + + def in_namespace(self, ns: Group | User | Project | None) -> DataConnector: + """Set the namespace as given, returning a new object.""" + ns_path = ns.path if ns is not None else None + dc_slug = DataConnectorSlug(self.slug.value) + + # I want to reuse the `"/".join(…)` to combine namespace + slug + match ns: + case Group() as g: + parent: NamespacePath | ProjectPath | None = NamespacePath(NamespaceSlug(g.slug.value)) + case User() as u: + parent = NamespacePath(NamespaceSlug(u.slug.value)) + case Project() as p: + parent = ProjectPath(NamespaceSlug(p.namespace_path), ProjectSlug(p.slug.value)) + case None: + parent = None + + path = (parent / dc_slug).serialize() if parent is not None else self.slug.value + return self.model_copy(update={"path": path, "namespace_path": ns_path}) + + @field_validator("creationDate") + @classmethod + def _add_tzinfo(cls, v: datetime) -> datetime: + return v.replace(tzinfo=UTC) + + @classmethod + def from_dict(cls, d: dict[str, Any]) -> DataConnector: + """Create a Project from a dictionary.""" + return DataConnector.model_validate(d) + + +class EntityDocReader: + """Reads dicts into one of the entity document classes.""" + + @classmethod + def from_dict(cls, doc: dict[str, Any]) -> User | Project | Group | DataConnector | None: + """Reads dicts into one of the entity document classes.""" + dt = 
doc.get(Fields.entity_type) + if dt is None: + return None + else: + discriminator = EntityType[dt.lower()] + match discriminator: + case EntityType.project: + return Project.from_dict(doc) + case EntityType.user: + return User.from_dict(doc) + case EntityType.group: + return Group.from_dict(doc) + case EntityType.dataconnector: + return DataConnector.from_dict(doc) diff --git a/components/renku_data_services/solr/entity_schema.py b/components/renku_data_services/solr/entity_schema.py new file mode 100644 index 000000000..4def2d01f --- /dev/null +++ b/components/renku_data_services/solr/entity_schema.py @@ -0,0 +1,158 @@ +"""Defines the solr schema used for the renku entities.""" + +from typing import Final + +from renku_data_services.solr.solr_migrate import SchemaMigration +from renku_data_services.solr.solr_schema import ( + AddCommand, + Analyzer, + CopyFieldRule, + Field, + FieldName, + FieldType, + Filters, + SchemaCommand, + Tokenizers, + TypeName, +) + + +class Fields: + """A collection of fields.""" + + created_by: Final[FieldName] = FieldName("createdBy") + creation_date: Final[FieldName] = FieldName("creationDate") + description: Final[FieldName] = FieldName("description") + entity_type: Final[FieldName] = FieldName("_type") + kind: Final[FieldName] = FieldName("_kind") + first_name: Final[FieldName] = FieldName("firstName") + id: Final[FieldName] = FieldName("id") + last_name: Final[FieldName] = FieldName("lastName") + members: Final[FieldName] = FieldName("members") + name: Final[FieldName] = FieldName("name") + repositories: Final[FieldName] = FieldName("repositories") + slug: Final[FieldName] = FieldName("slug") + visibility: Final[FieldName] = FieldName("visibility") + keywords: Final[FieldName] = FieldName("keywords") + namespace: Final[FieldName] = FieldName("namespace") + content_all: Final[FieldName] = FieldName("content_all") + deleted: Final[FieldName] = FieldName("deleted") + readonly: Final[FieldName] = FieldName("readonly") + storageType: Final[FieldName] = FieldName("storageType") + path: Final[FieldName] = FieldName("path") + namespace_path: Final[FieldName] = FieldName("namespacePath") + is_namespace: Final[FieldName] = FieldName("isNamespace") + + # virtual score field + score: Final[FieldName] = FieldName("score") + + # sub query fields + creator_details: Final[FieldName] = FieldName("creatorDetails") + namespace_details: Final[FieldName] = FieldName("namespaceDetails") + + +class Analyzers: + """A collection of analyzers.""" + + text_index: Final[Analyzer] = Analyzer( + tokenizer=Tokenizers.uax29UrlEmail, + filters=[ + Filters.lowercase, + Filters.stop, + Filters.english_minimal_stem, + Filters.ascii_folding, + Filters.edgeNgram(2, 8, True), + ], + ) + + text_query: Final[Analyzer] = Analyzer( + tokenizer=Tokenizers.uax29UrlEmail, + filters=[ + Filters.lowercase, + Filters.stop, + Filters.english_minimal_stem, + Filters.ascii_folding, + ], + ) + + +class FieldTypes: + """A collection of field types.""" + + id: Final[FieldType] = FieldType.id(TypeName("SearchId")).make_doc_value() + string: Final[FieldType] = FieldType.str(TypeName("SearchString")).make_doc_value() + boolean: Final[FieldType] = FieldType.boolean(TypeName("SearchBool")) + text: Final[FieldType] = ( + FieldType.text(TypeName("SearchText")) + .with_index_analyzer(Analyzers.text_index) + .with_query_analyzer(Analyzers.text_query) + ) + text_all: Final[FieldType] = ( + FieldType.text(TypeName("SearchTextAll")) + .with_index_analyzer(Analyzers.text_index) + .with_query_analyzer(Analyzers.text_query) 
+ .make_multi_valued() + ) + date_time: Final[FieldType] = FieldType.date_time_point(TypeName("SearchDateTime")) + + +initial_entity_schema: Final[list[SchemaCommand]] = [ + AddCommand(FieldTypes.id), + AddCommand(FieldTypes.string), + AddCommand(FieldTypes.text), + AddCommand(FieldTypes.date_time), + AddCommand(Field.of(Fields.entity_type, FieldTypes.string)), + AddCommand(Field.of(Fields.kind, FieldTypes.string)), + AddCommand(Field.of(Fields.name, FieldTypes.text)), + AddCommand(Field.of(Fields.slug, FieldTypes.string)), + AddCommand(Field.of(Fields.repositories, FieldTypes.string).make_multi_valued()), + AddCommand(Field.of(Fields.visibility, FieldTypes.string)), + AddCommand(Field.of(Fields.description, FieldTypes.text)), + AddCommand(Field.of(Fields.created_by, FieldTypes.id)), + AddCommand(Field.of(Fields.creation_date, FieldTypes.date_time)), + # text all + AddCommand(FieldTypes.text_all), + AddCommand(Field.of(Fields.content_all, FieldTypes.text_all).make_multi_valued()), + AddCommand(CopyFieldRule(source=Fields.name, dest=Fields.content_all)), + AddCommand(CopyFieldRule(source=Fields.description, dest=Fields.content_all)), + AddCommand(CopyFieldRule(source=Fields.slug, dest=Fields.content_all)), + AddCommand(CopyFieldRule(source=Fields.repositories, dest=Fields.content_all)), + # user fields + AddCommand(Field.of(Fields.first_name, FieldTypes.string)), + AddCommand(Field.of(Fields.last_name, FieldTypes.string)), + AddCommand(CopyFieldRule(source=Fields.first_name, dest=Fields.content_all)), + AddCommand(CopyFieldRule(source=Fields.last_name, dest=Fields.content_all)), + # keywords + AddCommand(Field.of(Fields.keywords, FieldTypes.string).make_multi_valued()), + AddCommand(CopyFieldRule(source=Fields.keywords, dest=Fields.content_all)), + # namespace + AddCommand(Field.of(Fields.namespace, FieldTypes.string)), + AddCommand(CopyFieldRule(source=Fields.namespace, dest=Fields.content_all)), +] + + +all_migrations: Final[list[SchemaMigration]] = [ + SchemaMigration(version=9, commands=initial_entity_schema, requires_reindex=True), + SchemaMigration( + version=10, + commands=[AddCommand(FieldTypes.boolean), AddCommand(Field.of(Fields.deleted, FieldTypes.boolean))], + requires_reindex=False, + ), + SchemaMigration( + version=11, + commands=[ + AddCommand(Field.of(Fields.readonly, FieldTypes.boolean)), + AddCommand(Field.of(Fields.storageType, FieldTypes.string)), + ], + requires_reindex=False, + ), + SchemaMigration( + version=12, + commands=[ + AddCommand(Field.of(Fields.path, FieldTypes.id)), + AddCommand(Field.of(Fields.namespace_path, FieldTypes.id)), + AddCommand(Field.of(Fields.is_namespace, FieldTypes.boolean)), + ], + requires_reindex=True, + ), +] diff --git a/components/renku_data_services/solr/solr_client.py b/components/renku_data_services/solr/solr_client.py new file mode 100644 index 000000000..c075e24df --- /dev/null +++ b/components/renku_data_services/solr/solr_client.py @@ -0,0 +1,803 @@ +"""This defines an interface to SOLR.""" + +from __future__ import annotations + +import json +import os +from abc import ABC, abstractmethod +from collections.abc import Callable +from contextlib import AbstractAsyncContextManager +from dataclasses import dataclass, field +from enum import StrEnum +from types import TracebackType +from typing import Any, Literal, NewType, Optional, Protocol, Self, final +from urllib.parse import urljoin, urlparse, urlunparse + +from httpx import AsyncClient, BasicAuth, ConnectError, Response +from pydantic import ( + AliasChoices, + BaseModel, + 
Field, + ModelWrapValidatorHandler, + ValidationError, + field_serializer, + model_serializer, + model_validator, +) + +from renku_data_services.app_config import logging +from renku_data_services.errors.errors import BaseError +from renku_data_services.solr.solr_schema import CoreSchema, FieldName, SchemaCommandList + +logger = logging.getLogger(__name__) + + +@dataclass +@final +class SolrUser: + """User for authenticating at SOLR.""" + + username: str + password: str = field(repr=False) + + def __str__(self) -> str: + pstr = "***" if self.password != "" else "" # nosec + return f"(user={self.username}, password={pstr})" + + +@dataclass +@final +class SolrClientConfig: + """Configuration object for instantiating a client.""" + + base_url: str + core: str + user: Optional[SolrUser] = None + timeout: int = 600 + + @classmethod + def from_env(cls) -> SolrClientConfig: + """Create a configuration from environment variables.""" + url = os.environ["SOLR_URL"] + core = os.environ.get("SOLR_CORE", "renku-search") + username = os.environ.get("SOLR_USER") + password = os.environ.get("SOLR_PASSWORD") + + tstr = os.environ.get("SOLR_REQUEST_TIMEOUT", "600") + try: + timeout = int(tstr) if tstr is not None else 600 + except ValueError: + logger.warning(f"SOLR_REQUEST_TIMEOUT is not an integer: {tstr}") + timeout = 600 + + user = SolrUser(username=username, password=str(password)) if username is not None else None + return cls(url, core, user, timeout) + + def __str__(self) -> str: + return f"SolrClientConfig(base_url={self.base_url}, core={self.core}, user={self.user}, timeout={self.timeout})" + + +class SolrClientException(BaseError, ABC): + """Base exception for solr client.""" + + def __init__(self, message: str, code: int = 1500, status_code: int = 500) -> None: + super().__init__(message=message, code=code, status_code=status_code) + + +class SortDirection(StrEnum): + """Direction for sorting a field.""" + + asc = "asc" + desc = "desc" + + +@final +class SubQuery(BaseModel, frozen=True): + """Represents a solr sub query.""" + + query: str + filter: str + limit: int + offset: int = 0 + fields: list[str | FieldName] = Field(default_factory=list) + sort: list[tuple[FieldName, SortDirection]] = Field(default_factory=list) + + def with_sort(self, s: list[tuple[FieldName, SortDirection]]) -> Self: + """Return a copy with a new sort definition.""" + return self.model_copy(update={"sort": s}) + + def with_fields(self, fn: FieldName, *args: FieldName) -> Self: + """Return a copy with a new field list.""" + fs = [fn] + list(args) + return self.model_copy(update={"fields": fs}) + + def with_all_fields(self) -> Self: + """Return a copy with fields set to ['*'].""" + return self.model_copy(update={"fields": ["*"]}) + + def with_filter(self, q: str) -> Self: + """Return a copy with a new filter query.""" + return self.model_copy(update={"filter": q}) + + def with_query(self, q: str) -> Self: + """Return a copy with a new query.""" + return self.model_copy(update={"query": q}) + + def to_params(self, field: FieldName) -> dict[str, str]: + """Return a dictionary intended to be added to the main query params.""" + + def key(s: str) -> str: + return f"{field}.{s}" + + result = {key("q"): self.query} + if self.filter != "": + result.update({key("fq"): self.filter}) + + if self.limit > 0: + result.update({key("limit"): str(self.limit)}) + + if self.offset > 0: + result.update({key("offset"): str(self.offset)}) + + if self.fields != []: + result.update({key("fl"): ",".join(self.fields)}) + + if self.sort != []: + 
solr_sort = ",".join(list(map(lambda t: f"{t[0]} {t[1].value}", self.sort))) + result.update({key("sort"): solr_sort}) + + return result + + +@final +class FacetAlgorithm(StrEnum): + """Available facet algorithms for solr.""" + + doc_values = "dv" + un_inverted_field = "uif" + doc_values_hash = "dvhash" + enum = "enum" + stream = "stream" + smart = "smart" + + +@final +class FacetRange(BaseModel, frozen=True): + """A range definition used within the FacetRange.""" + + start: int | Literal["*"] = Field(serialization_alias="from", validation_alias=AliasChoices("from", "start")) + to: int | Literal["*"] + inclusive_from: bool = True + inclusive_to: bool = False + + def to_dict(self) -> dict[str, Any]: + """Return the dict of this object.""" + return self.model_dump(by_alias=True) + + +@final +class FacetTerms(BaseModel, frozen=True): + """The terms facet request. + + See: https://solr.apache.org/guide/solr/latest/query-guide/json-facet-api.html#terms-facet + """ + + name: FieldName + field: FieldName + limit: int | None = None + min_count: int | None = Field( + serialization_alias="mincount", validation_alias=AliasChoices("mincount", "min_count"), default=None + ) + method: FacetAlgorithm | None = None + missing: bool = False + num_buckets: bool = Field( + serialization_alias="numBuckets", validation_alias=AliasChoices("numBuckets", "num_buckets"), default=False + ) + all_buckets: bool = Field( + serialization_alias="allBuckets", validation_alias=AliasChoices("allBuckets", "all_buckets"), default=False + ) + + def to_dict(self) -> dict[str, Any]: + """Return the dict representation of this object.""" + result = self.model_dump(by_alias=True, exclude_none=True) + result.update({"type": "terms"}) + result.pop("name") + return {f"{self.name}": result} + + +@final +class FacetArbitraryRange(BaseModel, frozen=True): + """The range facet. 
+ + See: https://solr.apache.org/guide/solr/latest/query-guide/json-facet-api.html#range-facet + """ + + name: FieldName + field: FieldName + ranges: list[FacetRange] + + def to_dict(self) -> dict[str, Any]: + """Return the dict of this object.""" + result = self.model_dump(by_alias=True, exclude_defaults=True) + result.update({"type": "range"}) + result.pop("name") + return {f"{self.name}": result} + + +@final +class SolrFacets(BaseModel, frozen=True): + """A facet query part consisting of multiple facet requests.""" + + facets: list[FacetTerms | FacetArbitraryRange] + + @model_serializer() + def to_dict(self) -> dict[str, Any]: + """Return the dict representation of this object.""" + result = {} + [result := result | x.to_dict() for x in self.facets] + return result + + def with_facet(self, f: FacetTerms | FacetArbitraryRange) -> SolrFacets: + """Return a copy with the given facet added.""" + return SolrFacets(facets=self.facets + [f]) + + @classmethod + def of(cls, *args: FacetTerms | FacetArbitraryRange) -> SolrFacets: + """Contsructor accepting varags.""" + return SolrFacets(facets=list(args)) + + @classmethod + def empty(cls) -> SolrFacets: + """Return an empty facets request.""" + return SolrFacets(facets=[]) + + +@final +class FacetCount(BaseModel, frozen=True): + """A facet count consists of the field and its determined count.""" + + field: FieldName = Field(serialization_alias="val", validation_alias=AliasChoices("val", "field")) + count: int + + def to_dict(self) -> dict[str, Any]: + """Return the dict representation of this object.""" + return self.model_dump(by_alias=True) + + +@final +class FacetBuckets(BaseModel, frozen=True): + """A list of bucket counts as part of a facet response.""" + + buckets: list[FacetCount] + + def to_dict(self) -> dict[str, Any]: + """Return the dict representation of this object as returned by solr.""" + return self.model_dump(by_alias=True) + + def to_simple_dict(self) -> dict[str, int]: + """Return the counts as a simple field-count dict.""" + els = [{x.field: x.count} for x in self.buckets] + result = {} + [result := result | x for x in els] + return result + + @classmethod + def of(cls, *args: FacetCount) -> Self: + """Constructor for varargs.""" + return FacetBuckets(buckets=list(args)) + + @classmethod + def empty(cls) -> Self: + """Return an empty object.""" + return FacetBuckets(buckets=[]) + + +@final +class SolrBucketFacetResponse(BaseModel, frozen=True): + """The response to 'bucket' facet requests, like terms and range. 
+ + See: https://solr.apache.org/guide/solr/latest/query-guide/json-facet-api.html#types-of-facets + """ + + count: int + buckets: dict[FieldName, FacetBuckets] + + def get_counts(self, field: FieldName) -> FacetBuckets: + """Return the facet buckets associated to the given field.""" + v = self.buckets.get(field) + return v if v is not None else FacetBuckets.empty() + + @model_serializer() + def to_dict(self) -> dict[str, Any]: + """Return the dict of this object.""" + result: dict[str, Any] = {"count": self.count} + for key in self.buckets: + result.update({key: self.buckets[key].to_dict()}) + + return result + + @classmethod + def empty(cls) -> SolrBucketFacetResponse: + """Return an empty response.""" + return SolrBucketFacetResponse(count=0, buckets={}) + + @model_validator(mode="wrap") + @classmethod + def _validate( + cls, data: Any, handler: ModelWrapValidatorHandler[SolrBucketFacetResponse] + ) -> SolrBucketFacetResponse: + try: + return handler(data) + except ValidationError as err: + if isinstance(data, dict): + count: int | None = data.get("count") + if count is not None: + buckets: dict[FieldName, FacetBuckets] = {} + for key in data: + if key != "count": + bb = FacetBuckets.model_validate(data[key]) + buckets.update({key: bb}) + + return SolrBucketFacetResponse(count=count, buckets=buckets) + else: + raise ValueError(f"No 'count' property in dict: {data}") from err + else: + raise ValueError(f"Expected a dict to, but got: {data}") from err + + +@final +class SolrQuery(BaseModel, frozen=True): + """A query to solr using the JSON request api. + + See: https://solr.apache.org/guide/solr/latest/query-guide/json-request-api.html + """ + + query: str + filter: list[str] = Field(default_factory=list) + limit: int = 50 + offset: int = 0 + fields: list[str | FieldName] = Field(default_factory=list) + sort: list[tuple[FieldName, SortDirection]] = Field(default_factory=list) + params: dict[str, str] = Field(default_factory=dict) + facet: SolrFacets = Field(default_factory=SolrFacets.empty) + + def to_dict(self) -> dict[str, Any]: + """Return the dict representation of this query.""" + return self.model_dump(exclude_defaults=True) + + def with_sort(self, s: list[tuple[FieldName, SortDirection]]) -> Self: + """Return a copy of this with an updated sort.""" + return self.model_copy(update={"sort": s}) + + def with_facets(self, fs: SolrFacets) -> Self: + """Return a copy with the given facet requests.""" + return self.model_copy(update={"facet": fs}) + + def with_facet(self, f: FacetTerms | FacetArbitraryRange) -> Self: + """Return a copy with the given facet request added.""" + nf = self.facet.with_facet(f) + return self.model_copy(update={"facet": nf}) + + def add_sub_query(self, field: FieldName, sq: SubQuery) -> Self: + """Add the sub query to this query.""" + np = self.params | sq.to_params(field) + fs = self.fields + [FieldName(f"{field}:[subquery]")] + return self.model_copy(update={"params": np, "fields": fs}) + + def add_filter(self, *args: str) -> Self: + """Return a copy with the given filter query added.""" + if len(args) == 0: + return self + else: + fq = self.filter + list(args) + return self.model_copy(update={"filter": fq}) + + @field_serializer("sort", when_used="always") + def __serialize_sort(self, sort: list[tuple[FieldName, SortDirection]]) -> str: + return ",".join(list(map(lambda t: f"{t[0]} {t[1].value}", sort))) + + @classmethod + def query_all_fields(cls, qstr: str, limit: int = 50, offset: int = 0) -> SolrQuery: + """Create a query with defaults returning all 
fields of a document.""" + return SolrQuery(query=qstr, fields=["*", "score"], limit=limit, offset=offset) + + +@final +class ResponseHeader(BaseModel): + """The responseHeader object as returned by solr.""" + + status: int + query_time: int = Field( + serialization_alias="QTime", validation_alias=AliasChoices("QTime", "queryTime", "query_time"), default=0 + ) + params: dict[str, str] = Field(default_factory=dict) + + +@final +class UpsertSuccess(BaseModel): + """Response for an successful update.""" + + header: ResponseHeader + + +DocVersion = NewType("DocVersion", int) +""" The `_version_` field can be used to enable optimistic concurrency control: + https://solr.apache.org/guide/solr/latest/indexing-guide/partial-document-updates.html#optimistic-concurrency +""" + + +class DocVersions: + """Possible values for the _version_ field.""" + + @classmethod + def not_exists(cls) -> DocVersion: + """Specifies a version requiring a document to not exist.""" + return DocVersion(-1) + + @classmethod + def exists(cls) -> DocVersion: + """Specifies a version requiring a document to exist.""" + return DocVersion(1) + + @classmethod + def off(cls) -> DocVersion: + """Specifies a version indicating no version requirement. + + Optimistic concurrency control is not used. With this value a + document will be overwritting if it exists or inserted. + """ + return DocVersion(0) + + @classmethod + def exact(cls, n: int) -> DocVersion: + """Specifies an exact version.""" + return DocVersion(n) + + +type UpsertResponse = UpsertSuccess | Literal["VersionConflict"] + + +class SolrDocument(Protocol): + """The base for a document in SOLR. + + All documents should have an `id` property denoting their primary identity. + """ + + @property + def id(self) -> str: + """The document id.""" + ... + + def to_dict(self) -> dict[str, Any]: + """Return a dict representation of this document.""" + ... + + +@dataclass +class RawDocument(SolrDocument): + """A simple wrapper around a JSON dictionary.""" + + data: dict[str, Any] + + @property + def id(self) -> str: + """Return the document id.""" + return str(self.data["id"]) + + def to_dict(self) -> dict[str, Any]: + """Return the data dictionary.""" + return self.data + + +class ResponseBody(BaseModel): + """The body of a search response.""" + + num_found: int = Field(serialization_alias="numFound", validation_alias=AliasChoices("numFound", "num_found")) + start: int + num_found_exact: bool = Field( + serialization_alias="numFoundExact", validation_alias=AliasChoices("numFoundExact", "num_found_exact") + ) + docs: list[dict[str, Any]] + + def read_to[A](self, f: Callable[[dict[str, Any]], A | None]) -> list[A]: + """Read the documents array using the given function.""" + result = [] + for doc in self.docs: + a = f(doc) + if a is not None: + result.append(a) + + return result + + +class QueryResponse(BaseModel): + """The complete response object for running a query. + + Note, solr doesn't set the `responseHeader` for get-by-id requests. Otherwise it will be set. 
+ """ + + responseHeader: ResponseHeader = Field( + serialization_alias="responseHeader", + validation_alias=AliasChoices("responseHeader", "response_header"), + default_factory=lambda: ResponseHeader(status=200), + ) + facets: SolrBucketFacetResponse = Field(default_factory=SolrBucketFacetResponse.empty) + response: ResponseBody + + +class SolrClientConnectException(SolrClientException): + """Error when connecting to solr fails.""" + + def __init__(self, cause: ConnectError): + super().__init__(f"Connecting to solr at '{cause.request.url}' failed: {cause}", code=1503, status_code=503) + + +class SolrClientGetByIdException(SolrClientException): + """Error when a lookup by document id failed.""" + + def __init__(self, id: str, resp: Response): + super().__init__( + f"Lookup solr document by id {id} failed with unexpected status {resp.status_code} ({resp.text})" + ) + + +class SolrClientQueryException(SolrClientException): + """Error when querying failed.""" + + def __init__(self, query: SolrQuery, resp: Response): + super().__init__( + f"Querying solr with '{query.to_dict()}' failed with unexpected status {resp.status_code} ({resp.text})" + ) + + +class SolrClientUpsertException(SolrClientException): + """Error when upserting.""" + + def __init__(self, docs: list[SolrDocument], resp: Response): + count = len(docs) + super().__init__(f"Inserting {count} documents failed with status {resp.status_code} ({resp.text})") + + +class SolrClientStatusException(SolrClientException): + """Error when obtaining the status of the core.""" + + def __init__(self, cfg: SolrClientConfig, resp: Response): + super().__init__(f"Error getting the status of core {cfg.core}. {resp.status_code}/{resp.text}") + + +class SolrClientCreateCoreException(SolrClientException): + """Error when creating a core.""" + + def __init__(self, core: str, resp: Response): + super().__init__(f"Error creating core '{core}': {resp.status_code}/{resp.text}") + + +class SolrClient(AbstractAsyncContextManager, ABC): + """A client to SOLR.""" + + async def close(self) -> None: + """Shuts down this client.""" + + @abstractmethod + async def get_raw(self, id: str) -> Response: + """Get a document by id and return the http response.""" + ... + + @abstractmethod + async def query_raw(self, query: SolrQuery) -> Response: + """Query documents and return the http response.""" + ... + + @abstractmethod + async def get(self, id: str) -> QueryResponse: + """Get a document by id, returning a `QueryResponse`.""" + ... + + @abstractmethod + async def query(self, query: SolrQuery) -> QueryResponse: + """Query documents, returning a `QueryResponse`.""" + ... + + @abstractmethod + async def modify_schema(self, cmds: SchemaCommandList) -> Response: + """Updates the schema with the given commands.""" + ... + + @abstractmethod + async def upsert(self, docs: list[SolrDocument]) -> UpsertResponse: + """Inserts or updates a document in SOLR.""" + ... + + @abstractmethod + async def get_schema(self) -> CoreSchema: + """Return the schema of the core.""" + ... + + @abstractmethod + async def delete(self, query: str) -> Response: + """Delete data that matches the `query`.""" + ... 
+ + +class DefaultSolrClient(SolrClient): + """Default implementation of the solr client.""" + + delegate: AsyncClient + config: SolrClientConfig + + def __init__(self, cfg: SolrClientConfig): + self.config = cfg + url_parsed = list(urlparse(cfg.base_url)) + url_parsed[2] = urljoin(url_parsed[2], f"/solr/{cfg.core}") + burl = urlunparse(url_parsed) + bauth = BasicAuth(username=cfg.user.username, password=cfg.user.password) if cfg.user is not None else None + self.delegate = AsyncClient(auth=bauth, base_url=burl, timeout=cfg.timeout) + + def __repr__(self) -> str: + return f"DefaultSolrClient(delegate={self.delegate}, config={self.config})" + + async def __aenter__(self) -> Self: + await self.delegate.__aenter__() + return self + + async def __aexit__( + self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None + ) -> None: + return await self.delegate.__aexit__(exc_type, exc, tb) + + async def get_raw(self, id: str) -> Response: + """Query documents and return the http response.""" + try: + return await self.delegate.get("/get", params={"wt": "json", "ids": id}) + except ConnectError as e: + raise SolrClientConnectException(e) from e + + async def query_raw(self, query: SolrQuery) -> Response: + """Query documents and return the http response.""" + try: + logger.debug(f"Running solr query: {self.config.base_url}/solr/{self.config.core}") + return await self.delegate.post("/query", params={"wt": "json"}, json=query.to_dict()) + except ConnectError as e: + raise SolrClientConnectException(e) from e + + async def get(self, id: str) -> QueryResponse: + """Get a document by id, returning a `QueryResponse`.""" + resp = await self.get_raw(id) + if not resp.is_success: + raise SolrClientGetByIdException(id, resp) + else: + return QueryResponse.model_validate(resp.json()) + + async def query(self, query: SolrQuery) -> QueryResponse: + """Query documents, returning a `QueryResponse`.""" + resp = await self.query_raw(query) + if not resp.is_success: + raise SolrClientQueryException(query, resp) + else: + return QueryResponse.model_validate(resp.raise_for_status().json()) + + async def modify_schema(self, cmds: SchemaCommandList) -> Response: + """Updates the schema with the given commands.""" + data = cmds.to_json() + logger.debug(f"modify schema: {data}") + try: + return await self.delegate.post( + "/schema", + params={"commit": "true", "overwrite": "true"}, + content=data.encode("utf-8"), + headers={"Content-Type": "application/json"}, + ) + except ConnectError as e: + raise SolrClientConnectException(e) from e + + async def upsert(self, docs: list[SolrDocument]) -> UpsertResponse: + """Inserts or updates a document in SOLR. + + The `_version_` property determines wether optimistic locking is used. In this + case the result is either expected to be successful or a version conflict. All + other outcomes are raised as an exception. 
+ """ + j = json.dumps([e.to_dict() for e in docs]) + logger.debug(f"upserting: {j}") + try: + res = await self.delegate.post( + "/update", + params={"commit": "true"}, + content=j.encode("utf-8"), + headers={"Content-Type": "application/json"}, + ) + match res.status_code: + case 200: + h = ResponseHeader.model_validate(res.json()["responseHeader"]) + return UpsertSuccess(header=h) + case 409: + return "VersionConflict" + case _: + raise SolrClientUpsertException(docs, res) from None + except ConnectError as e: + raise SolrClientConnectException(e) from e + + async def get_schema(self) -> CoreSchema: + """Return the current schema.""" + resp = await self.delegate.get("/schema") + cs = CoreSchema.model_validate(resp.json()["schema"]) + return cs + + async def delete(self, query: str) -> Response: + """Delete all documents that matches `query`.""" + cmd = {"delete": {"query": query}} + return await self.delegate.post( + "/update", + params={"commit": "true"}, + content=json.dumps(cmd).encode("utf-8"), + headers={"Content-Type": "application/json"}, + ) + + async def close(self) -> None: + """Close this client and free resources.""" + return await self.delegate.aclose() + + +class SolrAdminClient(AbstractAsyncContextManager, ABC): + """A client to the core admin api. + + Url: https://solr.apache.org/guide/solr/latest/configuration-guide/coreadmin-api.html + """ + + @abstractmethod + async def core_status(self, core_name: str | None) -> dict[str, Any] | None: + """Return the status of the connected core.""" + ... + + @abstractmethod + async def create(self, core_name: str | None) -> None: + """Create a core.""" + ... + + +class DefaultSolrAdminClient(SolrAdminClient): + """A client to the core admin api. + + Url: https://solr.apache.org/guide/solr/latest/configuration-guide/coreadmin-api.html + """ + + delegate: AsyncClient + config: SolrClientConfig + + def __init__(self, cfg: SolrClientConfig): + self.config = cfg + url_parsed = list(urlparse(cfg.base_url)) + url_parsed[2] = urljoin(url_parsed[2], "/api/cores") + burl = urlunparse(url_parsed) + bauth = BasicAuth(username=cfg.user.username, password=cfg.user.password) if cfg.user is not None else None + self.delegate = AsyncClient(auth=bauth, base_url=burl, timeout=cfg.timeout) + + async def __aenter__(self) -> Self: + await self.delegate.__aenter__() + return self + + async def __aexit__( + self, exc_type: type[BaseException] | None, exc: BaseException | None, tb: TracebackType | None + ) -> None: + return await self.delegate.__aexit__(exc_type, exc, tb) + + async def core_status(self, core_name: str | None) -> dict[str, Any] | None: + """Return the status of the connected core or the one given by `core_name`.""" + core = core_name or self.config.core + resp = await self.delegate.get(f"/{core}") + if not resp.is_success: + raise SolrClientStatusException(self.config, resp) + else: + data = resp.json()["status"][self.config.core] + # if the core doesn't exist, solr returns 200 with an empty body + return data if data.get("name") == self.config.core else None + + async def create(self, core_name: str | None) -> None: + """Create a core with the given `core_name` or the name provided in the config object.""" + core = core_name or self.config.core + data = {"create": {"name": core, "configSet": "_default"}} + resp = await self.delegate.post("", json=data) + if not resp.is_success: + raise SolrClientCreateCoreException(core, resp) + else: + return None diff --git a/components/renku_data_services/solr/solr_migrate.py 
new file mode 100644
index 000000000..bcb024ca5
--- /dev/null
+++ b/components/renku_data_services/solr/solr_migrate.py
@@ -0,0 +1,278 @@
+"""Manage solr schema migrations."""
+
+from dataclasses import dataclass
+from typing import Any, Self
+
+import pydantic
+from pydantic import AliasChoices, BaseModel
+
+from renku_data_services.app_config import logging
+from renku_data_services.solr.solr_client import (
+    DefaultSolrAdminClient,
+    DefaultSolrClient,
+    DocVersion,
+    DocVersions,
+    SolrClientConfig,
+    SolrClientCreateCoreException,
+    UpsertSuccess,
+)
+from renku_data_services.solr.solr_schema import (
+    AddCommand,
+    CopyFieldRule,
+    CoreSchema,
+    DeleteDynamicFieldCommand,
+    DeleteFieldCommand,
+    DeleteFieldTypeCommand,
+    DynamicFieldRule,
+    Field,
+    FieldType,
+    ReplaceCommand,
+    SchemaCommand,
+    SchemaCommandList,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def _is_applied(schema: CoreSchema, cmd: SchemaCommand) -> bool:
+    """Check whether a schema command is already applied to the given schema."""
+    match cmd:
+        case AddCommand(FieldType() as ft):
+            return any(x.name == ft.name for x in schema.fieldTypes)
+
+        case AddCommand(Field() as f):
+            return any(x.name == f.name for x in schema.fields)
+
+        case AddCommand(DynamicFieldRule() as f):
+            return any(x.name == f.name for x in schema.dynamicFields)
+
+        case AddCommand(CopyFieldRule() as f):
+            return any(x.source == f.source and x.dest == f.dest for x in schema.copyFields)
+
+        case DeleteFieldCommand(f):
+            return all(x.name != f for x in schema.fields)
+
+        case DeleteFieldTypeCommand(f):
+            return all(x.name != f for x in schema.fieldTypes)
+
+        case DeleteDynamicFieldCommand(f):
+            return all(x.name != f for x in schema.dynamicFields)
+
+        case ReplaceCommand(FieldType() as ft):
+            return any(x == ft for x in schema.fieldTypes)
+
+        case ReplaceCommand(Field() as f):
+            return any(x == f for x in schema.fields)
+
+        case _:
+            return False
+
+
+@dataclass
+class SchemaMigration:
+    """A migration consisting of the version and a set of schema commands."""
+
+    version: int
+    commands: list[SchemaCommand]
+    requires_reindex: bool
+
+    def is_empty(self) -> bool:
+        """Return True if this migration contains no commands."""
+        return self.commands == []
+
+    def align_with(self, schema: CoreSchema) -> Self:
+        """Aligns the list of schema commands to the given schema.
+
+        Return a copy of this value, removing all schema commands that have already
+        been applied to the given schema.
+        """
+        cmds = list(filter(lambda e: not (_is_applied(schema, e)), self.commands))
+        return type(self)(version=self.version, commands=cmds, requires_reindex=self.requires_reindex)
+
+
+@dataclass
+class MigrateResult:
+    """The overall result of running a set of migrations."""
+
+    start_version: int | None
+    end_version: int | None
+    migrations_run: int
+    migrations_skipped: int
+    requires_reindex: bool
+
+    @classmethod
+    def empty(cls, version: int | None = None) -> "MigrateResult":
+        """Create an empty MigrateResult."""
+        return MigrateResult(version, version, 0, 0, False)
+
+
+class VersionDoc(BaseModel):
+    """A document tracking the schema migration.
+
+    The field names correspond to solr dynamic fields. Since this is
+    the document that gets inserted before any of our schema migrations
+    run, it uses solr dynamic fields: a type suffix appended to the
+    field name indicates the type of the field, so `_b` marks a bool
+    and `_l` a long/int.
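+
+    For illustration, the stored document is assumed to look roughly like this
+    in solr (the `_version_` value is assigned by solr itself):
+
+        {"id": "VERSION_ID_...", "current_schema_version_l": 9,
+         "migration_running_b": false, "_version_": 1712345678901234567}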
+ """ + + id: str + current_schema_version_l: int + migration_running_b: bool + version: DocVersion = pydantic.Field( + serialization_alias="_version_", validation_alias=AliasChoices("version", "_version_") + ) + + def to_dict(self) -> dict[str, Any]: + """Return the dict representation of this document.""" + return self.model_dump(by_alias=True) + + +class MigrationState(BaseModel): + """A private class tracking intermediate schema changes per migration.""" + + solr_schema: CoreSchema + doc: VersionDoc + skipped_migrations: int + + +class SolrMigrateException(Exception): + """Base exception for migration errors.""" + + def __init(self, message: str) -> None: + super().__init__(message) + + +class SchemaMigrator: + """Allows to inspect the current schema version and run schema migrations against a solr core.""" + + def __init__(self, cfg: SolrClientConfig) -> None: + self.__config = cfg + self.__docId: str = "VERSION_ID_EB779C6B-1D96-47CB-B304-BECF15E4A607" + + async def ensure_core(self) -> None: + """Ensures an existing core. + + If no core is found, one is created using the admin api. + """ + async with DefaultSolrAdminClient(self.__config) as client: + status = await client.core_status(None) + if status is None: + try: + logger.warning(f"Solr core {self.__config.core} not found. Attempt to create it.") + await client.create(None) + except SolrClientCreateCoreException as info: + logger.error(f"Error creating core {self.__config.core}, assume it already exists", exc_info=info) + else: + logger.info(f"Solr core {self.__config.core} already exists.") + + async def current_version(self) -> int | None: + """Return the current schema version.""" + async with DefaultSolrClient(self.__config) as client: + doc = await self.__current_version0(client) + if doc is None: + return None + else: + return doc.current_schema_version_l + + async def __current_version0(self, client: DefaultSolrClient) -> VersionDoc | None: + """Return the current schema version document.""" + resp = await client.get_raw(self.__docId) + if not resp.is_success: + raise SolrMigrateException(f"Unexpected return from solr: {resp.status_code} ({resp.text})") + else: + docs = resp.json()["response"]["docs"] + if docs == []: + return None + else: + return VersionDoc.model_validate(docs[0]) + + async def migrate(self, migrations: list[SchemaMigration]) -> MigrateResult: + """Run all given migrations, skipping those that have been done before.""" + async with DefaultSolrClient(self.__config) as client: + initial_doc = await self.__current_version0(client) + if initial_doc is None: + initial_doc = VersionDoc( + id=self.__docId, + current_schema_version_l=-1, + migration_running_b=False, + version=DocVersions.not_exists(), + ) + + if initial_doc.migration_running_b: + logger.info("A solr migration is already running") + return MigrateResult.empty() + else: + initial_doc.migration_running_b = True + update_result = await client.upsert([initial_doc]) + match update_result: + case "VersionConflict": + logger.info("Another solr migration just begun. 
Skipping this one.") + return MigrateResult.empty() + case UpsertSuccess(): + try: + initial_doc = await self.__current_version0(client) + if initial_doc is None: + raise SolrMigrateException("No inital migration document found after inserting it.") + return await self.__doMigrate(client, migrations, initial_doc) + finally: + last_doc = await self.__current_version0(client) + if last_doc is not None: + last_doc.migration_running_b = False + await client.upsert([last_doc]) + + async def __doMigrate( + self, client: DefaultSolrClient, migrations: list[SchemaMigration], initialDoc: VersionDoc + ) -> MigrateResult: + logger.info( + f"Core {self.__config.core}: Found current schema version: " + f"{initialDoc.current_schema_version_l} using {self.__docId}" + ) + remain = [e for e in migrations if e.version > initialDoc.current_schema_version_l] + logger.info(f"There are {len(remain)} migrations to run") + if remain == []: + return MigrateResult.empty(version=initialDoc.current_schema_version_l) + + remain.sort(key=lambda m: m.version) + schema = await client.get_schema() + state = MigrationState(solr_schema=schema, doc=initialDoc, skipped_migrations=0) + for x in remain: + state = await self.__applyMigration(client, state, x) + + return MigrateResult( + start_version=initialDoc.current_schema_version_l, + end_version=remain[-1].version, + migrations_run=len(remain), + migrations_skipped=state.skipped_migrations, + requires_reindex=any(x.requires_reindex for x in remain), + ) + + async def __applyMigration( + self, client: DefaultSolrClient, state: MigrationState, m: SchemaMigration + ) -> MigrationState: + cmds = m.align_with(state.solr_schema) + if cmds.is_empty(): + logger.info(f"Migration {m.version} seems to be applied. Skipping it") + v = await self.__upsert_version(client, state.doc, m.version) + return state.model_copy(update={"skippedMigrations": state.skipped_migrations + 1, "doc": v}) + else: + r = await client.modify_schema(SchemaCommandList(cmds.commands)) + if not r.is_success: + raise SolrMigrateException(f"Schema modification failed {r.status_code}/{r.text}") + + schema = await client.get_schema() + doc = await self.__upsert_version(client, state.doc, m.version) + return MigrationState(solr_schema=schema, doc=doc, skipped_migrations=state.skipped_migrations) + + async def __upsert_version(self, client: DefaultSolrClient, current: VersionDoc, next: int) -> VersionDoc: + logger.info(f"core {self.__config.core}: set schema migration version to {next}") + next_doc = current.model_copy(update={"current_schema_version_l": next}) + update_result = await client.upsert([next_doc]) + match update_result: + case "VersionConflict": + raise SolrMigrateException("VersionConflict when updating migration tracking document!") + case _: + pass + + result = await client.get(self.__docId) + return VersionDoc.model_validate(result.response.docs[0]) diff --git a/components/renku_data_services/solr/solr_schema.py b/components/renku_data_services/solr/solr_schema.py new file mode 100644 index 000000000..e54ccdca2 --- /dev/null +++ b/components/renku_data_services/solr/solr_schema.py @@ -0,0 +1,439 @@ +"""Schema modification for solr.""" + +from __future__ import annotations + +import json +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import Any, NewType, Self, final + +import pydantic +from pydantic import AliasChoices, BaseModel, model_serializer + +TypeName = NewType("TypeName", str) +FieldName = NewType("FieldName", str) + + +class SchemaModel(BaseModel, 
+    """Base class of a solr schema type."""
+
+    def to_dict(self) -> dict[str, Any]:
+        """Return the dict representation of this schema model type."""
+        return self.model_dump(by_alias=True, exclude_defaults=True)
+
+    def to_json(self) -> str:
+        """Return this schema model as JSON."""
+        return json.dumps(self.to_dict())
+
+
+@final
+class Tokenizer(SchemaModel, frozen=True):
+    """A solr tokenizer: https://solr.apache.org/guide/solr/latest/indexing-guide/tokenizers.html."""
+
+    name: str
+
+
+@final
+class Tokenizers:
+    """Some predefined tokenizers."""
+
+    standard: Tokenizer = Tokenizer(name="standard")
+    whitespace: Tokenizer = Tokenizer(name="whitespace")
+    classic: Tokenizer = Tokenizer(name="classic")
+
+    # https://solr.apache.org/guide/solr/latest/indexing-guide/tokenizers.html#uax29-url-email-tokenizer
+    uax29UrlEmail: Tokenizer = Tokenizer(name="uax29UrlEmail")
+    icu: Tokenizer = Tokenizer(name="icu")
+    openNlp: Tokenizer = Tokenizer(name="openNlp")
+
+
+@final
+class Filter(BaseModel):
+    """Defines a SOLR filter. See https://solr.apache.org/guide/solr/latest/indexing-guide/filters.html."""
+
+    name: str
+    settings: dict | None = None
+
+    @model_serializer()
+    def to_dict(self) -> dict[str, Any]:
+        """Return a dict representation for this filter."""
+        match self.settings:
+            case None:
+                return {"name": self.name}
+            case _:
+                data = self.settings.copy()
+                data.update({"name": self.name})
+                return data
+
+
+@final
+class Filters:
+    """A list of predefined filters supported by SOLR."""
+
+    ascii_folding = Filter(name="asciiFolding")
+    lowercase = Filter(name="lowercase")
+    stop = Filter(name="stop")
+    english_minimal_stem = Filter(name="englishMinimalStem")
+    classic = Filter(name="classic")
+    ngram = Filter(name="nGram")
+
+    @classmethod
+    def edgeNgram(cls, min_gram_size: int = 3, max_gram_size: int = 6, preserve_original: bool = True) -> Filter:
+        """Create an edgeNGram filter with the given settings."""
+        return Filter(
+            name="edgeNGram",
+            settings={
+                "minGramSize": f"{min_gram_size}",
+                "maxGramSize": f"{max_gram_size}",
+                "preserveOriginal": f"{json.dumps(preserve_original)}",
+            },
+        )
+
+
+@final
+class Analyzer(SchemaModel, frozen=True):
+    """A solr analyzer: https://solr.apache.org/guide/solr/latest/indexing-guide/analyzers.html."""
+
+    tokenizer: Tokenizer
+    filters: list[Filter] = pydantic.Field(default_factory=list)
+
+
+FieldTypeClass = NewType("FieldTypeClass", str)
+
+
+@final
+class FieldTypeClasses:
+    """A list of field type classes."""
+
+    type_int = FieldTypeClass("IntPointField")
+    type_long = FieldTypeClass("LongPointField")
+    type_float = FieldTypeClass("FloatPointField")
+    type_double = FieldTypeClass("DoublePointField")
+    type_text = FieldTypeClass("TextField")
+    type_str = FieldTypeClass("StrField")
+    type_uuid = FieldTypeClass("UUIDField")
+    type_rank = FieldTypeClass("RankField")
+    type_date_point = FieldTypeClass("DatePointField")
+    type_date_range = FieldTypeClass("DateRangeField")
+    type_bool = FieldTypeClass("BoolField")
+
+
+@final
+class FieldType(SchemaModel, frozen=True):
+    """A solr field type: https://solr.apache.org/guide/solr/latest/indexing-guide/field-type-definitions-and-properties.html."""
+
+    name: TypeName
+    clazz: FieldTypeClass = pydantic.Field(validation_alias=AliasChoices("clazz", "class"), serialization_alias="class")
+    indexAnalyzer: Analyzer | None = None
+    queryAnalyzer: Analyzer | None = None
+    required: bool = False
+    indexed: bool = False
+    stored: bool = True
+    multiValued: bool = False
+    uninvertible: bool = False
+    docValues: bool = False
+    sortMissingLast: bool = True
+
+    def make_doc_value(self) -> Self:
+        """Return a copy with docValues=True."""
+        return self.model_copy(update={"docValues": True})
+
+    def make_multi_valued(self) -> Self:
+        """Return a copy with multiValued=True."""
+        return self.model_copy(update={"multiValued": True})
+
+    def with_analyzer(self, a: Analyzer) -> Self:
+        """Return a copy with both analyzers set to the given one."""
+        return self.model_copy(update={"queryAnalyzer": a, "indexAnalyzer": a})
+
+    def with_query_analyzer(self, a: Analyzer) -> Self:
+        """Return a copy with the query analyzer set to the given one."""
+        return self.model_copy(update={"queryAnalyzer": a})
+
+    def with_index_analyzer(self, a: Analyzer) -> Self:
+        """Return a copy with the index analyzer set to the given one."""
+        return self.model_copy(update={"indexAnalyzer": a})
+
+    @classmethod
+    def id(cls, name: TypeName) -> FieldType:
+        """Create a field type that can be used for a document id."""
+        return FieldType(name=name, clazz=FieldTypeClasses.type_str)
+
+    @classmethod
+    def text(cls, name: TypeName) -> FieldType:
+        """Create a text field type."""
+        return FieldType(name=name, clazz=FieldTypeClasses.type_text)
+
+    @classmethod
+    def str(cls, name: TypeName) -> FieldType:
+        """Create a StrField field type."""
+        return FieldType(name=name, clazz=FieldTypeClasses.type_str)
+
+    @classmethod
+    def int(cls, name: TypeName) -> FieldType:
+        """Create an IntPointField field type."""
+        return FieldType(name=name, clazz=FieldTypeClasses.type_int)
+
+    @classmethod
+    def long(cls, name: TypeName) -> FieldType:
+        """Create a LongPointField field type."""
+        return FieldType(name=name, clazz=FieldTypeClasses.type_long)
+
+    @classmethod
+    def boolean(cls, name: TypeName) -> FieldType:
+        """Create a boolean field type."""
+        return FieldType(name=name, clazz=FieldTypeClasses.type_bool)
+
+    @classmethod
+    def double(cls, name: TypeName) -> FieldType:
+        """Create a DoublePointField field type."""
+        return FieldType(name=name, clazz=FieldTypeClasses.type_double)
+
+    @classmethod
+    def date_time(cls, name: TypeName) -> FieldType:
+        """Create a DateRange field type."""
+        return FieldType(name=name, clazz=FieldTypeClasses.type_date_range)
+
+    @classmethod
+    def date_time_point(cls, name: TypeName) -> FieldType:
+        """Create a DatePoint field type."""
+        return FieldType(name=name, clazz=FieldTypeClasses.type_date_point)
+
+
+@final
+class Field(SchemaModel, frozen=True):
+    """A solr field: https://solr.apache.org/guide/solr/latest/indexing-guide/fields.html."""
+
+    name: FieldName
+    type: TypeName
+    required: bool = False
+    indexed: bool = True
+    stored: bool = True
+    multiValued: bool = False
+    uninvertible: bool = False
+    docValues: bool = True
+
+    @classmethod
+    def of(cls, name: FieldName, type: FieldType) -> Field:
+        """Alternative constructor given a `FieldType` instead of a `TypeName`."""
+        return Field(name=name, type=type.name)
+
+    def make_multi_valued(self) -> Self:
+        """Return a copy with multiValued=True."""
+        return self.model_copy(update={"multiValued": True})
+
+
+@final
+class DynamicFieldRule(SchemaModel, frozen=True):
+    """A solr dynamic field: https://solr.apache.org/guide/solr/latest/indexing-guide/dynamic-fields.html."""
+
+    name: FieldName
+    type: TypeName
+    required: bool = False
+    indexed: bool = True
+    stored: bool = True
+    multiValued: bool = False
+    uninvertible: bool = False
+    docValues: bool = True
+
+
+@final
+class CopyFieldRule(SchemaModel, frozen=True):
+    """A solr copy field: 
https://solr.apache.org/guide/solr/latest/indexing-guide/copy-fields.html.""" + + source: FieldName + dest: FieldName + maxChars: int | None = None + + +class SchemaCommand(ABC): + """A base class for a schema command. + + A schema command is a single action modifying the solr schema. + See https://solr.apache.org/guide/solr/latest/indexing-guide/schema-api.html + """ + + @abstractmethod + def to_dict(self) -> dict[str, Any]: + """Return the dict representation for this schema command.""" + ... + + @abstractmethod + def command_name(self) -> str: + """Return the command name.""" + ... + + +@dataclass +@final +class AddCommand(SchemaCommand): + """SchemaCommand to add a field, field-type, dynamic field or copy field.""" + + value: Field | FieldType | DynamicFieldRule | CopyFieldRule + + def command_name(self) -> str: + """Return the command name.""" + match self.value: + case Field(): + return "add-field" + + case FieldType(): + return "add-field-type" + + case DynamicFieldRule(): + return "add-dynamic-field" + + case CopyFieldRule(): + return "add-copy-field" + + def to_dict(self) -> dict[str, Any]: + """Return the dict representation for this schema command.""" + match self.value: + case Field() as f: + return f.to_dict() + + case FieldType() as f: + return f.to_dict() + + case DynamicFieldRule() as f: + return f.to_dict() + + case CopyFieldRule() as f: + return f.to_dict() + + +@dataclass +@final +class ReplaceCommand(SchemaCommand): + """Replace a field, field type or dynamic field.""" + + value: FieldType | Field | DynamicFieldRule + + def command_name(self) -> str: + """Return the command name.""" + match self.value: + case Field(): + return "replace-field" + case FieldType(): + return "replace-field-type" + case DynamicFieldRule(): + return "replace-dynamic-field" + + def to_dict(self) -> dict[str, Any]: + """Return the dict representation for this schema command.""" + match self.value: + case Field() as f: + return f.to_dict() + case FieldType() as f: + return f.to_dict() + case DynamicFieldRule() as f: + return f.to_dict() + + +@dataclass +@final +class DeleteFieldCommand(SchemaCommand): + """Delete a field.""" + + name: FieldName + + def command_name(self) -> str: + """Return the command name.""" + return "delete-field" + + def to_dict(self) -> dict[str, Any]: + """Return the dict representation for this schema command.""" + return {"name": self.name} + + +@dataclass +@final +class DeleteFieldTypeCommand(SchemaCommand): + """Delete a field type.""" + + name: TypeName + + def command_name(self) -> str: + """Return the command name.""" + return "delete-field-type" + + def to_dict(self) -> dict[str, Any]: + """Return the dict representation for this schema command.""" + return {"name": self.name} + + +@dataclass +@final +class DeleteDynamicFieldCommand(SchemaCommand): + """Delete a dynamic field.""" + + name: FieldName + + def command_name(self) -> str: + """Return the command name.""" + return "delete-dynamic-field" + + def to_dict(self) -> dict[str, Any]: + """Return the dict representation for this schema command.""" + return {"name": self.name} + + +@dataclass +@final +class DeleteCopyFieldCommand(SchemaCommand): + """Delete a copy field rule.""" + + source: FieldName + dest: FieldName + + def command_name(self) -> str: + """Return the command name.""" + return "delete-copy-field" + + def to_dict(self) -> dict[str, Any]: + """Return the dict representation for this schema command.""" + return {"source": self.source, "dest": self.dest} + + +@dataclass +@final +class 
SchemaCommandList:
+    """A list of `SchemaCommand`s that provides a to_json method as expected by the solr schema api."""
+
+    value: list[SchemaCommand]
+
+    def is_not_empty(self) -> bool:
+        """Return True if the command list is not empty."""
+        return len(self.value) > 0
+
+    def is_empty(self) -> bool:
+        """Return True if the command list is empty."""
+        return not self.is_not_empty()
+
+    def to_json(self) -> str:
+        """Return the JSON for all schema commands.
+
+        Solr uses multiple same named keys in a JSON object to refer to multiple schema
+        commands. So this implementation is a bit awkward to produce the required format.
+        """
+        if self.is_empty():
+            return "{}"
+        result = "{"
+        for e in self.value:
+            result += '"' + e.command_name() + '":'
+            result += json.dumps(e.to_dict())
+            result += ","
+
+        result = result[:-1] + "}"
+        return result
+
+
+@final
+class CoreSchema(BaseModel):
+    """The complete schema of a solr core."""
+
+    name: str
+    version: float
+    uniqueKey: FieldName
+    fieldTypes: list[FieldType] = pydantic.Field(default_factory=list)
+    fields: list[Field] = pydantic.Field(default_factory=list)
+    dynamicFields: list[DynamicFieldRule] = pydantic.Field(default_factory=list)
+    copyFields: list[CopyFieldRule] = pydantic.Field(default_factory=list)
diff --git a/components/renku_data_services/storage/README.md b/components/renku_data_services/storage/README.md
index ed8b5685b..898bbee02 100644
--- a/components/renku_data_services/storage/README.md
+++ b/components/renku_data_services/storage/README.md
@@ -2,4 +2,4 @@
 
 Contains APIspec generated schemas for cloud storage functionality.
 
-To create the RClone schema, run `rclone config providers > rclone_schema.autogenerated.json`.
+To create the RClone schema, run `rclone config providers | jq 'sort_by(.Name)' > rclone_schema.autogenerated.json`.
diff --git a/components/renku_data_services/storage/api.spec.yaml b/components/renku_data_services/storage/api.spec.yaml
index b02d3dffd..467f6ebf4 100644
--- a/components/renku_data_services/storage/api.spec.yaml
+++ b/components/renku_data_services/storage/api.spec.yaml
@@ -365,6 +365,11 @@ components:
           type: array
           items:
             $ref: "#/components/schemas/RCloneOption"
+      required:
+        - name
+        - description
+        - prefix
+        - options
     RCloneOption:
       type: object
       description: Single field on an RClone storage, like "remote" or "access_key_id"
@@ -405,6 +410,9 @@ components:
             provider:
               type: string
               description: The provider this value is applicable for. Empty if valid for all providers.
+          required:
+            - value
+            - help
         required:
           type: boolean
           description: whether the option is required or not
@@ -423,7 +431,18 @@ components:
         type:
           type: string
           description: data type of option value. RClone has more options but they map to the ones listed here.
- enum: ["int", "bool", "string", "Time", "Duration", "MultiEncoder", "SizeSuffix", "SpaceSepList", "CommaSepList", "Tristate"] + enum: ["int", "bool", "string", "stringArray", "Time", "Duration", "MultiEncoder", "SizeSuffix", "SpaceSepList", "CommaSepList", "Tristate", "Encoding", "Bits"] + required: + - name + - help + - default + - default_str + - required + - ispassword + - sensitive + - advanced + - exclusive + - type Ulid: description: ULID identifier type: string diff --git a/components/renku_data_services/storage/apispec.py b/components/renku_data_services/storage/apispec.py index 7b468e287..a5b7a1465 100644 --- a/components/renku_data_services/storage/apispec.py +++ b/components/renku_data_services/storage/apispec.py @@ -1,6 +1,6 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2024-10-28T17:26:56+00:00 +# timestamp: 2025-06-19T07:18:05+00:00 from __future__ import annotations @@ -18,10 +18,8 @@ class RCloneConfigValidate( class Example(BaseAPISpec): - value: Optional[str] = Field( - None, description="a potential value for the option (think enum)" - ) - help: Optional[str] = Field(None, description="help text for the value") + value: str = Field(..., description="a potential value for the option (think enum)") + help: str = Field(..., description="help text for the value") provider: Optional[str] = Field( None, description="The provider this value is applicable for. Empty if valid for all providers.", @@ -32,6 +30,7 @@ class Type(Enum): int = "int" bool = "bool" string = "string" + stringArray = "stringArray" Time = "Time" Duration = "Duration" MultiEncoder = "MultiEncoder" @@ -39,45 +38,45 @@ class Type(Enum): SpaceSepList = "SpaceSepList" CommaSepList = "CommaSepList" Tristate = "Tristate" + Encoding = "Encoding" + Bits = "Bits" class RCloneOption(BaseAPISpec): - name: Optional[str] = Field(None, description="name of the option") - help: Optional[str] = Field(None, description="help text for the option") + name: str = Field(..., description="name of the option") + help: str = Field(..., description="help text for the option") provider: Optional[str] = Field( None, description="The cloud provider the option is for (See 'provider' RCloneOption in the schema for potential values)", - example="AWS", + examples=["AWS"], ) - default: Optional[Union[float, str, bool, Dict[str, Any], List]] = Field( - None, description="default value for the option" + default: Union[float, str, bool, Dict[str, Any], List] = Field( + ..., description="default value for the option" ) - default_str: Optional[str] = Field( - None, description="string representation of the default value" + default_str: str = Field( + ..., description="string representation of the default value" ) examples: Optional[List[Example]] = Field( None, description="These list potential values for this option, like an enum. With `exclusive: true`, only a value from the list is allowed.", ) - required: Optional[bool] = Field( - None, description="whether the option is required or not" - ) - ispassword: Optional[bool] = Field( - None, description="whether the field is a password (use **** for display)" + required: bool = Field(..., description="whether the option is required or not") + ispassword: bool = Field( + ..., description="whether the field is a password (use **** for display)" ) - sensitive: Optional[bool] = Field( - None, + sensitive: bool = Field( + ..., description="whether the value is sensitive (not stored in the service). 
Do not send this in requests to the service.", ) - advanced: Optional[bool] = Field( - None, + advanced: bool = Field( + ..., description="whether this is an advanced config option (probably don't show these to users)", ) - exclusive: Optional[bool] = Field( - None, description="if true, only values from 'examples' can be used" + exclusive: bool = Field( + ..., description="if true, only values from 'examples' can be used" ) - type: Optional[Type] = Field( - None, + type: Type = Field( + ..., description="data type of option value. RClone has more options but they map to the ones listed here.", ) @@ -96,17 +95,19 @@ class GitlabProjectId(RootModel[str]): root: str = Field( ..., description="Project id of a gitlab project (only int project id allowed, encoded as string for future-proofing)", - example="123456", + examples=["123456"], pattern="^[0-9]+$", ) class Error(BaseAPISpec): - code: int = Field(..., example=1404, gt=0) + code: int = Field(..., examples=[1404], gt=0) detail: Optional[str] = Field( - None, example="A more detailed optional message showing what the problem was" + None, examples=["A more detailed optional message showing what the problem was"] + ) + message: str = Field( + ..., examples=["Something went wrong - please try again later"] ) - message: str = Field(..., example="Something went wrong - please try again later") class ErrorResponse(BaseAPISpec): @@ -120,7 +121,7 @@ class StorageParams(BaseAPISpec): project_id: Optional[str] = Field( None, description="Project id of a gitlab project (only int project id allowed, encoded as string for future-proofing)", - example="123456", + examples=["123456"], pattern="^[0-9]+$", ) @@ -134,7 +135,7 @@ class StorageSchemaTestConnectionPostRequest(BaseAPISpec): source_path: str = Field( ..., description="the source path to mount, usually starts with bucket/container name", - example="bucket/my/storage/folder/", + examples=["bucket/my/storage/folder/"], ) @@ -154,7 +155,7 @@ class CloudStorageUrl(ProjectId): target_path: str = Field( ..., description="the target path relative to the repository where the storage should be mounted", - example="my/project/folder", + examples=["my/project/folder"], ) readonly: bool = Field( True, description="Whether this storage should be mounted readonly or not" @@ -173,12 +174,12 @@ class CloudStorage(ProjectId): source_path: str = Field( ..., description="the source path to mount, usually starts with bucket/container name", - example="bucket/my/storage/folder/", + examples=["bucket/my/storage/folder/"], ) target_path: str = Field( ..., description="the target path relative to the repository where the storage should be mounted", - example="my/project/folder", + examples=["my/project/folder"], ) readonly: bool = Field( True, description="Whether this storage should be mounted readonly or not" @@ -203,12 +204,12 @@ class CloudStoragePatch(BaseAPISpec): source_path: Optional[str] = Field( None, description="the source path to mount, usually starts with bucket/container name", - example="bucket/my/storage/folder/", + examples=["bucket/my/storage/folder/"], ) target_path: Optional[str] = Field( None, description="the target path relative to the repository where the storage should be mounted", - example="my/project/folder", + examples=["my/project/folder"], ) readonly: Optional[bool] = Field( None, description="Whether this storage should be mounted readonly or not" @@ -231,13 +232,11 @@ class CloudStorageGet(BaseAPISpec): class RCloneEntry(BaseAPISpec): - name: Optional[str] = Field(None, 
description="Human readable name of the provider") - description: Optional[str] = Field(None, description="description of the provider") - prefix: Optional[str] = Field( - None, description="Machine readable name of the provider" - ) - options: Optional[List[RCloneOption]] = Field( - None, description="Fields/properties used for this storage." + name: str = Field(..., description="Human readable name of the provider") + description: str = Field(..., description="description of the provider") + prefix: str = Field(..., description="Machine readable name of the provider") + options: List[RCloneOption] = Field( + ..., description="Fields/properties used for this storage." ) diff --git a/components/renku_data_services/storage/blueprints.py b/components/renku_data_services/storage/blueprints.py index 6aa0a46c0..270adb602 100644 --- a/components/renku_data_services/storage/blueprints.py +++ b/components/renku_data_services/storage/blueprints.py @@ -3,6 +3,7 @@ from dataclasses import dataclass from typing import Any +from pydantic import ValidationError as PydanticValidationError from sanic import HTTPResponse, Request, empty from sanic.response import JSONResponse from sanic_ext import validate @@ -21,12 +22,23 @@ def dump_storage_with_sensitive_fields(storage: models.CloudStorage, validator: RCloneValidator) -> dict[str, Any]: """Dump a CloudStorage model alongside sensitive fields.""" - return apispec.CloudStorageGet.model_validate( - { - "storage": apispec.CloudStorageWithId.model_validate(storage).model_dump(exclude_none=True), - "sensitive_fields": validator.get_private_fields(storage.configuration), - } - ).model_dump(exclude_none=True) + try: + body = apispec.CloudStorageGet.model_validate( + { + "storage": apispec.CloudStorageWithId.model_validate(storage).model_dump(exclude_none=True), + "sensitive_fields": [ + option.model_dump(exclude_none=True, by_alias=True) + for option in validator.get_private_fields(storage.configuration) + ], + } + ).model_dump(exclude_none=True) + except PydanticValidationError as err: + parts = [".".join(str(i) for i in field["loc"]) + ": " + field["msg"] for field in err.errors()] + message = ( + f"The server could not construct a valid response. 
Errors found in the following fields: {', '.join(parts)}" + ) + raise errors.ProgrammingError(message=message) from err + return body @dataclass(kw_only=True) diff --git a/components/renku_data_services/storage/rclone.py b/components/renku_data_services/storage/rclone.py index 1eb013267..48d00bce1 100644 --- a/components/renku_data_services/storage/rclone.py +++ b/components/renku_data_services/storage/rclone.py @@ -8,9 +8,12 @@ from typing import TYPE_CHECKING, Any, NamedTuple, Union, cast from pydantic import BaseModel, Field, ValidationError -from sanic.log import logger from renku_data_services import errors +from renku_data_services.app_config import logging +from renku_data_services.storage.rclone_patches import BANNED_STORAGE, apply_patches + +logger = logging.getLogger(__name__) if TYPE_CHECKING: from renku_data_services.storage.models import RCloneConfig @@ -23,20 +26,6 @@ class ConnectionResult(NamedTuple): error: str -BANNED_STORAGE = { - "alias", - "crypt", - "cache", - "chunker", - "combine", - "compress", - "hasher", - "local", - "memory", - "union", -} - - class RCloneValidator: """Class for validating RClone configs.""" @@ -45,7 +34,7 @@ def __init__(self) -> None: with open(Path(__file__).parent / "rclone_schema.autogenerated.json") as f: spec = json.load(f) - self.apply_patches(spec) + apply_patches(spec) self.providers: dict[str, RCloneProviderSchema] = {} @@ -57,176 +46,6 @@ def __init__(self) -> None: logger.error("Couldn't load RClone config: %s", provider_config) raise - @staticmethod - def __patch_schema_remove_unsafe(spec: list[dict[str, Any]]) -> None: - """Remove storages that aren't safe to use in the service.""" - indices = [i for i, v in enumerate(spec) if v["Prefix"] in BANNED_STORAGE] - for i in sorted(indices, reverse=True): - spec.pop(i) - - @staticmethod - def __patch_schema_sensitive(spec: list[dict[str, Any]]) -> None: - """Fix sensitive settings on providers.""" - for storage in spec: - if storage["Prefix"] == "azureblob": - for option in storage["Options"]: - if option["Name"] == "account": - option["Sensitive"] = False - if storage["Prefix"] == "webdav": - for option in storage["Options"]: - if option["Name"] == "user": - option["Sensitive"] = False - if option["Name"] == "pass": - option["Sensitive"] = True - - @staticmethod - def __patch_schema_s3_endpoint_required(spec: list[dict[str, Any]]) -> None: - """Make endpoint required for 'Other' provider.""" - for storage in spec: - if storage["Prefix"] == "s3": - for option in storage["Options"]: - if option["Name"] == "endpoint" and option["Provider"].startswith( - "!AWS,ArvanCloud,IBMCOS,IDrive,IONOS," - ): - option["Required"] = True - - @staticmethod - def __patch_schema_add_switch_provider(spec: list[dict[str, Any]]) -> None: - """Adds a fake provider to help with setting up switch storage.""" - s3 = next(s for s in spec if s["Prefix"] == "s3") - providers = next(o for o in s3["Options"] if o["Name"] == "provider") - providers["Examples"].append({"Value": "Switch", "Help": "Switch Object Storage", "Provider": ""}) - s3["Options"].append( - { - "Name": "endpoint", - "Help": "Endpoint for Switch S3 API.", - "Provider": "Switch", - "Default": "https://s3-zh.os.switch.ch", - "Value": None, - "Examples": [ - {"Value": "https://s3-zh.os.switch.ch", "Help": "Cloudian Hyperstore (ZH)", "Provider": ""}, - {"Value": "https://os.zhdk.cloud.switch.ch", "Help": "Ceph Object Gateway (ZH)", "Provider": ""}, - {"Value": "https://os.unil.cloud.switch.ch", "Help": "Ceph Object Gateway (LS)", "Provider": ""}, - ], - 
"ShortOpt": "", - "Hide": 0, - "Required": True, - "IsPassword": False, - "NoPrefix": False, - "Advanced": False, - "Exclusive": True, - "Sensitive": False, - "DefaultStr": "", - "ValueStr": "", - "Type": "string", - } - ) - existing_endpoint_spec = next( - o for o in s3["Options"] if o["Name"] == "endpoint" and o["Provider"].startswith("!AWS,") - ) - existing_endpoint_spec["Provider"] += ",Switch" - - @staticmethod - def __patch_schema_add_openbis_type(spec: list[dict[str, Any]]) -> None: - """Adds a fake type to help with setting up openBIS storage.""" - spec.append( - { - "Name": "openbis", - "Description": "openBIS", - "Prefix": "openbis", - "Options": [ - { - "Name": "host", - "Help": 'openBIS host to connect to.\n\nE.g. "openbis-eln-lims.ethz.ch".', - "Provider": "", - "Default": "", - "Value": None, - "Examples": [ - { - "Value": "openbis-eln-lims.ethz.ch", - "Help": "Public openBIS demo instance", - "Provider": "", - }, - ], - "ShortOpt": "", - "Hide": 0, - "Required": True, - "IsPassword": False, - "NoPrefix": False, - "Advanced": False, - "Exclusive": False, - "Sensitive": False, - "DefaultStr": "", - "ValueStr": "", - "Type": "string", - }, - { - "Name": "session_token", - "Help": "openBIS session token", - "Provider": "", - "Default": "", - "Value": None, - "ShortOpt": "", - "Hide": 0, - "Required": True, - "IsPassword": True, - "NoPrefix": False, - "Advanced": False, - "Exclusive": False, - "Sensitive": True, - "DefaultStr": "", - "ValueStr": "", - "Type": "string", - }, - ], - "CommandHelp": None, - "Aliases": None, - "Hide": False, - "MetadataInfo": None, - } - ) - - @staticmethod - def __patch_schema_remove_oauth_propeties(spec: list[dict[str, Any]]) -> None: - """Removes OAuth2 fields since we can't do an oauth flow in the rclone CSI.""" - providers = [ - "acd", - "box", - "drive", - "dropbox", - "gcs", - "gphotos", - "hidrive", - "jottacloud", - "mailru", - "onedrive", - "pcloud", - "pikpak", - "premiumzeme", - "putio", - "sharefile", - "yandex", - "zoho", - ] - for storage in spec: - if storage["Prefix"] in providers: - options = [] - for option in storage["Options"]: - if option["Name"] not in ["client_id", "client_secret"]: - options.append(option) - storage["Options"] = options - - def apply_patches(self, spec: list[dict[str, Any]]) -> None: - """Apply patches to RClone schema.""" - patches = [ - getattr(self, m) - for m in dir(self) - if callable(getattr(self, m)) and m.startswith("_RCloneValidator__patch_schema_") - ] - - for patch in patches: - patch(spec) - def validate(self, configuration: Union["RCloneConfig", dict[str, Any]], keep_sensitive: bool = False) -> None: """Validates an RClone config.""" provider = self.get_provider(configuration) @@ -270,20 +89,29 @@ async def test_connection( except errors.ValidationError as e: return ConnectionResult(False, str(e)) - obscured_rclone_config = await self.obscure_config(self.get_real_configuration(configuration)) + # Obscure configuration and transform if needed + obscured_config = await self.obscure_config(configuration) + transformed_config = self.transform_polybox_switchdriver_config(obscured_config) with tempfile.NamedTemporaryFile(mode="w+", delete=False, encoding="utf-8") as f: - obscured_rclone_config_string = "\n".join(f"{k}={v}" for k, v in obscured_rclone_config.items()) - f.write(f"[temp]\n{obscured_rclone_config_string}") + config = "\n".join(f"{k}={v}" for k, v in transformed_config.items()) + f.write(f"[temp]\n{config}") f.close() - proc = await asyncio.create_subprocess_exec( - "rclone", + args = [ 
"lsf", "--low-level-retries=1", # Connection tests should fail fast. "--retries=1", # Connection tests should fail fast. "--config", f.name, f"temp:{source_path}", + ] + # Handle SFTP retries, see https://github.com/SwissDataScienceCenter/renku-data-services/issues/893 + storage_type = cast(str, configuration.get("type")) + if storage_type == "sftp": + args.extend(["--low-level-retries", "1"]) + proc = await asyncio.create_subprocess_exec( + "rclone", + *args, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE, ) @@ -326,7 +154,7 @@ def get_provider(self, configuration: Union["RCloneConfig", dict[str, Any]]) -> def asdict(self) -> list[dict[str, Any]]: """Return Schema as dict.""" - return [provider.model_dump(exclude_none=True) for provider in self.providers.values()] + return [provider.model_dump(exclude_none=True, by_alias=True) for provider in self.providers.values()] def get_private_fields( self, configuration: Union["RCloneConfig", dict[str, Any]] @@ -335,12 +163,84 @@ def get_private_fields( provider = self.get_provider(configuration) return provider.get_private_fields(configuration) + async def get_doi_metadata(self, configuration: Union["RCloneConfig", dict[str, Any]]) -> "RCloneDOIMetadata": + """Returns the metadata of a DOI remote.""" + provider = self.get_provider(configuration) + if provider.name != "doi": + raise errors.ValidationError(message="Configuration is not of type DOI") + + # Obscure configuration and transform if needed + obscured_config = await self.obscure_config(configuration) + + with tempfile.NamedTemporaryFile(mode="w+", delete=False, encoding="utf-8") as f: + config = "\n".join(f"{k}={v}" for k, v in obscured_config.items()) + f.write(f"[temp]\n{config}") + f.close() + proc = await asyncio.create_subprocess_exec( + "rclone", + "backend", + "metadata", + "--config", + f.name, + "temp:", + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + ) + stdout, stderr = await proc.communicate() + success = proc.returncode == 0 + if success: + metadata = RCloneDOIMetadata.model_validate_json(stdout.decode().strip()) + return metadata + raise errors.ValidationError( + message=f"Could not resolve DOI {configuration.get("doi", "")} or the hosting platform is not supported", # noqa E501 + detail=f"Reason: {stderr.decode().strip()}", + ) + + @staticmethod + def transform_polybox_switchdriver_config( + configuration: Union["RCloneConfig", dict[str, Any]], + ) -> Union["RCloneConfig", dict[str, Any]]: + """Transform the configuration for public access.""" + storage_type = configuration.get("type") + + # Only process Polybox or SwitchDrive configurations + if storage_type not in {"polybox", "switchDrive"}: + return configuration + + configuration["type"] = "webdav" + + provider = configuration.get("provider") + + if provider == "personal": + configuration["url"] = configuration.get("url") or ( + "https://polybox.ethz.ch/remote.php/webdav/" + if storage_type == "polybox" + else "https://drive.switch.ch/remote.php/webdav/" + ) + return configuration + + ## Set url and username when is a shared configuration + configuration["url"] = ( + "https://polybox.ethz.ch/public.php/webdav/" + if storage_type == "polybox" + else "https://drive.switch.ch/public.php/webdav/" + ) + public_link = configuration.get("public_link") + + if not public_link: + raise ValueError("Missing 'public_link' for public access configuration.") + + # Extract the user from the public link + configuration["user"] = public_link.split("/")[-1] + + return configuration + class 
RCloneTriState(BaseModel): """Represents a Tristate of true|false|unset.""" - value: bool = Field(alias="Value") - valid: bool = Field(alias="Valid") + value: bool = Field(validation_alias="Value") + valid: bool = Field(validation_alias="Valid") class RCloneExample(BaseModel): @@ -350,31 +250,31 @@ class RCloneExample(BaseModel): be used, potentially further filtered by `provider` if a provider is selected. """ - value: str = Field(alias="Value") - help: str = Field(alias="Help") - provider: str = Field(alias="Provider") + value: str = Field(validation_alias="Value") + help: str = Field(validation_alias="Help") + provider: str | None = Field(validation_alias="Provider", default=None) class RCloneOption(BaseModel): """Option for an RClone provider.""" - name: str = Field(alias="Name") - help: str = Field(alias="Help") - provider: str = Field(alias="Provider") - default: str | int | bool | list[str] | RCloneTriState | None = Field(alias="Default") - value: str | int | bool | RCloneTriState | None = Field(alias="Value") - examples: list[RCloneExample] | None = Field(default=None, alias="Examples") - short_opt: str = Field(alias="ShortOpt") - hide: int = Field(alias="Hide") - required: bool = Field(alias="Required") - is_password: bool = Field(alias="IsPassword") - no_prefix: bool = Field(alias="NoPrefix") - advanced: bool = Field(alias="Advanced") - exclusive: bool = Field(alias="Exclusive") - sensitive: bool = Field(alias="Sensitive") - default_str: str = Field(alias="DefaultStr") - value_str: str = Field(alias="ValueStr") - type: str = Field(alias="Type") + name: str = Field(validation_alias="Name") + help: str = Field(validation_alias="Help") + provider: str | None = Field(validation_alias="Provider", default=None) + default: str | int | bool | list[str] | RCloneTriState | None = Field(validation_alias="Default") + value: str | int | bool | RCloneTriState | None = Field(validation_alias="Value") + examples: list[RCloneExample] | None = Field(default=None, validation_alias="Examples") + short_opt: str | None = Field(validation_alias="ShortOpt", default=None) + hide: int = Field(validation_alias="Hide") + required: bool = Field(validation_alias="Required") + is_password: bool = Field(validation_alias="IsPassword", serialization_alias="ispassword") + no_prefix: bool = Field(validation_alias="NoPrefix") + advanced: bool = Field(validation_alias="Advanced") + exclusive: bool = Field(validation_alias="Exclusive") + sensitive: bool = Field(validation_alias="Sensitive") + default_str: str = Field(validation_alias="DefaultStr") + value_str: str = Field(validation_alias="ValueStr") + type: str = Field(validation_alias="Type") @property def is_sensitive(self) -> bool: @@ -441,14 +341,14 @@ def validate_config( class RCloneProviderSchema(BaseModel): """Schema for an RClone provider.""" - name: str = Field(alias="Name") - description: str = Field(alias="Description") - prefix: str = Field(alias="Prefix") - options: list[RCloneOption] = Field(alias="Options") - command_help: list[dict[str, Any]] | None = Field(alias="CommandHelp") - aliases: list[str] | None = Field(alias="Aliases") - hide: bool = Field(alias="Hide") - metadata_info: dict[str, Any] | None = Field(alias="MetadataInfo") + name: str = Field(validation_alias="Name") + description: str = Field(validation_alias="Description") + prefix: str = Field(validation_alias="Prefix") + options: list[RCloneOption] = Field(validation_alias="Options") + command_help: list[dict[str, Any]] | None = Field(validation_alias="CommandHelp") + aliases: list[str] 
| None = Field(validation_alias="Aliases") + hide: bool = Field(validation_alias="Hide") + metadata_info: dict[str, Any] | None = Field(validation_alias="MetadataInfo") @property def required_options(self) -> list[RCloneOption]: @@ -553,3 +453,12 @@ def get_private_fields( if option.name not in configuration: continue yield option + + +class RCloneDOIMetadata(BaseModel): + """Schema for metadata provided by rclone about a DOI remote.""" + + doi: str = Field(alias="DOI") + url: str = Field(alias="URL") + metadata_url: str = Field(alias="metadataURL") + provider: str = Field() diff --git a/components/renku_data_services/storage/rclone_patches.py b/components/renku_data_services/storage/rclone_patches.py new file mode 100644 index 000000000..230a4b808 --- /dev/null +++ b/components/renku_data_services/storage/rclone_patches.py @@ -0,0 +1,258 @@ +"""Patches to apply to phe rclone storage schema.""" + +from copy import deepcopy +from typing import Any, Final + +from renku_data_services import errors + +BANNED_STORAGE: Final[set[str]] = { + "alias", + "crypt", + "cache", + "chunker", + "combine", + "compress", + "hasher", + "local", + "memory", + "union", +} + +OAUTH_PROVIDERS: Final[set[str]] = { + "acd", + "box", + "drive", + "dropbox", + "gcs", + "gphotos", + "hidrive", + "jottacloud", + "mailru", + "onedrive", + "pcloud", + "pikpak", + "premiumzeme", + "putio", + "sharefile", + "yandex", + "zoho", +} + + +def find_storage(spec: list[dict[str, Any]], prefix: str) -> dict[str, Any]: + """Find and return the storage schema from the spec. + + This returns the original entry for in-place modification. + """ + storage = next((s for s in spec if s["Prefix"] == prefix), None) + if not storage: + raise errors.ValidationError(message=f"'{prefix}' storage not found in schema.") + return storage + + +def __patch_schema_remove_unsafe(spec: list[dict[str, Any]]) -> None: + """Remove storages that aren't safe to use in the service.""" + indices = [i for i, v in enumerate(spec) if v["Prefix"] in BANNED_STORAGE] + for i in sorted(indices, reverse=True): + spec.pop(i) + + +def __patch_schema_sensitive(spec: list[dict[str, Any]]) -> None: + """Fix sensitive settings on providers.""" + for storage in spec: + if storage["Prefix"] == "azureblob": + for option in storage["Options"]: + if option["Name"] == "account": + option["Sensitive"] = False + if storage["Prefix"] == "webdav": + for option in storage["Options"]: + if option["Name"] == "user": + option["Sensitive"] = False + if option["Name"] == "pass": + option["Sensitive"] = True + + +def __patch_schema_s3_endpoint_required(spec: list[dict[str, Any]]) -> None: + """Make endpoint required for 'Other' provider.""" + for storage in spec: + if storage["Prefix"] == "s3": + for option in storage["Options"]: + if option["Name"] == "endpoint" and option["Provider"].startswith( + "!AWS,ArvanCloud,IBMCOS,IDrive,IONOS," + ): + option["Required"] = True + + +def __patch_schema_add_switch_provider(spec: list[dict[str, Any]]) -> None: + """Adds a fake provider to help with setting up switch storage.""" + s3 = find_storage(spec, "s3") + providers = next(o for o in s3["Options"] if o["Name"] == "provider") + providers["Examples"].append({"Value": "Switch", "Help": "Switch Object Storage", "Provider": ""}) + s3["Options"].append( + { + "Name": "endpoint", + "Help": "Endpoint for Switch S3 API.", + "Provider": "Switch", + "Default": "https://s3-zh.os.switch.ch", + "Value": None, + "Examples": [ + {"Value": "https://s3-zh.os.switch.ch", "Help": "Cloudian Hyperstore (ZH)", 
"Provider": ""}, + {"Value": "https://os.zhdk.cloud.switch.ch", "Help": "Ceph Object Gateway (ZH)", "Provider": ""}, + {"Value": "https://os.unil.cloud.switch.ch", "Help": "Ceph Object Gateway (LS)", "Provider": ""}, + ], + "ShortOpt": "", + "Hide": 0, + "Required": True, + "IsPassword": False, + "NoPrefix": False, + "Advanced": False, + "Exclusive": True, + "Sensitive": False, + "DefaultStr": "", + "ValueStr": "", + "Type": "string", + } + ) + existing_endpoint_spec = next( + o for o in s3["Options"] if o["Name"] == "endpoint" and o["Provider"].startswith("!AWS,") + ) + existing_endpoint_spec["Provider"] += ",Switch" + + +def __patch_schema_remove_oauth_propeties(spec: list[dict[str, Any]]) -> None: + """Removes OAuth2 fields since we can't do an oauth flow in the rclone CSI.""" + for storage in spec: + if storage["Prefix"] in OAUTH_PROVIDERS: + options = [] + for option in storage["Options"]: + if option["Name"] not in ["client_id", "client_secret"]: + options.append(option) + storage["Options"] = options + + +def add_webdav_based_storage( + spec: list[dict[str, Any]], + prefix: str, + name: str, + description: str, + url_value: str, + public_link_help: str, +) -> None: + """Create a modified copy of WebDAV storage and add it to the schema.""" + # Find WebDAV storage schema and create a modified copy + storage_copy = deepcopy(find_storage(spec, "webdav")) + storage_copy.update({"Prefix": prefix, "Name": name, "Description": description}) + + custom_options = [ + { + "Name": "provider", + "Help": "Choose the mode to access the data source.", + "Provider": "", + "Default": "", + "Value": None, + "Examples": [ + { + "Value": "personal", + "Help": ( + "Connect to your personal storage space. " + "This data connector cannot be used to share access to a folder." + ), + "Provider": "", + }, + { + "Value": "shared", + "Help": ( + "Connect a 'public' folder shared with others. " + "A 'public' folder may or may not be protected with a password." + ), + "Provider": "", + }, + ], + "Required": True, + "Type": "string", + "ShortOpt": "", + "Hide": 0, + "IsPassword": False, + "NoPrefix": False, + "Advanced": False, + "Exclusive": True, + "Sensitive": False, + "DefaultStr": "", + "ValueStr": "", + }, + { + "Name": "public_link", + "Help": public_link_help, + "Provider": "shared", + "Default": "", + "Value": None, + "Examples": None, + "ShortOpt": "", + "Hide": 0, + "Required": True, + "IsPassword": False, + "NoPrefix": False, + "Advanced": False, + "Exclusive": False, + "Sensitive": False, + "DefaultStr": "", + "ValueStr": "", + "Type": "string", + }, + ] + storage_copy["Options"].extend(custom_options) + + # use provider to indicate if the option is for an personal o shared storage + for option in storage_copy["Options"]: + if option["Name"] == "url": + option.update({"Provider": "personal", "Default": url_value, "Required": False}) + elif option["Name"] in ["bearer_token", "bearer_token_command", "headers", "user"]: + option["Provider"] = "personal" + + # Remove obsolete options no longer applicable for Polybox or SwitchDrive + storage_copy["Options"] = [ + o for o in storage_copy["Options"] if o["Name"] not in ["vendor", "nextcloud_chunk_size"] + ] + + spec.append(storage_copy) + + +def __patch_polybox_storage(spec: list[dict[str, Any]]) -> None: + """Add polybox virtual storage that uses webdav.""" + add_webdav_based_storage( + spec, + prefix="polybox", + name="PolyBox", + description="Polybox", + url_value="https://polybox.ethz.ch/remote.php/webdav/", + public_link_help="Shared folder link. 
E.g., https://polybox.ethz.ch/index.php/s/8NffJ3rFyHaVyyy", + ) + + +def __patch_switchdrive_storage(spec: list[dict[str, Any]]) -> None: + """Add switchdrive virtual storage that uses webdav.""" + add_webdav_based_storage( + spec, + prefix="switchDrive", + name="SwitchDrive", + description="SwitchDrive", + url_value="https://drive.switch.ch/remote.php/webdav/", + public_link_help="Shared folder link. E.g., https://drive.switch.ch/index.php/s/OPSd72zrs5JG666", + ) + + +def apply_patches(spec: list[dict[str, Any]]) -> None: + """Apply patches to RClone schema.""" + patches = [ + __patch_schema_remove_unsafe, + __patch_schema_sensitive, + __patch_schema_s3_endpoint_required, + __patch_schema_add_switch_provider, + __patch_schema_remove_oauth_propeties, + __patch_polybox_storage, + __patch_switchdrive_storage, + __patch_schema_add_openbis_type, + ] + + for patch in patches: + patch(spec) diff --git a/components/renku_data_services/storage/rclone_schema.autogenerated.json b/components/renku_data_services/storage/rclone_schema.autogenerated.json index 021a63c70..6ffcf0576 100644 --- a/components/renku_data_services/storage/rclone_schema.autogenerated.json +++ b/components/renku_data_services/storage/rclone_schema.autogenerated.json @@ -1,17007 +1,20626 @@ [ - { - "Name": "alias", - "Description": "Alias for an existing remote", - "Prefix": "alias", - "Options": [ - { - "Name": "remote", - "Help": "Remote or path to alias.\n\nCan be \"myremote:path/to/dir\", \"myremote:bucket\", \"myremote:\" or \"/local/path\".", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - } + { + "Name": "alias", + "Description": "Alias for an existing remote", + "Prefix": "alias", + "Options": [ + { + "Name": "remote", + "FieldName": "", + "Help": "Remote or path to alias.\n\nCan be \"myremote:path/to/dir\", \"myremote:bucket\", \"myremote:\" or \"/local/path\".", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "azureblob", + "Description": "Microsoft Azure Blob Storage", + "Prefix": "azureblob", + "Options": [ + { + "Name": "account", + "FieldName": "", + "Help": "Azure Storage Account Name.\n\nSet this to the Azure Storage Account Name in use.\n\nLeave blank to use SAS URL or Emulator, otherwise it needs to be set.\n\nIf this is blank and if env_auth is set it will be read from the\nenvironment variable `AZURE_STORAGE_ACCOUNT_NAME` if possible.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "env_auth", + "FieldName": "", + "Help": "Read credentials from runtime (environment variables, 
CLI or MSI).\n\nSee the [authentication docs](/azureblob#authentication) for full info.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "key", + "FieldName": "", + "Help": "Storage Account Shared Key.\n\nLeave blank to use SAS URL or Emulator.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sas_url", + "FieldName": "", + "Help": "SAS URL for container level access only.\n\nLeave blank if using account/key or Emulator.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "tenant", + "FieldName": "", + "Help": "ID of the service principal's tenant. Also called its directory ID.\n\nSet this if using\n- Service principal with client secret\n- Service principal with certificate\n- User with username and password\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_id", + "FieldName": "", + "Help": "The ID of the client in use.\n\nSet this if using\n- Service principal with client secret\n- Service principal with certificate\n- User with username and password\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "One of the service principal's client secrets\n\nSet this if using\n- Service principal with client secret\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_certificate_path", + "FieldName": "", + "Help": "Path to a PEM or PKCS12 certificate file including the private key.\n\nSet this if using\n- Service principal with certificate\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_certificate_password", + "FieldName": "", + "Help": "Password for the certificate file (optional).\n\nOptionally set this if using\n- Service principal with certificate\n\nAnd the certificate has a password.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_send_certificate_chain", + "FieldName": "", + "Help": "Send the certificate chain when using certificate auth.\n\nSpecifies whether an authentication request will 
include an x5c header\nto support subject name / issuer based authentication. When set to\ntrue, authentication requests include the x5c header.\n\nOptionally set this if using\n- Service principal with certificate\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "username", + "FieldName": "", + "Help": "User name (usually an email address)\n\nSet this if using\n- User with username and password\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password", + "FieldName": "", + "Help": "The user's password\n\nSet this if using\n- User with username and password\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "service_principal_file", + "FieldName": "", + "Help": "Path to file containing credentials for use with a service principal.\n\nLeave blank normally. Needed only if you want to use a service principal instead of interactive login.\n\n $ az ad sp create-for-rbac --name \"\" \\\n --role \"Storage Blob Data Owner\" \\\n --scopes \"/subscriptions//resourceGroups//providers/Microsoft.Storage/storageAccounts//blobServices/default/containers/\" \\\n > azure-principal.json\n\nSee [\"Create an Azure service principal\"](https://docs.microsoft.com/en-us/cli/azure/create-an-azure-service-principal-azure-cli) and [\"Assign an Azure role for access to blob data\"](https://docs.microsoft.com/en-us/azure/storage/common/storage-auth-aad-rbac-cli) pages for more details.\n\nIt may be more convenient to put the credentials directly into the\nrclone config file under the `client_id`, `tenant` and `client_secret`\nkeys instead of setting `service_principal_file`.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "disable_instance_discovery", + "FieldName": "", + "Help": "Skip requesting Microsoft Entra instance metadata\n\nThis should be set true only by applications authenticating in\ndisconnected clouds, or private clouds such as Azure Stack.\n\nIt determines whether rclone requests Microsoft Entra instance\nmetadata from `https://login.microsoft.com/` before\nauthenticating.\n\nSetting this to true will skip this request, making you responsible\nfor ensuring the configured authority is valid and trustworthy.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_msi", + "FieldName": "", + "Help": "Use a managed service identity to authenticate (only works in Azure).\n\nWhen true, use a [managed service identity](https://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/)\nto authenticate to Azure Storage instead of a SAS token or account 
key.\n\nIf the VM(SS) on which this program is running has a system-assigned identity, it will\nbe used by default. If the resource has no system-assigned but exactly one user-assigned identity,\nthe user-assigned identity will be used by default. If the resource has multiple user-assigned\nidentities, the identity to use must be explicitly specified using exactly one of the msi_object_id,\nmsi_client_id, or msi_mi_res_id parameters.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "msi_object_id", + "FieldName": "", + "Help": "Object ID of the user-assigned MSI to use, if any.\n\nLeave blank if msi_client_id or msi_mi_res_id specified.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "msi_client_id", + "FieldName": "", + "Help": "Object ID of the user-assigned MSI to use, if any.\n\nLeave blank if msi_object_id or msi_mi_res_id specified.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "msi_mi_res_id", + "FieldName": "", + "Help": "Azure resource ID of the user-assigned MSI to use, if any.\n\nLeave blank if msi_client_id or msi_object_id specified.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "use_emulator", + "FieldName": "", + "Help": "Uses local storage emulator if provided as 'true'.\n\nLeave blank if using real azure storage endpoint.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_az", + "FieldName": "", + "Help": "Use Azure CLI tool az for authentication\n\nSet to use the [Azure CLI tool az](https://learn.microsoft.com/en-us/cli/azure/)\nas the sole means of authentication.\n\nSetting this can be useful if you wish to use the az CLI on a host with\na System Managed Identity that you do not want to use.\n\nDon't set env_auth at the same time.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for the service.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "upload_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to chunked upload (<= 256 MiB) (deprecated).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + 
"NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Upload chunk size.\n\nNote that this is stored in memory and there may be up to\n\"--transfers\" * \"--azureblob-upload-concurrency\" chunks stored at once\nin memory.", + "Default": 4194304, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "4Mi", + "ValueStr": "4Mi", + "Type": "SizeSuffix" + }, + { + "Name": "upload_concurrency", + "FieldName": "", + "Help": "Concurrency for multipart uploads.\n\nThis is the number of chunks of the same file that are uploaded\nconcurrently.\n\nIf you are uploading small numbers of large files over high-speed\nlinks and these uploads do not fully utilize your bandwidth, then\nincreasing this may help to speed up the transfers.\n\nIn tests, upload speed increases almost linearly with upload\nconcurrency. For example to fill a gigabit pipe it may be necessary to\nraise this to 64. Note that this will use more memory.\n\nNote that chunks are stored in memory and there may be up to\n\"--transfers\" * \"--azureblob-upload-concurrency\" chunks stored at once\nin memory.", + "Default": 16, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "16", + "ValueStr": "16", + "Type": "int" + }, + { + "Name": "copy_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to multipart copy.\n\nAny files larger than this that need to be server-side copied will be\ncopied in chunks of chunk_size using the put block list API.\n\nFiles smaller than this limit will be copied with the Copy Blob API.", + "Default": 8388608, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "8Mi", + "ValueStr": "8Mi", + "Type": "SizeSuffix" + }, + { + "Name": "copy_concurrency", + "FieldName": "", + "Help": "Concurrency for multipart copy.\n\nThis is the number of chunks of the same file that are copied\nconcurrently.\n\nThese chunks are not buffered in memory and Microsoft recommends\nsetting this value to greater than 1000 in the azcopy documentation.\n\nhttps://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize#increase-concurrency\n\nIn tests, copy speed increases almost linearly with copy\nconcurrency.", + "Default": 512, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "512", + "ValueStr": "512", + "Type": "int" + }, + { + "Name": "use_copy_blob", + "FieldName": "", + "Help": "Whether to use the Copy Blob API when copying to the same storage account.\n\nIf true (the default) then rclone will use the Copy Blob API for\ncopies to the same storage account even when the size is above the\ncopy_cutoff.\n\nRclone assumes that the same storage account means the same config\nand does not check for the same storage account in different configs.\n\nThere should be no need to change this value.\n", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + 
"DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "list_chunk", + "FieldName": "", + "Help": "Size of blob list.\n\nThis sets the number of blobs requested in each listing chunk. Default\nis the maximum, 5000. \"List blobs\" requests are permitted 2 minutes\nper megabyte to complete. If an operation is taking longer than 2\nminutes per megabyte on average, it will time out (\n[source](https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations#exceptions-to-default-timeout-interval)\n). This can be used to limit the number of blobs items to return, to\navoid the time out.", + "Default": 5000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "5000", + "ValueStr": "5000", + "Type": "int" + }, + { + "Name": "access_tier", + "FieldName": "", + "Help": "Access tier of blob: hot, cool, cold or archive.\n\nArchived blobs can be restored by setting access tier to hot, cool or\ncold. Leave blank if you intend to use default access tier, which is\nset at account level\n\nIf there is no \"access tier\" specified, rclone doesn't apply any tier.\nrclone performs \"Set Tier\" operation on blobs while uploading, if objects\nare not modified, specifying \"access tier\" to new one will have no effect.\nIf blobs are in \"archive tier\" at remote, trying to perform data transfer\noperations from remote will not be allowed. User should first restore by\ntiering blob to \"Hot\", \"Cool\" or \"Cold\".", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "archive_tier_delete", + "FieldName": "", + "Help": "Delete archive tier blobs before overwriting.\n\nArchive tier blobs cannot be updated. So without this flag, if you\nattempt to update an archive tier blob, then rclone will produce the\nerror:\n\n can't update archive tier blob without --azureblob-archive-tier-delete\n\nWith this flag set then before rclone attempts to overwrite an archive\ntier blob, it will delete the existing blob before uploading its\nreplacement. This has the potential for data loss if the upload fails\n(unlike updating a normal blob) and also may cost more since deleting\narchive tier blobs early may be chargable.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "disable_checksum", + "FieldName": "", + "Help": "Don't store MD5 checksum with object metadata.\n\nNormally rclone will calculate the MD5 checksum of the input before\nuploading it so it can add it to metadata on the object. This is great\nfor data integrity checking but can cause long delays for large files\nto start uploading.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "memory_pool_flush_time", + "FieldName": "", + "Help": "How often internal memory buffer pools will be flushed. 
(no longer used)", + "Default": 60000000000, + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1m0s", + "ValueStr": "1m0s", + "Type": "Duration" + }, + { + "Name": "memory_pool_use_mmap", + "FieldName": "", + "Help": "Whether to use mmap buffers in internal memory pool. (no longer used)", + "Default": false, + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 21078018, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,BackSlash,Del,Ctl,RightPeriod,InvalidUtf8", + "ValueStr": "Slash,BackSlash,Del,Ctl,RightPeriod,InvalidUtf8", + "Type": "Encoding" + }, + { + "Name": "public_access", + "FieldName": "", + "Help": "Public access level of a container: blob or container.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "The container and its blobs can be accessed only with an authorized request.\nIt's a default value." + }, + { + "Value": "blob", + "Help": "Blob data within this container can be read via anonymous request." + }, + { + "Value": "container", + "Help": "Allow full public read access for container and blob data." + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "amazon cloud drive", - "Description": "Amazon Drive", - "Prefix": "acd", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - 
"NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "checkpoint", - "Help": "Checkpoint for internal polling (debug).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "upload_wait_per_gb", - "Help": "Additional time per GiB to wait after a failed complete upload to see if it appears.\n\nSometimes Amazon Drive gives an error when a file has been fully\nuploaded but the file appears anyway after a little while. This\nhappens sometimes for files over 1 GiB in size and nearly every time for\nfiles bigger than 10 GiB. This parameter controls the time rclone waits\nfor the file to appear.\n\nThe default value for this parameter is 3 minutes per GiB, so by\ndefault it will wait 3 minutes for every GiB uploaded to see if the\nfile appears.\n\nYou can disable this feature by setting it to 0. This may cause\nconflict errors as rclone retries the failed upload but the file will\nmost likely appear correctly eventually.\n\nThese values were determined empirically by observing lots of uploads\nof big files for a range of file sizes.\n\nUpload with the \"-v\" flag to see more info about what rclone is doing\nin this situation.", - "Provider": "", - "Default": 180000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "3m0s", - "ValueStr": "3m0s", - "Type": "Duration" - }, - { - "Name": "templink_threshold", - "Help": "Files \u003e= this size will be downloaded via their tempLink.\n\nFiles this size or more will be downloaded via their \"tempLink\". This\nis to work around a problem with Amazon Drive which blocks downloads\nof files bigger than about 10 GiB. 
The default for this is 9 GiB which\nshouldn't need to be changed.\n\nTo download files above this threshold, rclone requests a \"tempLink\"\nwhich downloads the file through a temporary URL directly from the\nunderlying S3 storage.", - "Provider": "", - "Default": 9663676416, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "9Gi", - "ValueStr": "9Gi", - "Type": "SizeSuffix" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50331650, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,InvalidUtf8,Dot", - "ValueStr": "Slash,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "directory_markers", + "FieldName": "", + "Help": "Upload an empty object with a trailing slash when a new directory is created\n\nEmpty folders are unsupported for bucket based remotes, this option\ncreates an empty object ending with \"/\", to persist the folder.\n\nThis object also has the metadata \"hdi_isfolder = true\" to conform to\nthe Microsoft standard.\n ", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_check_container", + "FieldName": "", + "Help": "If set, don't attempt to check the container exists or create it.\n\nThis can be useful when trying to minimise the number of transactions\nrclone does if you know the container exists already.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_head_object", + "FieldName": "", + "Help": "If set, do not do HEAD before GET when getting objects.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "delete_snapshots", + "FieldName": "", + "Help": "Set to specify how to deal with snapshots on blob deletion.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "By default, the delete operation fails if a blob has snapshots" + }, + { + "Value": "include", + "Help": "Specify 'include' to remove the root blob and all its snapshots" + }, + { + "Value": "only", + "Help": "Specify 'only' to remove only the snapshots but keep the root blob." 
+ } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "azureblob", - "Description": "Microsoft Azure Blob Storage", - "Prefix": "azureblob", - "Options": [ - { - "Name": "account", - "Help": "Azure Storage Account Name.\n\nSet this to the Azure Storage Account Name in use.\n\nLeave blank to use SAS URL or Emulator, otherwise it needs to be set.\n\nIf this is blank and if env_auth is set it will be read from the\nenvironment variable `AZURE_STORAGE_ACCOUNT_NAME` if possible.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "env_auth", - "Help": "Read credentials from runtime (environment variables, CLI or MSI).\n\nSee the [authentication docs](/azureblob#authentication) for full info.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "key", - "Help": "Storage Account Shared Key.\n\nLeave blank to use SAS URL or Emulator.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "sas_url", - "Help": "SAS URL for container level access only.\n\nLeave blank if using account/key or Emulator.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "tenant", - "Help": "ID of the service principal's tenant. 
Also called its directory ID.\n\nSet this if using\n- Service principal with client secret\n- Service principal with certificate\n- User with username and password\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_id", - "Help": "The ID of the client in use.\n\nSet this if using\n- Service principal with client secret\n- Service principal with certificate\n- User with username and password\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "One of the service principal's client secrets\n\nSet this if using\n- Service principal with client secret\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_certificate_path", - "Help": "Path to a PEM or PKCS12 certificate file including the private key.\n\nSet this if using\n- Service principal with certificate\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_certificate_password", - "Help": "Password for the certificate file (optional).\n\nOptionally set this if using\n- Service principal with certificate\n\nAnd the certificate has a password.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_send_certificate_chain", - "Help": "Send the certificate chain when using certificate auth.\n\nSpecifies whether an authentication request will include an x5c header\nto support subject name / issuer based authentication. 
When set to\ntrue, authentication requests include the x5c header.\n\nOptionally set this if using\n- Service principal with certificate\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "username", - "Help": "User name (usually an email address)\n\nSet this if using\n- User with username and password\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "password", - "Help": "The user's password\n\nSet this if using\n- User with username and password\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "service_principal_file", - "Help": "Path to file containing credentials for use with a service principal.\n\nLeave blank normally. Needed only if you want to use a service principal instead of interactive login.\n\n $ az ad sp create-for-rbac --name \"\u003cname\u003e\" \\\n --role \"Storage Blob Data Owner\" \\\n --scopes \"/subscriptions/\u003csubscription\u003e/resourceGroups/\u003cresource-group\u003e/providers/Microsoft.Storage/storageAccounts/\u003cstorage-account\u003e/blobServices/default/containers/\u003ccontainer\u003e\" \\\n \u003e azure-principal.json\n\nSee [\"Create an Azure service principal\"](https://docs.microsoft.com/en-us/cli/azure/create-an-azure-service-principal-azure-cli) and [\"Assign an Azure role for access to blob data\"](https://docs.microsoft.com/en-us/azure/storage/common/storage-auth-aad-rbac-cli) pages for more details.\n\nIt may be more convenient to put the credentials directly into the\nrclone config file under the `client_id`, `tenant` and `client_secret`\nkeys instead of setting `service_principal_file`.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "use_msi", - "Help": "Use a managed service identity to authenticate (only works in Azure).\n\nWhen true, use a [managed service identity](https://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/)\nto authenticate to Azure Storage instead of a SAS token or account key.\n\nIf the VM(SS) on which this program is running has a system-assigned identity, it will\nbe used by default. If the resource has no system-assigned but exactly one user-assigned identity,\nthe user-assigned identity will be used by default. 
If the resource has multiple user-assigned\nidentities, the identity to use must be explicitly specified using exactly one of the msi_object_id,\nmsi_client_id, or msi_mi_res_id parameters.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "msi_object_id", - "Help": "Object ID of the user-assigned MSI to use, if any.\n\nLeave blank if msi_client_id or msi_mi_res_id specified.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "msi_client_id", - "Help": "Object ID of the user-assigned MSI to use, if any.\n\nLeave blank if msi_object_id or msi_mi_res_id specified.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "msi_mi_res_id", - "Help": "Azure resource ID of the user-assigned MSI to use, if any.\n\nLeave blank if msi_client_id or msi_object_id specified.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "use_emulator", - "Help": "Uses local storage emulator if provided as 'true'.\n\nLeave blank if using real azure storage endpoint.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "endpoint", - "Help": "Endpoint for the service.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "upload_cutoff", - "Help": "Cutoff for switching to chunked upload (\u003c= 256 MiB) (deprecated).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "chunk_size", - "Help": "Upload chunk size.\n\nNote that this is stored in memory and there may be up to\n\"--transfers\" * \"--azureblob-upload-concurrency\" chunks stored at once\nin memory.", - "Provider": "", - "Default": 4194304, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "4Mi", - "ValueStr": "4Mi", - "Type": "SizeSuffix" - }, - { - "Name": "upload_concurrency", - "Help": "Concurrency for multipart uploads.\n\nThis is the 
number of chunks of the same file that are uploaded\nconcurrently.\n\nIf you are uploading small numbers of large files over high-speed\nlinks and these uploads do not fully utilize your bandwidth, then\nincreasing this may help to speed up the transfers.\n\nIn tests, upload speed increases almost linearly with upload\nconcurrency. For example to fill a gigabit pipe it may be necessary to\nraise this to 64. Note that this will use more memory.\n\nNote that chunks are stored in memory and there may be up to\n\"--transfers\" * \"--azureblob-upload-concurrency\" chunks stored at once\nin memory.", - "Provider": "", - "Default": 16, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "16", - "ValueStr": "16", - "Type": "int" - }, - { - "Name": "list_chunk", - "Help": "Size of blob list.\n\nThis sets the number of blobs requested in each listing chunk. Default\nis the maximum, 5000. \"List blobs\" requests are permitted 2 minutes\nper megabyte to complete. If an operation is taking longer than 2\nminutes per megabyte on average, it will time out (\n[source](https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations#exceptions-to-default-timeout-interval)\n). This can be used to limit the number of blobs items to return, to\navoid the time out.", - "Provider": "", - "Default": 5000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "5000", - "ValueStr": "5000", - "Type": "int" - }, - { - "Name": "access_tier", - "Help": "Access tier of blob: hot, cool or archive.\n\nArchived blobs can be restored by setting access tier to hot or\ncool. Leave blank if you intend to use default access tier, which is\nset at account level\n\nIf there is no \"access tier\" specified, rclone doesn't apply any tier.\nrclone performs \"Set Tier\" operation on blobs while uploading, if objects\nare not modified, specifying \"access tier\" to new one will have no effect.\nIf blobs are in \"archive tier\" at remote, trying to perform data transfer\noperations from remote will not be allowed. User should first restore by\ntiering blob to \"Hot\" or \"Cool\".", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "archive_tier_delete", - "Help": "Delete archive tier blobs before overwriting.\n\nArchive tier blobs cannot be updated. So without this flag, if you\nattempt to update an archive tier blob, then rclone will produce the\nerror:\n\n can't update archive tier blob without --azureblob-archive-tier-delete\n\nWith this flag set then before rclone attempts to overwrite an archive\ntier blob, it will delete the existing blob before uploading its\nreplacement. 
This has the potential for data loss if the upload fails\n(unlike updating a normal blob) and also may cost more since deleting\narchive tier blobs early may be chargable.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "disable_checksum", - "Help": "Don't store MD5 checksum with object metadata.\n\nNormally rclone will calculate the MD5 checksum of the input before\nuploading it so it can add it to metadata on the object. This is great\nfor data integrity checking but can cause long delays for large files\nto start uploading.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "memory_pool_flush_time", - "Help": "How often internal memory buffer pools will be flushed.\n\nUploads which requires additional buffers (f.e multipart) will use memory pool for allocations.\nThis option controls how often unused buffers will be removed from the pool.", - "Provider": "", - "Default": 60000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1m0s", - "ValueStr": "1m0s", - "Type": "Duration" - }, - { - "Name": "memory_pool_use_mmap", - "Help": "Whether to use mmap buffers in internal memory pool.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 21078018, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,BackSlash,Del,Ctl,RightPeriod,InvalidUtf8", - "ValueStr": "Slash,BackSlash,Del,Ctl,RightPeriod,InvalidUtf8", - "Type": "MultiEncoder" - }, - { - "Name": "public_access", - "Help": "Public access level of a container: blob or container.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "The container and its blobs can be accessed only with an authorized request.\nIt's a default value.", - "Provider": "" - }, - { - "Value": "blob", - "Help": "Blob data within this container can be read via anonymous request.", - "Provider": "" - }, - { - "Value": "container", - "Help": "Allow full public read access for container and blob data.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "directory_markers", - "Help": "Upload an empty object with a trailing slash when a new directory is created\n\nEmpty folders are unsupported for bucket based remotes, this 
option\ncreates an empty object ending with \"/\", to persist the folder.\n\nThis object also has the metadata \"hdi_isfolder = true\" to conform to\nthe Microsoft standard.\n ", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_check_container", - "Help": "If set, don't attempt to check the container exists or create it.\n\nThis can be useful when trying to minimise the number of transactions\nrclone does if you know the container exists already.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_head_object", - "Help": "If set, do not do HEAD before GET when getting objects.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": true, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "azurefiles", + "Description": "Microsoft Azure Files", + "Prefix": "azurefiles", + "Options": [ + { + "Name": "account", + "FieldName": "", + "Help": "Azure Storage Account Name.\n\nSet this to the Azure Storage Account Name in use.\n\nLeave blank to use SAS URL or connection string, otherwise it needs to be set.\n\nIf this is blank and if env_auth is set it will be read from the\nenvironment variable `AZURE_STORAGE_ACCOUNT_NAME` if possible.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "share_name", + "FieldName": "", + "Help": "Azure Files Share Name.\n\nThis is required and is the name of the share to access.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "env_auth", + "FieldName": "", + "Help": "Read credentials from runtime (environment variables, CLI or MSI).\n\nSee the [authentication docs](/azurefiles#authentication) for full info.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": 
"key", + "FieldName": "", + "Help": "Storage Account Shared Key.\n\nLeave blank to use SAS URL or connection string.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sas_url", + "FieldName": "", + "Help": "SAS URL.\n\nLeave blank if using account/key or connection string.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "connection_string", + "FieldName": "", + "Help": "Azure Files Connection String.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "tenant", + "FieldName": "", + "Help": "ID of the service principal's tenant. Also called its directory ID.\n\nSet this if using\n- Service principal with client secret\n- Service principal with certificate\n- User with username and password\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_id", + "FieldName": "", + "Help": "The ID of the client in use.\n\nSet this if using\n- Service principal with client secret\n- Service principal with certificate\n- User with username and password\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "One of the service principal's client secrets\n\nSet this if using\n- Service principal with client secret\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_certificate_path", + "FieldName": "", + "Help": "Path to a PEM or PKCS12 certificate file including the private key.\n\nSet this if using\n- Service principal with certificate\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_certificate_password", + "FieldName": "", + "Help": "Password for the certificate file (optional).\n\nOptionally set this if using\n- Service principal with certificate\n\nAnd the certificate has a password.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_send_certificate_chain", + "FieldName": "", + "Help": "Send the certificate chain when using certificate auth.\n\nSpecifies whether an authentication request will include an x5c header\nto 
support subject name / issuer based authentication. When set to\ntrue, authentication requests include the x5c header.\n\nOptionally set this if using\n- Service principal with certificate\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "username", + "FieldName": "", + "Help": "User name (usually an email address)\n\nSet this if using\n- User with username and password\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password", + "FieldName": "", + "Help": "The user's password\n\nSet this if using\n- User with username and password\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "service_principal_file", + "FieldName": "", + "Help": "Path to file containing credentials for use with a service principal.\n\nLeave blank normally. Needed only if you want to use a service principal instead of interactive login.\n\n $ az ad sp create-for-rbac --name \"\" \\\n --role \"Storage Files Data Owner\" \\\n --scopes \"/subscriptions//resourceGroups//providers/Microsoft.Storage/storageAccounts//blobServices/default/containers/\" \\\n > azure-principal.json\n\nSee [\"Create an Azure service principal\"](https://docs.microsoft.com/en-us/cli/azure/create-an-azure-service-principal-azure-cli) and [\"Assign an Azure role for access to files data\"](https://docs.microsoft.com/en-us/azure/storage/common/storage-auth-aad-rbac-cli) pages for more details.\n\n**NB** this section needs updating for Azure Files - pull requests appreciated!\n\nIt may be more convenient to put the credentials directly into the\nrclone config file under the `client_id`, `tenant` and `client_secret`\nkeys instead of setting `service_principal_file`.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "use_msi", + "FieldName": "", + "Help": "Use a managed service identity to authenticate (only works in Azure).\n\nWhen true, use a [managed service identity](https://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/)\nto authenticate to Azure Storage instead of a SAS token or account key.\n\nIf the VM(SS) on which this program is running has a system-assigned identity, it will\nbe used by default. If the resource has no system-assigned but exactly one user-assigned identity,\nthe user-assigned identity will be used by default. 
If the resource has multiple user-assigned\nidentities, the identity to use must be explicitly specified using exactly one of the msi_object_id,\nmsi_client_id, or msi_mi_res_id parameters.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "msi_object_id", + "FieldName": "", + "Help": "Object ID of the user-assigned MSI to use, if any.\n\nLeave blank if msi_client_id or msi_mi_res_id specified.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "msi_client_id", + "FieldName": "", + "Help": "Object ID of the user-assigned MSI to use, if any.\n\nLeave blank if msi_object_id or msi_mi_res_id specified.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "msi_mi_res_id", + "FieldName": "", + "Help": "Azure resource ID of the user-assigned MSI to use, if any.\n\nLeave blank if msi_client_id or msi_object_id specified.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "disable_instance_discovery", + "FieldName": "", + "Help": "Skip requesting Microsoft Entra instance metadata\nThis should be set true only by applications authenticating in\ndisconnected clouds, or private clouds such as Azure Stack.\nIt determines whether rclone requests Microsoft Entra instance\nmetadata from `https://login.microsoft.com/` before\nauthenticating.\nSetting this to true will skip this request, making you responsible\nfor ensuring the configured authority is valid and trustworthy.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_az", + "FieldName": "", + "Help": "Use Azure CLI tool az for authentication\nSet to use the [Azure CLI tool az](https://learn.microsoft.com/en-us/cli/azure/)\nas the sole means of authentication.\nSetting this can be useful if you wish to use the az CLI on a host with\na System Managed Identity that you do not want to use.\nDon't set env_auth at the same time.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for the service.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Upload chunk size.\n\nNote that this is stored in memory and 
there may be up to\n\"--transfers\" * \"--azurefile-upload-concurrency\" chunks stored at once\nin memory.", + "Default": 4194304, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "4Mi", + "ValueStr": "4Mi", + "Type": "SizeSuffix" + }, + { + "Name": "upload_concurrency", + "FieldName": "", + "Help": "Concurrency for multipart uploads.\n\nThis is the number of chunks of the same file that are uploaded\nconcurrently.\n\nIf you are uploading small numbers of large files over high-speed\nlinks and these uploads do not fully utilize your bandwidth, then\nincreasing this may help to speed up the transfers.\n\nNote that chunks are stored in memory and there may be up to\n\"--transfers\" * \"--azurefile-upload-concurrency\" chunks stored at once\nin memory.", + "Default": 16, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "16", + "ValueStr": "16", + "Type": "int" + }, + { + "Name": "max_stream_size", + "FieldName": "", + "Help": "Max size for streamed files.\n\nAzure files needs to know in advance how big the file will be. When\nrclone doesn't know it uses this value instead.\n\nThis will be used when rclone is streaming data, the most common uses are:\n\n- Uploading files with `--vfs-cache-mode off` with `rclone mount`\n- Using `rclone rcat`\n- Copying files with unknown length\n\nYou will need this much free space in the share as the file will be this size temporarily.\n", + "Default": 10737418240, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10Gi", + "ValueStr": "10Gi", + "Type": "SizeSuffix" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 54634382, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,RightPeriod,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,RightPeriod,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "b2", + "Description": "Backblaze B2", + "Prefix": "b2", + "Options": [ + { + "Name": "account", + "FieldName": "", + "Help": "Account ID or Application Key ID.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "key", + "FieldName": "", + "Help": "Application Key.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + 
"Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for the service.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "test_mode", + "FieldName": "", + "Help": "A flag string for X-Bz-Test-Mode header for debugging.\n\nThis is for debugging purposes only. Setting it to one of the strings\nbelow will cause b2 to return specific errors:\n\n * \"fail_some_uploads\"\n * \"expire_some_account_authorization_tokens\"\n * \"force_cap_exceeded\"\n\nThese will be set in the \"X-Bz-Test-Mode\" header which is documented\nin the [b2 integrations checklist](https://www.backblaze.com/docs/cloud-storage-integration-checklist).", + "Default": "", + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "versions", + "FieldName": "", + "Help": "Include old versions in directory listings.\n\nNote that when using this no file write operations are permitted,\nso you can't upload files or delete them.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "version_at", + "FieldName": "", + "Help": "Show file versions as they were at the specified time.\n\nNote that when using this no file write operations are permitted,\nso you can't upload files or delete them.", + "Default": "0001-01-01T00:00:00Z", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "off", + "ValueStr": "off", + "Type": "Time" + }, + { + "Name": "hard_delete", + "FieldName": "", + "Help": "Permanently delete files on remote removal, otherwise hide files.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "upload_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to chunked upload.\n\nFiles above this size will be uploaded in chunks of \"--b2-chunk-size\".\n\nThis value should be set no larger than 4.657 GiB (== 5 GB).", + "Default": 209715200, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "200Mi", + "ValueStr": "200Mi", + "Type": "SizeSuffix" + }, + { + "Name": "copy_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to multipart copy.\n\nAny files larger than this that need to be server-side copied will be\ncopied in chunks of this size.\n\nThe minimum is 0 and the maximum is 4.6 GiB.", + "Default": 4294967296, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "4Gi", + "ValueStr": "4Gi", + "Type": "SizeSuffix" + }, + { + 
"Name": "chunk_size", + "FieldName": "", + "Help": "Upload chunk size.\n\nWhen uploading large files, chunk the file into this size.\n\nMust fit in memory. These chunks are buffered in memory and there\nmight a maximum of \"--transfers\" chunks in progress at once.\n\n5,000,000 Bytes is the minimum size.", + "Default": 100663296, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "96Mi", + "ValueStr": "96Mi", + "Type": "SizeSuffix" + }, + { + "Name": "upload_concurrency", + "FieldName": "", + "Help": "Concurrency for multipart uploads.\n\nThis is the number of chunks of the same file that are uploaded\nconcurrently.\n\nNote that chunks are stored in memory and there may be up to\n\"--transfers\" * \"--b2-upload-concurrency\" chunks stored at once\nin memory.", + "Default": 4, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "4", + "ValueStr": "4", + "Type": "int" + }, + { + "Name": "disable_checksum", + "FieldName": "", + "Help": "Disable checksums for large (> upload cutoff) files.\n\nNormally rclone will calculate the SHA1 checksum of the input before\nuploading it so it can add it to metadata on the object. This is great\nfor data integrity checking but can cause long delays for large files\nto start uploading.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "download_url", + "FieldName": "", + "Help": "Custom endpoint for downloads.\n\nThis is usually set to a Cloudflare CDN URL as Backblaze offers\nfree egress for data downloaded through the Cloudflare network.\nRclone works with private buckets by sending an \"Authorization\" header.\nIf the custom endpoint rewrites the requests for authentication,\ne.g., in Cloudflare Workers, this header needs to be handled properly.\nLeave blank if you want to use the endpoint provided by Backblaze.\n\nThe URL provided here SHOULD have the protocol and SHOULD NOT have\na trailing slash or specify the /file/bucket subpath as rclone will\nrequest files with \"{download_url}/file/{bucket_name}/{path}\".\n\nExample:\n> https://mysubdomain.mydomain.tld\n(No trailing \"/\", \"file\" or \"bucket\")", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "download_auth_duration", + "FieldName": "", + "Help": "Time before the public link authorization token will expire in s or suffix ms|s|m|h|d.\n\nThis is used in combination with \"rclone link\" for making files\naccessible to the public and sets the duration before the download\nauthorization token will expire.\n\nThe minimum value is 1 second. The maximum value is one week.", + "Default": 604800000000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1w", + "ValueStr": "1w", + "Type": "Duration" + }, + { + "Name": "memory_pool_flush_time", + "FieldName": "", + "Help": "How often internal memory buffer pools will be flushed. 
(no longer used)", + "Default": 60000000000, + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1m0s", + "ValueStr": "1m0s", + "Type": "Duration" + }, + { + "Name": "memory_pool_use_mmap", + "FieldName": "", + "Help": "Whether to use mmap buffers in internal memory pool. (no longer used)", + "Default": false, + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "lifecycle", + "FieldName": "", + "Help": "Set the number of days deleted files should be kept when creating a bucket.\n\nOn bucket creation, this parameter is used to create a lifecycle rule\nfor the entire bucket.\n\nIf lifecycle is 0 (the default) it does not create a lifecycle rule so\nthe default B2 behaviour applies. This is to create versions of files\non delete and overwrite and to keep them indefinitely.\n\nIf lifecycle is >0 then it creates a single rule setting the number of\ndays before a file that is deleted or overwritten is deleted\npermanently. This is known as daysFromHidingToDeleting in the b2 docs.\n\nThe minimum value for this parameter is 1 day.\n\nYou can also enable hard_delete in the config also which will mean\ndeletions won't cause versions but overwrites will still cause\nversions to be made.\n\nSee: [rclone backend lifecycle](#lifecycle) for setting lifecycles after bucket creation.\n", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0", + "ValueStr": "0", + "Type": "int" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50438146, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "lifecycle", + "Short": "Read or set the lifecycle for a bucket", + "Long": "This command can be used to read or set the lifecycle for a bucket.\n\nUsage Examples:\n\nTo show the current lifecycle rules:\n\n rclone backend lifecycle b2:bucket\n\nThis will dump something like this showing the lifecycle rules.\n\n [\n {\n \"daysFromHidingToDeleting\": 1,\n \"daysFromUploadingToHiding\": null,\n \"daysFromStartingToCancelingUnfinishedLargeFiles\": null,\n \"fileNamePrefix\": \"\"\n }\n ]\n\nIf there are no lifecycle rules (the default) then it will just return [].\n\nTo reset the current lifecycle rules:\n\n rclone backend lifecycle b2:bucket -o daysFromHidingToDeleting=30\n rclone backend lifecycle b2:bucket -o daysFromUploadingToHiding=5 -o daysFromHidingToDeleting=1\n\nThis will run and then print the new lifecycle rules as above.\n\nRclone 
only lets you set lifecycles for the whole bucket with the\nfileNamePrefix = \"\".\n\nYou can't disable versioning with B2. The best you can do is to set\nthe daysFromHidingToDeleting to 1 day. You can enable hard_delete in\nthe config also which will mean deletions won't cause versions but\noverwrites will still cause versions to be made.\n\n rclone backend lifecycle b2:bucket -o daysFromHidingToDeleting=1\n\nSee: https://www.backblaze.com/docs/cloud-storage-lifecycle-rules\n", + "Opts": { + "daysFromHidingToDeleting": "After a file has been hidden for this many days it is deleted. 0 is off.", + "daysFromStartingToCancelingUnfinishedLargeFiles": "Cancels any unfinished large file versions after this many days", + "daysFromUploadingToHiding": "This many days after uploading a file is hidden" + } + }, + { + "Name": "cleanup", + "Short": "Remove unfinished large file uploads.", + "Long": "This command removes unfinished large file uploads of age greater than\nmax-age, which defaults to 24 hours.\n\nNote that you can use --interactive/-i or --dry-run with this command to see what\nit would do.\n\n rclone backend cleanup b2:bucket/path/to/object\n rclone backend cleanup -o max-age=7w b2:bucket/path/to/object\n\nDurations are parsed as per the rest of rclone, 2h, 7d, 7w etc.\n", + "Opts": { + "max-age": "Max age of upload to delete" + } + }, + { + "Name": "cleanup-hidden", + "Short": "Remove old versions of files.", + "Long": "This command removes any old hidden versions of files.\n\nNote that you can use --interactive/-i or --dry-run with this command to see what\nit would do.\n\n rclone backend cleanup-hidden b2:bucket/path/to/dir\n", + "Opts": null + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "box", + "Description": "Box", + "Prefix": "box", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + 
}, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "root_folder_id", + "FieldName": "", + "Help": "Fill in for rclone to use a non root folder as its starting point.", + "Default": "0", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "0", + "ValueStr": "0", + "Type": "string" + }, + { + "Name": "box_config_file", + "FieldName": "", + "Help": "Box App config.json location\n\nLeave blank normally.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "access_token", + "FieldName": "", + "Help": "Box App Primary Access Token\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "box_sub_type", + "FieldName": "", + "Help": "", + "Default": "user", + "Value": null, + "Examples": [ + { + "Value": "user", + "Help": "Rclone should act on behalf of a user." + }, + { + "Value": "enterprise", + "Help": "Rclone should act on behalf of a service account." 
+ } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "user", + "ValueStr": "user", + "Type": "string" + }, + { + "Name": "upload_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to multipart upload (>= 50 MiB).", + "Default": 52428800, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "50Mi", + "ValueStr": "50Mi", + "Type": "SizeSuffix" + }, + { + "Name": "commit_retries", + "FieldName": "", + "Help": "Max number of times to try committing a multipart file.", + "Default": 100, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "100", + "ValueStr": "100", + "Type": "int" + }, + { + "Name": "list_chunk", + "FieldName": "", + "Help": "Size of listing chunk 1-1000.", + "Default": 1000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1000", + "ValueStr": "1000", + "Type": "int" + }, + { + "Name": "owned_by", + "FieldName": "", + "Help": "Only show items owned by the login (email address) passed in.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "impersonate", + "FieldName": "", + "Help": "Impersonate this user ID when using a service account.\n\nSetting this flag allows rclone, when using a JWT service account, to\nact on behalf of another user by setting the as-user header.\n\nThe user ID is the Box identifier for a user. 
User IDs can found for\nany user via the GET /users endpoint, which is only available to\nadmins, or by calling the GET /users/me endpoint with an authenticated\nuser session.\n\nSee: https://developer.box.com/guides/authentication/jwt/as-user/\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 52535298, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,BackSlash,Del,Ctl,RightSpace,InvalidUtf8,Dot", + "ValueStr": "Slash,BackSlash,Del,Ctl,RightSpace,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "cache", + "Description": "Cache a remote", + "Prefix": "cache", + "Options": [ + { + "Name": "remote", + "FieldName": "", + "Help": "Remote to cache.\n\nNormally should contain a ':' and a path, e.g. \"myremote:path/to/dir\",\n\"myremote:bucket\" or maybe \"myremote:\" (not recommended).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "plex_url", + "FieldName": "", + "Help": "The URL of the Plex server.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "plex_username", + "FieldName": "", + "Help": "The username of the Plex user.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "plex_password", + "FieldName": "", + "Help": "The password of the Plex user.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "plex_token", + "FieldName": "", + "Help": "The plex token for authentication - auto set normally.", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "plex_insecure", + "FieldName": "", + "Help": "Skip all certificate verification when connecting to the Plex server.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + 
"Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "The size of a chunk (partial file data).\n\nUse lower numbers for slower connections. If the chunk size is\nchanged, any downloaded chunks will be invalid and cache-chunk-path\nwill need to be cleared or unexpected EOF errors will occur.", + "Default": 5242880, + "Value": null, + "Examples": [ + { + "Value": "1M", + "Help": "1 MiB" + }, + { + "Value": "5M", + "Help": "5 MiB" + }, + { + "Value": "10M", + "Help": "10 MiB" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "5Mi", + "ValueStr": "5Mi", + "Type": "SizeSuffix" + }, + { + "Name": "info_age", + "FieldName": "", + "Help": "How long to cache file structure information (directory listings, file size, times, etc.). \nIf all write operations are done through the cache then you can safely make\nthis value very large as the cache store will also be updated in real time.", + "Default": 21600000000000, + "Value": null, + "Examples": [ + { + "Value": "1h", + "Help": "1 hour" + }, + { + "Value": "24h", + "Help": "24 hours" + }, + { + "Value": "48h", + "Help": "48 hours" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "6h0m0s", + "ValueStr": "6h0m0s", + "Type": "Duration" + }, + { + "Name": "chunk_total_size", + "FieldName": "", + "Help": "The total size that the chunks can take up on the local disk.\n\nIf the cache exceeds this value then it will start to delete the\noldest chunks until it goes under this value.", + "Default": 10737418240, + "Value": null, + "Examples": [ + { + "Value": "500M", + "Help": "500 MiB" + }, + { + "Value": "1G", + "Help": "1 GiB" + }, + { + "Value": "10G", + "Help": "10 GiB" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10Gi", + "ValueStr": "10Gi", + "Type": "SizeSuffix" + }, + { + "Name": "db_path", + "FieldName": "", + "Help": "Directory to store file structure metadata DB.\n\nThe remote name is used as the DB file name.", + "Default": "/home/vscode/.cache/rclone/cache-backend", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "/home/vscode/.cache/rclone/cache-backend", + "ValueStr": "/home/vscode/.cache/rclone/cache-backend", + "Type": "string" + }, + { + "Name": "chunk_path", + "FieldName": "", + "Help": "Directory to cache chunk files.\n\nPath to where partial file data (chunks) are stored locally. The remote\nname is appended to the final path.\n\nThis config follows the \"--cache-db-path\". 
If you specify a custom\nlocation for \"--cache-db-path\" and don't specify one for \"--cache-chunk-path\"\nthen \"--cache-chunk-path\" will use the same path as \"--cache-db-path\".", + "Default": "/home/vscode/.cache/rclone/cache-backend", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "/home/vscode/.cache/rclone/cache-backend", + "ValueStr": "/home/vscode/.cache/rclone/cache-backend", + "Type": "string" + }, + { + "Name": "db_purge", + "FieldName": "", + "Help": "Clear all the cached data for this remote on start.", + "Default": false, + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "chunk_clean_interval", + "FieldName": "", + "Help": "How often should the cache perform cleanups of the chunk storage.\n\nThe default value should be ok for most people. If you find that the\ncache goes over \"cache-chunk-total-size\" too often then try to lower\nthis value to force it to perform cleanups more often.", + "Default": 60000000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1m0s", + "ValueStr": "1m0s", + "Type": "Duration" + }, + { + "Name": "read_retries", + "FieldName": "", + "Help": "How many times to retry a read from a cache storage.\n\nSince reading from a cache stream is independent from downloading file\ndata, readers can get to a point where there's no more data in the\ncache. Most of the times this can indicate a connectivity issue if\ncache isn't able to provide file data anymore.\n\nFor really slow connections, increase this to a point where the stream is\nable to provide data but your experience will be very stuttering.", + "Default": 10, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10", + "ValueStr": "10", + "Type": "int" + }, + { + "Name": "workers", + "FieldName": "", + "Help": "How many workers should run in parallel to download chunks.\n\nHigher values will mean more parallel processing (better CPU needed)\nand more concurrent requests on the cloud provider. This impacts\nseveral aspects like the cloud provider API limits, more stress on the\nhardware that rclone runs on but it also means that streams will be\nmore fluid and data will be available much more faster to readers.\n\n**Note**: If the optional Plex integration is enabled then this\nsetting will adapt to the type of reading performed and the value\nspecified here will be used as a maximum number of workers to use.", + "Default": 4, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "4", + "ValueStr": "4", + "Type": "int" + }, + { + "Name": "chunk_no_memory", + "FieldName": "", + "Help": "Disable the in-memory cache for storing chunks during streaming.\n\nBy default, cache will keep file data during streaming in RAM as well\nto provide it to readers as fast as possible.\n\nThis transient data is evicted as soon as it is read and the number of\nchunks stored doesn't exceed the number of workers. 
However, depending\non other settings like \"cache-chunk-size\" and \"cache-workers\" this footprint\ncan increase if there are parallel streams too (multiple files being read\nat the same time).\n\nIf the hardware permits it, use this feature to provide an overall better\nperformance during streaming but it can also be disabled if RAM is not\navailable on the local machine.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "rps", + "FieldName": "", + "Help": "Limits the number of requests per second to the source FS (-1 to disable).\n\nThis setting places a hard limit on the number of requests per second\nthat cache will be doing to the cloud provider remote and try to\nrespect that value by setting waits between reads.\n\nIf you find that you're getting banned or limited on the cloud\nprovider through cache and know that a smaller number of requests per\nsecond will allow you to work with it then you can use this setting\nfor that.\n\nA good balance of all the other settings should make this setting\nuseless but it is available to set for more special cases.\n\n**NOTE**: This will limit the number of requests during streams but\nother API calls to the cloud provider like directory listings will\nstill pass.", + "Default": -1, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "-1", + "ValueStr": "-1", + "Type": "int" + }, + { + "Name": "writes", + "FieldName": "", + "Help": "Cache file data on writes through the FS.\n\nIf you need to read files immediately after you upload them through\ncache you can enable this flag to have their data stored in the\ncache store at the same time during upload.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "tmp_upload_path", + "FieldName": "", + "Help": "Directory to keep temporary files until they are uploaded.\n\nThis is the path where cache will use as a temporary storage for new\nfiles that need to be uploaded to the cloud provider.\n\nSpecifying a value will enable this feature. 
Without it, it is\ncompletely disabled and files will be uploaded directly to the cloud\nprovider", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "tmp_wait_time", + "FieldName": "", + "Help": "How long should files be stored in local cache before being uploaded.\n\nThis is the duration that a file must wait in the temporary location\n_cache-tmp-upload-path_ before it is selected for upload.\n\nNote that only one file is uploaded at a time and it can take longer\nto start the upload if a queue formed for this purpose.", + "Default": 15000000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "15s", + "ValueStr": "15s", + "Type": "Duration" + }, + { + "Name": "db_wait_time", + "FieldName": "", + "Help": "How long to wait for the DB to be available - 0 is unlimited.\n\nOnly one process can have the DB open at any one time, so rclone waits\nfor this duration for the DB to become available before it gives an\nerror.\n\nIf you set it to 0 then it will wait forever.", + "Default": 1000000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1s", + "ValueStr": "1s", + "Type": "Duration" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "stats", + "Short": "Print stats on the cache backend in JSON format.", + "Long": "", + "Opts": null + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "chunker", + "Description": "Transparently chunk/split large files", + "Prefix": "chunker", + "Options": [ + { + "Name": "remote", + "FieldName": "", + "Help": "Remote to chunk/unchunk.\n\nNormally should contain a ':' and a path, e.g. 
\"myremote:path/to/dir\",\n\"myremote:bucket\" or maybe \"myremote:\" (not recommended).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Files larger than chunk size will be split in chunks.", + "Default": 2147483648, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "2Gi", + "ValueStr": "2Gi", + "Type": "SizeSuffix" + }, + { + "Name": "name_format", + "FieldName": "", + "Help": "String format of chunk file names.\n\nThe two placeholders are: base file name (*) and chunk number (#...).\nThere must be one and only one asterisk and one or more consecutive hash characters.\nIf chunk number has less digits than the number of hashes, it is left-padded by zeros.\nIf there are more digits in the number, they are left as is.\nPossible chunk files are ignored if their name does not match given format.", + "Default": "*.rclone_chunk.###", + "Value": null, + "Hide": 1, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "*.rclone_chunk.###", + "ValueStr": "*.rclone_chunk.###", + "Type": "string" + }, + { + "Name": "start_from", + "FieldName": "", + "Help": "Minimum valid chunk number. Usually 0 or 1.\n\nBy default chunk numbers start from 1.", + "Default": 1, + "Value": null, + "Hide": 1, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1", + "ValueStr": "1", + "Type": "int" + }, + { + "Name": "meta_format", + "FieldName": "", + "Help": "Format of the metadata object or \"none\".\n\nBy default \"simplejson\".\nMetadata is a small JSON file named after the composite file.", + "Default": "simplejson", + "Value": null, + "Examples": [ + { + "Value": "none", + "Help": "Do not use metadata files at all.\nRequires hash type \"none\"." + }, + { + "Value": "simplejson", + "Help": "Simple JSON supports hash sums and chunk validation.\n\nIt has the following fields: ver, size, nchunks, md5, sha1." + } + ], + "Hide": 1, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "simplejson", + "ValueStr": "simplejson", + "Type": "string" + }, + { + "Name": "hash_type", + "FieldName": "", + "Help": "Choose how chunker handles hash sums.\n\nAll modes but \"none\" require metadata.", + "Default": "md5", + "Value": null, + "Examples": [ + { + "Value": "none", + "Help": "Pass any hash supported by wrapped remote for non-chunked files.\nReturn nothing otherwise." + }, + { + "Value": "md5", + "Help": "MD5 for composite files." + }, + { + "Value": "sha1", + "Help": "SHA1 for composite files." + }, + { + "Value": "md5all", + "Help": "MD5 for all files." + }, + { + "Value": "sha1all", + "Help": "SHA1 for all files." + }, + { + "Value": "md5quick", + "Help": "Copying a file to chunker will request MD5 from the source.\nFalling back to SHA1 if unsupported." + }, + { + "Value": "sha1quick", + "Help": "Similar to \"md5quick\" but prefers SHA1 over MD5." 
+ } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "md5", + "ValueStr": "md5", + "Type": "string" + }, + { + "Name": "fail_hard", + "FieldName": "", + "Help": "Choose how chunker should handle files with missing or invalid chunks.", + "Default": false, + "Value": null, + "Examples": [ + { + "Value": "true", + "Help": "Report errors and abort current command." + }, + { + "Value": "false", + "Help": "Warn user, skip incomplete file and proceed." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "transactions", + "FieldName": "", + "Help": "Choose how chunker should handle temporary files during transactions.", + "Default": "rename", + "Value": null, + "Examples": [ + { + "Value": "rename", + "Help": "Rename temporary files after a successful transaction." + }, + { + "Value": "norename", + "Help": "Leave temporary file names and write transaction ID to metadata file.\nMetadata is required for no rename transactions (meta format cannot be \"none\").\nIf you are using norename transactions you should be careful not to downgrade Rclone\nas older versions of Rclone don't support this transaction style and will misinterpret\nfiles manipulated by norename transactions.\nThis method is EXPERIMENTAL, don't use on production systems." + }, + { + "Value": "auto", + "Help": "Rename or norename will be used depending on capabilities of the backend.\nIf meta format is set to \"none\", rename transactions will always be used.\nThis method is EXPERIMENTAL, don't use on production systems." 
+ } + ], + "Hide": 1, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "rename", + "ValueStr": "rename", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "cloudinary", + "Description": "Cloudinary", + "Prefix": "cloudinary", + "Options": [ + { + "Name": "cloud_name", + "FieldName": "", + "Help": "Cloudinary Environment Name", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "api_key", + "FieldName": "", + "Help": "Cloudinary API Key", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "api_secret", + "FieldName": "", + "Help": "Cloudinary API Secret", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "upload_prefix", + "FieldName": "", + "Help": "Specify the API endpoint for environments out of the US", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "upload_preset", + "FieldName": "", + "Help": "Upload Preset to select asset manipulation on upload", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 52543246, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,Question,Asterisk,Pipe,Hash,Percent,BackSlash,Del,Ctl,RightSpace,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,DoubleQuote,Question,Asterisk,Pipe,Hash,Percent,BackSlash,Del,Ctl,RightSpace,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "eventually_consistent_delay", + "FieldName": "", + "Help": "Wait N seconds for eventual consistency of the databases that support the backend operation", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0s", + "ValueStr": "0s", + "Type": "Duration" + }, + { + "Name": "adjust_media_files_extensions", + "FieldName": "", + "Help": "Cloudinary handles media 
formats as a file attribute and strips it from the name, which is unlike most other file systems", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "media_extensions", + "FieldName": "", + "Help": "Cloudinary supported media extensions", + "Default": [ + "3ds", + "3g2", + "3gp", + "ai", + "arw", + "avi", + "avif", + "bmp", + "bw", + "cr2", + "cr3", + "djvu", + "dng", + "eps3", + "fbx", + "flif", + "flv", + "gif", + "glb", + "gltf", + "hdp", + "heic", + "heif", + "ico", + "indd", + "jp2", + "jpe", + "jpeg", + "jpg", + "jxl", + "jxr", + "m2ts", + "mov", + "mp4", + "mpeg", + "mts", + "mxf", + "obj", + "ogv", + "pdf", + "ply", + "png", + "psd", + "svg", + "tga", + "tif", + "tiff", + "ts", + "u3ma", + "usdz", + "wdp", + "webm", + "webp", + "wmv" + ], + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "[3ds 3g2 3gp ai arw avi avif bmp bw cr2 cr3 djvu dng eps3 fbx flif flv gif glb gltf hdp heic heif ico indd jp2 jpe jpeg jpg jxl jxr m2ts mov mp4 mpeg mts mxf obj ogv pdf ply png psd svg tga tif tiff ts u3ma usdz wdp webm webp wmv]", + "ValueStr": "3ds,3g2,3gp,ai,arw,avi,avif,bmp,bw,cr2,cr3,djvu,dng,eps3,fbx,flif,flv,gif,glb,gltf,hdp,heic,heif,ico,indd,jp2,jpe,jpeg,jpg,jxl,jxr,m2ts,mov,mp4,mpeg,mts,mxf,obj,ogv,pdf,ply,png,psd,svg,tga,tif,tiff,ts,u3ma,usdz,wdp,webm,webp,wmv", + "Type": "stringArray" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "combine", + "Description": "Combine several remotes into one", + "Prefix": "combine", + "Options": [ + { + "Name": "upstreams", + "FieldName": "", + "Help": "Upstreams for combining\n\nThese should be in the form\n\n dir=remote:path dir2=remote2:path\n\nWhere before the = is specified the root directory and after is the remote to\nput there.\n\nEmbedded spaces can be added using quotes\n\n \"dir=remote:path with space\" \"dir2=remote2:path with space\"\n\n", + "Default": null, + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "SpaceSepList" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": null, + "Help": "Any metadata supported by the underlying remote is read and written." 
+ } + }, + { + "Name": "compress", + "Description": "Compress a remote", + "Prefix": "compress", + "Options": [ + { + "Name": "remote", + "FieldName": "", + "Help": "Remote to compress.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "mode", + "FieldName": "", + "Help": "Compression mode.", + "Default": "gzip", + "Value": null, + "Examples": [ + { + "Value": "gzip", + "Help": "Standard gzip compression with fastest parameters." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "gzip", + "ValueStr": "gzip", + "Type": "string" + }, + { + "Name": "level", + "FieldName": "", + "Help": "GZIP compression level (-2 to 9).\n\nGenerally -1 (default, equivalent to 5) is recommended.\nLevels 1 to 9 increase compression at the cost of speed. Going past 6 \ngenerally offers very little return.\n\nLevel -2 uses Huffman encoding only. Only use if you know what you\nare doing.\nLevel 0 turns off compression.", + "Default": -1, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "-1", + "ValueStr": "-1", + "Type": "int" + }, + { + "Name": "ram_cache_limit", + "FieldName": "", + "Help": "Some remotes don't allow the upload of files with unknown size.\nIn this case the compressed file will need to be cached to determine\nit's size.\n\nFiles smaller than this limit will be cached in RAM, files larger than \nthis limit will be cached on disk.", + "Default": 20971520, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "20Mi", + "ValueStr": "20Mi", + "Type": "SizeSuffix" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": null, + "Help": "Any metadata supported by the underlying remote is read and written." + } + }, + { + "Name": "crypt", + "Description": "Encrypt/Decrypt a remote", + "Prefix": "crypt", + "Options": [ + { + "Name": "remote", + "FieldName": "", + "Help": "Remote to encrypt/decrypt.\n\nNormally should contain a ':' and a path, e.g. \"myremote:path/to/dir\",\n\"myremote:bucket\" or maybe \"myremote:\" (not recommended).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "filename_encryption", + "FieldName": "", + "Help": "How to encrypt the filenames.", + "Default": "standard", + "Value": null, + "Examples": [ + { + "Value": "standard", + "Help": "Encrypt the filenames.\nSee the docs for the details." + }, + { + "Value": "obfuscate", + "Help": "Very simple filename obfuscation." 
+ }, + { + "Value": "off", + "Help": "Don't encrypt the file names.\nAdds a \".bin\", or \"suffix\" extension only." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "standard", + "ValueStr": "standard", + "Type": "string" + }, + { + "Name": "directory_name_encryption", + "FieldName": "", + "Help": "Option to either encrypt directory names or leave them intact.\n\nNB If filename_encryption is \"off\" then this option will do nothing.", + "Default": true, + "Value": null, + "Examples": [ + { + "Value": "true", + "Help": "Encrypt directory names." + }, + { + "Value": "false", + "Help": "Don't encrypt directory names, leave them intact." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "password", + "FieldName": "", + "Help": "Password or pass phrase for encryption.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password2", + "FieldName": "", + "Help": "Password or pass phrase for salt.\n\nOptional but recommended.\nShould be different to the previous password.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "server_side_across_configs", + "FieldName": "", + "Help": "Deprecated: use --server-side-across-configs instead.\n\nAllow server-side operations (e.g. copy) to work across different crypt configs.\n\nNormally this option is not what you want, but if you have two crypts\npointing to the same backend you can use it.\n\nThis can be used, for example, to change file name encryption type\nwithout re-uploading all the data. Just make two crypt backends\npointing to two different directories with the single changed\nparameter and use rclone move to move the files between the crypt\nremotes.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "show_mapping", + "FieldName": "", + "Help": "For all files listed show how the names encrypt.\n\nIf this flag is set then for each file that the remote is asked to\nlist, it will log (at level INFO) a line stating the decrypted file\nname and the encrypted file name.\n\nThis is so you can work out which encrypted names are which decrypted\nnames just in case you need to do something with the encrypted file\nnames, or for debugging purposes.", + "Default": false, + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_data_encryption", + "FieldName": "", + "Help": "Option to either encrypt file data or leave it unencrypted.", + "Default": false, + "Value": null, + "Examples": [ + { + "Value": "true", + "Help": "Don't encrypt file data, leave it unencrypted." 
+ }, + { + "Value": "false", + "Help": "Encrypt file data." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "pass_bad_blocks", + "FieldName": "", + "Help": "If set this will pass bad blocks through as all 0.\n\nThis should not be set in normal operation, it should only be set if\ntrying to recover an encrypted file with errors and it is desired to\nrecover as much of the file as possible.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "strict_names", + "FieldName": "", + "Help": "If set, this will raise an error when crypt comes across a filename that can't be decrypted.\n\n(By default, rclone will just log a NOTICE and continue as normal.)\nThis can happen if encrypted and unencrypted files are stored in the same\ndirectory (which is not recommended.) It may also indicate a more serious\nproblem that should be investigated.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "filename_encoding", + "FieldName": "", + "Help": "How to encode the encrypted filename to text string.\n\nThis option could help with shortening the encrypted filename. The \nsuitable option would depend on the way your remote count the filename\nlength and if it's case sensitive.", + "Default": "base32", + "Value": null, + "Examples": [ + { + "Value": "base32", + "Help": "Encode using base32. Suitable for all remote." + }, + { + "Value": "base64", + "Help": "Encode using base64. Suitable for case sensitive remote." + }, + { + "Value": "base32768", + "Help": "Encode using base32768. Suitable if your remote counts UTF-16 or\nUnicode codepoint instead of UTF-8 byte length. (Eg. Onedrive, Dropbox)" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "base32", + "ValueStr": "base32", + "Type": "string" + }, + { + "Name": "suffix", + "FieldName": "", + "Help": "If this is set it will override the default suffix of \".bin\".\n\nSetting suffix to \"none\" will result in an empty suffix. 
This may be useful \nwhen the path length is critical.", + "Default": ".bin", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": ".bin", + "ValueStr": ".bin", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "encode", + "Short": "Encode the given filename(s)", + "Long": "This encodes the filenames given as arguments returning a list of\nstrings of the encoded results.\n\nUsage Example:\n\n rclone backend encode crypt: file1 [file2...]\n rclone rc backend/command command=encode fs=crypt: file1 [file2...]\n", + "Opts": null + }, + { + "Name": "decode", + "Short": "Decode the given filename(s)", + "Long": "This decodes the filenames given as arguments returning a list of\nstrings of the decoded results. It will return an error if any of the\ninputs are invalid.\n\nUsage Example:\n\n rclone backend decode crypt: encryptedfile1 [encryptedfile2...]\n rclone rc backend/command command=decode fs=crypt: encryptedfile1 [encryptedfile2...]\n", + "Opts": null + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": null, + "Help": "Any metadata supported by the underlying remote is read and written." + } + }, + { + "Name": "doi", + "Description": "DOI datasets", + "Prefix": "doi", + "Options": [ + { + "Name": "doi", + "FieldName": "", + "Help": "The DOI or the doi.org URL.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "provider", + "FieldName": "", + "Help": "DOI provider.\n\nThe DOI provider can be set when rclone does not automatically recognize a supported DOI provider.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "auto", + "Help": "Auto-detect provider" + }, + { + "Value": "zenodo", + "Help": "Zenodo" + }, + { + "Value": "dataverse", + "Help": "Dataverse" + }, + { + "Value": "invenio", + "Help": "Invenio" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "doi_resolver_api_url", + "FieldName": "", + "Help": "The URL of the DOI resolver API to use.\n\nThe DOI resolver can be set for testing or for cases when the the canonical DOI resolver API cannot be used.\n\nDefaults to \"https://doi.org/api\".", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "metadata", + "Short": "Show metadata 
about the DOI.", + "Long": "This command returns a JSON object with some information about the DOI.\n\n rclone backend medatadata doi: \n\nIt returns a JSON object representing metadata about the DOI.\n", + "Opts": null + }, + { + "Name": "set", + "Short": "Set command for updating the config parameters.", + "Long": "This set command can be used to update the config parameters\nfor a running doi backend.\n\nUsage Examples:\n\n rclone backend set doi: [-o opt_name=opt_value] [-o opt_name2=opt_value2]\n rclone rc backend/command command=set fs=doi: [-o opt_name=opt_value] [-o opt_name2=opt_value2]\n rclone rc backend/command command=set fs=doi: -o doi=NEW_DOI\n\nThe option keys are named as they are in the config file.\n\nThis rebuilds the connection to the doi backend when it is called with\nthe new parameters. Only new parameters need be passed as the values\nwill default to those currently in use.\n\nIt doesn't return anything.\n", + "Opts": null + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "drive", + "Description": "Google Drive", + "Prefix": "drive", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "Google Application Client Id\nSetting your own is recommended.\nSee https://rclone.org/drive/#making-your-own-client-id for how to create your own.\nIf you leave this blank, it will use an internal key which is low performance.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "scope", + "FieldName": "", + "Help": "Comma separated list of 
scopes that rclone should use when requesting access from drive.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "drive", + "Help": "Full access all files, excluding Application Data Folder." + }, + { + "Value": "drive.readonly", + "Help": "Read-only access to file metadata and file contents." + }, + { + "Value": "drive.file", + "Help": "Access to files created by rclone only.\nThese are visible in the drive website.\nFile authorization is revoked when the user deauthorizes the app." + }, + { + "Value": "drive.appfolder", + "Help": "Allows read and write access to the Application Data folder.\nThis is not visible in the drive website." + }, + { + "Value": "drive.metadata.readonly", + "Help": "Allows read-only access to file metadata but\ndoes not allow any access to read or download file content." + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "b2", - "Description": "Backblaze B2", - "Prefix": "b2", - "Options": [ - { - "Name": "account", - "Help": "Account ID or Application Key ID.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "key", - "Help": "Application Key.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for the service.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "test_mode", - "Help": "A flag string for X-Bz-Test-Mode header for debugging.\n\nThis is for debugging purposes only. 
Setting it to one of the strings\nbelow will cause b2 to return specific errors:\n\n * \"fail_some_uploads\"\n * \"expire_some_account_authorization_tokens\"\n * \"force_cap_exceeded\"\n\nThese will be set in the \"X-Bz-Test-Mode\" header which is documented\nin the [b2 integrations checklist](https://www.backblaze.com/b2/docs/integration_checklist.html).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 2, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "versions", - "Help": "Include old versions in directory listings.\n\nNote that when using this no file write operations are permitted,\nso you can't upload files or delete them.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "version_at", - "Help": "Show file versions as they were at the specified time.\n\nNote that when using this no file write operations are permitted,\nso you can't upload files or delete them.", - "Provider": "", - "Default": "0001-01-01T00:00:00Z", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "off", - "ValueStr": "off", - "Type": "Time" - }, - { - "Name": "hard_delete", - "Help": "Permanently delete files on remote removal, otherwise hide files.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "upload_cutoff", - "Help": "Cutoff for switching to chunked upload.\n\nFiles above this size will be uploaded in chunks of \"--b2-chunk-size\".\n\nThis value should be set no larger than 4.657 GiB (== 5 GB).", - "Provider": "", - "Default": 209715200, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "200Mi", - "ValueStr": "200Mi", - "Type": "SizeSuffix" - }, - { - "Name": "copy_cutoff", - "Help": "Cutoff for switching to multipart copy.\n\nAny files larger than this that need to be server-side copied will be\ncopied in chunks of this size.\n\nThe minimum is 0 and the maximum is 4.6 GiB.", - "Provider": "", - "Default": 4294967296, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "4Gi", - "ValueStr": "4Gi", - "Type": "SizeSuffix" - }, - { - "Name": "chunk_size", - "Help": "Upload chunk size.\n\nWhen uploading large files, chunk the file into this size.\n\nMust fit in memory. 
These chunks are buffered in memory and there\nmight a maximum of \"--transfers\" chunks in progress at once.\n\n5,000,000 Bytes is the minimum size.", - "Provider": "", - "Default": 100663296, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "96Mi", - "ValueStr": "96Mi", - "Type": "SizeSuffix" - }, - { - "Name": "disable_checksum", - "Help": "Disable checksums for large (\u003e upload cutoff) files.\n\nNormally rclone will calculate the SHA1 checksum of the input before\nuploading it so it can add it to metadata on the object. This is great\nfor data integrity checking but can cause long delays for large files\nto start uploading.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "download_url", - "Help": "Custom endpoint for downloads.\n\nThis is usually set to a Cloudflare CDN URL as Backblaze offers\nfree egress for data downloaded through the Cloudflare network.\nRclone works with private buckets by sending an \"Authorization\" header.\nIf the custom endpoint rewrites the requests for authentication,\ne.g., in Cloudflare Workers, this header needs to be handled properly.\nLeave blank if you want to use the endpoint provided by Backblaze.\n\nThe URL provided here SHOULD have the protocol and SHOULD NOT have\na trailing slash or specify the /file/bucket subpath as rclone will\nrequest files with \"{download_url}/file/{bucket_name}/{path}\".\n\nExample:\n\u003e https://mysubdomain.mydomain.tld\n(No trailing \"/\", \"file\" or \"bucket\")", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "download_auth_duration", - "Help": "Time before the authorization token will expire in s or suffix ms|s|m|h|d.\n\nThe duration before the download authorization token will expire.\nThe minimum value is 1 second. 
The maximum value is one week.", - "Provider": "", - "Default": 604800000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1w", - "ValueStr": "1w", - "Type": "Duration" - }, - { - "Name": "memory_pool_flush_time", - "Help": "How often internal memory buffer pools will be flushed.\nUploads which requires additional buffers (f.e multipart) will use memory pool for allocations.\nThis option controls how often unused buffers will be removed from the pool.", - "Provider": "", - "Default": 60000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1m0s", - "ValueStr": "1m0s", - "Type": "Duration" - }, - { - "Name": "memory_pool_use_mmap", - "Help": "Whether to use mmap buffers in internal memory pool.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50438146, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "root_folder_id", + "FieldName": "", + "Help": "ID of the root folder.\nLeave blank normally.\n\nFill in to access \"Computers\" folders (see docs), or for rclone to use\na non root folder as its starting point.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "service_account_file", + "FieldName": "", + "Help": "Service Account Credentials JSON file path.\n\nLeave blank normally.\nNeeded only if you want use SA instead of interactive login.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "service_account_credentials", + "FieldName": "", + "Help": "Service Account Credentials JSON blob.\n\nLeave blank normally.\nNeeded only if you want use SA instead of interactive login.", + "Default": "", + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "team_drive", + "FieldName": "", + "Help": "ID of the Shared Drive 
(Team Drive).", + "Default": "", + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_owner_only", + "FieldName": "", + "Help": "Only consider files owned by the authenticated user.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_trash", + "FieldName": "", + "Help": "Send files to the trash instead of deleting permanently.\n\nDefaults to true, namely sending files to the trash.\nUse `--drive-use-trash=false` to delete files permanently instead.", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "copy_shortcut_content", + "FieldName": "", + "Help": "Server side copy contents of shortcuts instead of the shortcut.\n\nWhen doing server side copies, normally rclone will copy shortcuts as\nshortcuts.\n\nIf this flag is used then rclone will copy the contents of shortcuts\nrather than shortcuts themselves when doing server side copies.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "skip_gdocs", + "FieldName": "", + "Help": "Skip google documents in all listings.\n\nIf given, gdocs practically become invisible to rclone.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "show_all_gdocs", + "FieldName": "", + "Help": "Show all Google Docs including non-exportable ones in listings.\n\nIf you try a server side copy on a Google Form without this flag, you\nwill get this error:\n\n No export formats found for \"application/vnd.google-apps.form\"\n\nHowever adding this flag will allow the form to be server side copied.\n\nNote that rclone doesn't add extensions to the Google Docs file names\nin this mode.\n\nDo **not** use this flag when trying to download Google Docs - rclone\nwill fail to download them.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "skip_checksum_gphotos", + "FieldName": "", + "Help": "Skip checksums on Google photos and videos only.\n\nUse this if you get checksum errors when transferring Google photos or\nvideos.\n\nSetting this flag will cause Google photos and videos to return a\nblank checksums.\n\nGoogle photos are identified by being in the \"photos\" space.\n\nCorrupted checksums are caused by Google modifying the image/video but\nnot updating the checksum.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + 
"Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "shared_with_me", + "FieldName": "", + "Help": "Only show files that are shared with me.\n\nInstructs rclone to operate on your \"Shared with me\" folder (where\nGoogle Drive lets you access the files and folders others have shared\nwith you).\n\nThis works both with the \"list\" (lsd, lsl, etc.) and the \"copy\"\ncommands (copy, sync, etc.), and with all other commands too.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "trashed_only", + "FieldName": "", + "Help": "Only show files that are in the trash.\n\nThis will show trashed files in their original directory structure.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "starred_only", + "FieldName": "", + "Help": "Only show files that are starred.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "formats", + "FieldName": "", + "Help": "Deprecated: See export_formats.", + "Default": "", + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "export_formats", + "FieldName": "", + "Help": "Comma separated list of preferred formats for downloading Google docs.", + "Default": "docx,xlsx,pptx,svg", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "docx,xlsx,pptx,svg", + "ValueStr": "docx,xlsx,pptx,svg", + "Type": "string" + }, + { + "Name": "import_formats", + "FieldName": "", + "Help": "Comma separated list of preferred formats for uploading Google docs.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "allow_import_name_change", + "FieldName": "", + "Help": "Allow the filetype to change when uploading Google docs.\n\nE.g. file.doc to file.docx. This will confuse sync and reupload every time.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_created_date", + "FieldName": "", + "Help": "Use file created date instead of modified date.\n\nUseful when downloading data and you want the creation date used in\nplace of the last modified date.\n\n**WARNING**: This flag may have some unexpected consequences.\n\nWhen uploading to your drive all files will be overwritten unless they\nhaven't been modified since their creation. And the inverse will occur\nwhile downloading. 
This side effect can be avoided by using the\n\"--checksum\" flag.\n\nThis feature was implemented to retain photos capture date as recorded\nby google photos. You will first need to check the \"Create a Google\nPhotos folder\" option in your google drive settings. You can then copy\nor move the photos locally and use the date the image was taken\n(created) set as the modification date.", + "Default": false, + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_shared_date", + "FieldName": "", + "Help": "Use date file was shared instead of modified date.\n\nNote that, as with \"--drive-use-created-date\", this flag may have\nunexpected consequences when uploading/downloading files.\n\nIf both this flag and \"--drive-use-created-date\" are set, the created\ndate is used.", + "Default": false, + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "list_chunk", + "FieldName": "", + "Help": "Size of listing chunk 100-1000, 0 to disable.", + "Default": 1000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1000", + "ValueStr": "1000", + "Type": "int" + }, + { + "Name": "impersonate", + "FieldName": "", + "Help": "Impersonate this user when using a service account.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "alternate_export", + "FieldName": "", + "Help": "Deprecated: No longer needed.", + "Default": false, + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "upload_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to chunked upload.", + "Default": 8388608, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "8Mi", + "ValueStr": "8Mi", + "Type": "SizeSuffix" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Upload chunk size.\n\nMust a power of 2 >= 256k.\n\nMaking this larger will improve performance, but note that each chunk\nis buffered in memory one per transfer.\n\nReducing this will reduce memory usage but decrease performance.", + "Default": 8388608, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "8Mi", + "ValueStr": "8Mi", + "Type": "SizeSuffix" + }, + { + "Name": "acknowledge_abuse", + "FieldName": "", + "Help": "Set to allow files which return cannotDownloadAbusiveFile to be downloaded.\n\nIf downloading a file returns the error \"This file has been identified\nas malware or spam and cannot be downloaded\" with the error code\n\"cannotDownloadAbusiveFile\" then supply this flag to rclone to\nindicate you acknowledge the 
risks of downloading the file and rclone\nwill download it anyway.\n\nNote that if you are using service account it will need Manager\npermission (not Content Manager) to for this flag to work. If the SA\ndoes not have the right permission, Google will just ignore the flag.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "keep_revision_forever", + "FieldName": "", + "Help": "Keep new head revision of each file forever.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "size_as_quota", + "FieldName": "", + "Help": "Show sizes as storage quota usage, not actual size.\n\nShow the size of a file as the storage quota used. This is the\ncurrent version plus any older versions that have been set to keep\nforever.\n\n**WARNING**: This flag may have some unexpected consequences.\n\nIt is not recommended to set this flag in your config - the\nrecommended usage is using the flag form --drive-size-as-quota when\ndoing rclone ls/lsl/lsf/lsjson/etc only.\n\nIf you do use this flag for syncing (not recommended) then you will\nneed to use --ignore size also.", + "Default": false, + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "v2_download_min_size", + "FieldName": "", + "Help": "If Object's are greater, use drive v2 API to download.", + "Default": -1, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "off", + "ValueStr": "off", + "Type": "SizeSuffix" + }, + { + "Name": "pacer_min_sleep", + "FieldName": "", + "Help": "Minimum time to sleep between API calls.", + "Default": 100000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "100ms", + "ValueStr": "100ms", + "Type": "Duration" + }, + { + "Name": "pacer_burst", + "FieldName": "", + "Help": "Number of API calls to allow without sleeping.", + "Default": 100, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "100", + "ValueStr": "100", + "Type": "int" + }, + { + "Name": "server_side_across_configs", + "FieldName": "", + "Help": "Deprecated: use --server-side-across-configs instead.\n\nAllow server-side operations (e.g. copy) to work across different drive configs.\n\nThis can be useful if you wish to do a server-side copy between two\ndifferent Google drives. 
Note that this isn't enabled by default\nbecause it isn't easy to tell if it will work between any two\nconfigurations.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "disable_http2", + "FieldName": "", + "Help": "Disable drive using http2.\n\nThere is currently an unsolved issue with the google drive backend and\nHTTP/2. HTTP/2 is therefore disabled by default for the drive backend\nbut can be re-enabled here. When the issue is solved this flag will\nbe removed.\n\nSee: https://github.com/rclone/rclone/issues/3631\n\n", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "stop_on_upload_limit", + "FieldName": "", + "Help": "Make upload limit errors be fatal.\n\nAt the time of writing it is only possible to upload 750 GiB of data to\nGoogle Drive a day (this is an undocumented limit). When this limit is\nreached Google Drive produces a slightly different error message. When\nthis flag is set it causes these errors to be fatal. These will stop\nthe in-progress sync.\n\nNote that this detection is relying on error message strings which\nGoogle don't document so it may break in the future.\n\nSee: https://github.com/rclone/rclone/issues/3857\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "stop_on_download_limit", + "FieldName": "", + "Help": "Make download limit errors be fatal.\n\nAt the time of writing it is only possible to download 10 TiB of data from\nGoogle Drive a day (this is an undocumented limit). When this limit is\nreached Google Drive produces a slightly different error message. When\nthis flag is set it causes these errors to be fatal. 
These will stop\nthe in-progress sync.\n\nNote that this detection is relying on error message strings which\nGoogle don't document so it may break in the future.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "skip_shortcuts", + "FieldName": "", + "Help": "If set skip shortcut files.\n\nNormally rclone dereferences shortcut files making them appear as if\nthey are the original file (see [the shortcuts section](#shortcuts)).\nIf this flag is set then rclone will ignore shortcut files completely.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "skip_dangling_shortcuts", + "FieldName": "", + "Help": "If set skip dangling shortcut files.\n\nIf this is set then rclone will not show any dangling shortcuts in listings.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "resource_key", + "FieldName": "", + "Help": "Resource key for accessing a link-shared file.\n\nIf you need to access files shared with a link like this\n\n https://drive.google.com/drive/folders/XXX?resourcekey=YYY&usp=sharing\n\nThen you will need to use the first part \"XXX\" as the \"root_folder_id\"\nand the second part \"YYY\" as the \"resource_key\" otherwise you will get\n404 not found errors when trying to access the directory.\n\nSee: https://developers.google.com/drive/api/guides/resource-keys\n\nThis resource key requirement only applies to a subset of old files.\n\nNote also that opening the folder once in the web interface (with the\nuser you've authenticated rclone with) seems to be enough so that the\nresource key is not needed.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "fast_list_bug_fix", + "FieldName": "", + "Help": "Work around a bug in Google Drive listing.\n\nNormally rclone will work around a bug in Google Drive when using\n--fast-list (ListR) where the search \"(A in parents) or (B in\nparents)\" returns nothing sometimes. See #3114, #4289 and\nhttps://issuetracker.google.com/issues/149522397\n\nRclone detects this by finding no items in more than one directory\nwhen listing and retries them as lists of individual directories.\n\nThis means that if you have a lot of empty directories rclone will end\nup listing them all individually and this can take many more API\ncalls.\n\nThis flag allows the work-around to be disabled. 
This is **not**\nrecommended in normal use - only if you have a particular case you are\nhaving trouble with like many empty directories.\n", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "metadata_owner", + "FieldName": "", + "Help": "Control whether owner should be read or written in metadata.\n\nOwner is a standard part of the file metadata so is easy to read. But it\nisn't always desirable to set the owner from the metadata.\n\nNote that you can't set the owner on Shared Drives, and that setting\nownership will generate an email to the new owner (this can't be\ndisabled), and you can't transfer ownership to someone outside your\norganization.\n", + "Default": 1, + "Value": null, + "Examples": [ + { + "Value": "off", + "Help": "Do not read or write the value" + }, + { + "Value": "read", + "Help": "Read the value only" + }, + { + "Value": "write", + "Help": "Write the value only" + }, + { + "Value": "failok", + "Help": "If writing fails log errors only, don't fail the transfer" + }, + { + "Value": "read,write", + "Help": "Read and Write the value." + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "box", - "Description": "Box", - "Prefix": "box", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "root_folder_id", - "Help": "Fill in for rclone to use a non root folder as its starting point.", - "Provider": "", - "Default": "0", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - 
"Exclusive": false, - "Sensitive": true, - "DefaultStr": "0", - "ValueStr": "0", - "Type": "string" - }, - { - "Name": "box_config_file", - "Help": "Box App config.json location\n\nLeave blank normally.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "access_token", - "Help": "Box App Primary Access Token\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "box_sub_type", - "Help": "", - "Provider": "", - "Default": "user", - "Value": null, - "Examples": [ - { - "Value": "user", - "Help": "Rclone should act on behalf of a user.", - "Provider": "" - }, - { - "Value": "enterprise", - "Help": "Rclone should act on behalf of a service account.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "user", - "ValueStr": "user", - "Type": "string" - }, - { - "Name": "upload_cutoff", - "Help": "Cutoff for switching to multipart upload (\u003e= 50 MiB).", - "Provider": "", - "Default": 52428800, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "50Mi", - "ValueStr": "50Mi", - "Type": "SizeSuffix" - }, - { - "Name": "commit_retries", - "Help": "Max number of times to try committing a multipart file.", - "Provider": "", - "Default": 100, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "100", - "ValueStr": "100", - "Type": "int" - }, - { - "Name": "list_chunk", - "Help": "Size of listing chunk 1-1000.", - "Provider": "", - "Default": 1000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1000", - "ValueStr": "1000", - "Type": "int" - }, - { - "Name": "owned_by", - "Help": "Only show items owned by the login (email address) passed in.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 52535298, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,BackSlash,Del,Ctl,RightSpace,InvalidUtf8,Dot", - "ValueStr": "Slash,BackSlash,Del,Ctl,RightSpace,InvalidUtf8,Dot", - "Type": 
"MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "read", + "ValueStr": "read", + "Type": "Bits" + }, + { + "Name": "metadata_permissions", + "FieldName": "", + "Help": "Control whether permissions should be read or written in metadata.\n\nReading permissions metadata from files can be done quickly, but it\nisn't always desirable to set the permissions from the metadata.\n\nNote that rclone drops any inherited permissions on Shared Drives and\nany owner permission on My Drives as these are duplicated in the owner\nmetadata.\n", + "Default": 0, + "Value": null, + "Examples": [ + { + "Value": "off", + "Help": "Do not read or write the value" + }, + { + "Value": "read", + "Help": "Read the value only" + }, + { + "Value": "write", + "Help": "Write the value only" + }, + { + "Value": "failok", + "Help": "If writing fails log errors only, don't fail the transfer" + }, + { + "Value": "read,write", + "Help": "Read and Write the value." + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "crypt", - "Description": "Encrypt/Decrypt a remote", - "Prefix": "crypt", - "Options": [ - { - "Name": "remote", - "Help": "Remote to encrypt/decrypt.\n\nNormally should contain a ':' and a path, e.g. \"myremote:path/to/dir\",\n\"myremote:bucket\" or maybe \"myremote:\" (not recommended).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "filename_encryption", - "Help": "How to encrypt the filenames.", - "Provider": "", - "Default": "standard", - "Value": null, - "Examples": [ - { - "Value": "standard", - "Help": "Encrypt the filenames.\nSee the docs for the details.", - "Provider": "" - }, - { - "Value": "obfuscate", - "Help": "Very simple filename obfuscation.", - "Provider": "" - }, - { - "Value": "off", - "Help": "Don't encrypt the file names.\nAdds a \".bin\", or \"suffix\" extension only.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "standard", - "ValueStr": "standard", - "Type": "string" - }, - { - "Name": "directory_name_encryption", - "Help": "Option to either encrypt directory names or leave them intact.\n\nNB If filename_encryption is \"off\" then this option will do nothing.", - "Provider": "", - "Default": true, - "Value": null, - "Examples": [ - { - "Value": "true", - "Help": "Encrypt directory names.", - "Provider": "" - }, - { - "Value": "false", - "Help": "Don't encrypt directory names, leave them intact.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "password", - "Help": "Password or pass phrase for encryption.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": 
"password2", - "Help": "Password or pass phrase for salt.\n\nOptional but recommended.\nShould be different to the previous password.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "server_side_across_configs", - "Help": "Deprecated: use --server-side-across-configs instead.\n\nAllow server-side operations (e.g. copy) to work across different crypt configs.\n\nNormally this option is not what you want, but if you have two crypts\npointing to the same backend you can use it.\n\nThis can be used, for example, to change file name encryption type\nwithout re-uploading all the data. Just make two crypt backends\npointing to two different directories with the single changed\nparameter and use rclone move to move the files between the crypt\nremotes.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "show_mapping", - "Help": "For all files listed show how the names encrypt.\n\nIf this flag is set then for each file that the remote is asked to\nlist, it will log (at level INFO) a line stating the decrypted file\nname and the encrypted file name.\n\nThis is so you can work out which encrypted names are which decrypted\nnames just in case you need to do something with the encrypted file\nnames, or for debugging purposes.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 2, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_data_encryption", - "Help": "Option to either encrypt file data or leave it unencrypted.", - "Provider": "", - "Default": false, - "Value": null, - "Examples": [ - { - "Value": "true", - "Help": "Don't encrypt file data, leave it unencrypted.", - "Provider": "" - }, - { - "Value": "false", - "Help": "Encrypt file data.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "pass_bad_blocks", - "Help": "If set this will pass bad blocks through as all 0.\n\nThis should not be set in normal operation, it should only be set if\ntrying to recover an encrypted file with errors and it is desired to\nrecover as much of the file as possible.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "filename_encoding", - "Help": "How to encode the encrypted filename to text string.\n\nThis option could help with shortening the encrypted filename. 
The \nsuitable option would depend on the way your remote count the filename\nlength and if it's case sensitive.", - "Provider": "", - "Default": "base32", - "Value": null, - "Examples": [ - { - "Value": "base32", - "Help": "Encode using base32. Suitable for all remote.", - "Provider": "" - }, - { - "Value": "base64", - "Help": "Encode using base64. Suitable for case sensitive remote.", - "Provider": "" - }, - { - "Value": "base32768", - "Help": "Encode using base32768. Suitable if your remote counts UTF-16 or\nUnicode codepoint instead of UTF-8 byte length. (Eg. Onedrive, Dropbox)", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "base32", - "ValueStr": "base32", - "Type": "string" - }, - { - "Name": "suffix", - "Help": "If this is set it will override the default suffix of \".bin\".\n\nSetting suffix to \"none\" will result in an empty suffix. This may be useful \nwhen the path length is critical.", - "Provider": "", - "Default": ".bin", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": ".bin", - "ValueStr": ".bin", - "Type": "string" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "off", + "ValueStr": "off", + "Type": "Bits" + }, + { + "Name": "metadata_labels", + "FieldName": "", + "Help": "Control whether labels should be read or written in metadata.\n\nReading labels metadata from files takes an extra API transaction and\nwill slow down listings. It isn't always desirable to set the labels\nfrom the metadata.\n\nThe format of labels is documented in the drive API documentation at\nhttps://developers.google.com/drive/api/reference/rest/v3/Label -\nrclone just provides a JSON dump of this format.\n\nWhen setting labels, the label and fields must already exist - rclone\nwill not create them. This means that if you are transferring labels\nfrom two different accounts you will have to create the labels in\nadvance and use the metadata mapper to translate the IDs between the\ntwo accounts.\n", + "Default": 0, + "Value": null, + "Examples": [ + { + "Value": "off", + "Help": "Do not read or write the value" + }, + { + "Value": "read", + "Help": "Read the value only" + }, + { + "Value": "write", + "Help": "Write the value only" + }, + { + "Value": "failok", + "Help": "If writing fails log errors only, don't fail the transfer" + }, + { + "Value": "read,write", + "Help": "Read and Write the value." + } ], - "CommandHelp": [ - { - "Name": "encode", - "Short": "Encode the given filename(s)", - "Long": "This encodes the filenames given as arguments returning a list of\nstrings of the encoded results.\n\nUsage Example:\n\n rclone backend encode crypt: file1 [file2...]\n rclone rc backend/command command=encode fs=crypt: file1 [file2...]\n", - "Opts": null - }, - { - "Name": "decode", - "Short": "Decode the given filename(s)", - "Long": "This decodes the filenames given as arguments returning a list of\nstrings of the decoded results. 
It will return an error if any of the\ninputs are invalid.\n\nUsage Example:\n\n rclone backend decode crypt: encryptedfile1 [encryptedfile2...]\n rclone rc backend/command command=decode fs=crypt: encryptedfile1 [encryptedfile2...]\n", - "Opts": null - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "off", + "ValueStr": "off", + "Type": "Bits" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 16777216, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "InvalidUtf8", + "ValueStr": "InvalidUtf8", + "Type": "Encoding" + }, + { + "Name": "env_auth", + "FieldName": "", + "Help": "Get IAM credentials from runtime (environment variables or instance meta data if no env vars).\n\nOnly applies if service_account_file and service_account_credentials is blank.", + "Default": false, + "Value": null, + "Examples": [ + { + "Value": "false", + "Help": "Enter credentials in the next step." + }, + { + "Value": "true", + "Help": "Get GCP IAM credentials from the environment (env vars or IAM)." + } ], - "Aliases": null, - "Hide": false, - "MetadataInfo": { - "System": null, - "Help": "Any metadata supported by the underlying remote is read and written." + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "get", + "Short": "Get command for fetching the drive config parameters", + "Long": "This is a get command which will be used to fetch the various drive config parameters\n\nUsage Examples:\n\n rclone backend get drive: [-o service_account_file] [-o chunk_size]\n rclone rc backend/command command=get fs=drive: [-o service_account_file] [-o chunk_size]\n", + "Opts": { + "chunk_size": "show the current upload chunk size", + "service_account_file": "show the current service account file" } - }, - { - "Name": "cache", - "Description": "Cache a remote", - "Prefix": "cache", - "Options": [ - { - "Name": "remote", - "Help": "Remote to cache.\n\nNormally should contain a ':' and a path, e.g. 
\"myremote:path/to/dir\",\n\"myremote:bucket\" or maybe \"myremote:\" (not recommended).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "plex_url", - "Help": "The URL of the Plex server.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "plex_username", - "Help": "The username of the Plex user.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "plex_password", - "Help": "The password of the Plex user.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "plex_token", - "Help": "The plex token for authentication - auto set normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "plex_insecure", - "Help": "Skip all certificate verification when connecting to the Plex server.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "chunk_size", - "Help": "The size of a chunk (partial file data).\n\nUse lower numbers for slower connections. If the chunk size is\nchanged, any downloaded chunks will be invalid and cache-chunk-path\nwill need to be cleared or unexpected EOF errors will occur.", - "Provider": "", - "Default": 5242880, - "Value": null, - "Examples": [ - { - "Value": "1M", - "Help": "1 MiB", - "Provider": "" - }, - { - "Value": "5M", - "Help": "5 MiB", - "Provider": "" - }, - { - "Value": "10M", - "Help": "10 MiB", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "5Mi", - "ValueStr": "5Mi", - "Type": "SizeSuffix" - }, - { - "Name": "info_age", - "Help": "How long to cache file structure information (directory listings, file size, times, etc.). 
\nIf all write operations are done through the cache then you can safely make\nthis value very large as the cache store will also be updated in real time.", - "Provider": "", - "Default": 21600000000000, - "Value": null, - "Examples": [ - { - "Value": "1h", - "Help": "1 hour", - "Provider": "" - }, - { - "Value": "24h", - "Help": "24 hours", - "Provider": "" - }, - { - "Value": "48h", - "Help": "48 hours", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "6h0m0s", - "ValueStr": "6h0m0s", - "Type": "Duration" - }, - { - "Name": "chunk_total_size", - "Help": "The total size that the chunks can take up on the local disk.\n\nIf the cache exceeds this value then it will start to delete the\noldest chunks until it goes under this value.", - "Provider": "", - "Default": 10737418240, - "Value": null, - "Examples": [ - { - "Value": "500M", - "Help": "500 MiB", - "Provider": "" - }, - { - "Value": "1G", - "Help": "1 GiB", - "Provider": "" - }, - { - "Value": "10G", - "Help": "10 GiB", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10Gi", - "ValueStr": "10Gi", - "Type": "SizeSuffix" - }, - { - "Name": "db_path", - "Help": "Directory to store file structure metadata DB.\n\nThe remote name is used as the DB file name.", - "Provider": "", - "Default": "/home/zenon/.cache/rclone/cache-backend", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "/home/zenon/.cache/rclone/cache-backend", - "ValueStr": "/home/zenon/.cache/rclone/cache-backend", - "Type": "string" - }, - { - "Name": "chunk_path", - "Help": "Directory to cache chunk files.\n\nPath to where partial file data (chunks) are stored locally. The remote\nname is appended to the final path.\n\nThis config follows the \"--cache-db-path\". If you specify a custom\nlocation for \"--cache-db-path\" and don't specify one for \"--cache-chunk-path\"\nthen \"--cache-chunk-path\" will use the same path as \"--cache-db-path\".", - "Provider": "", - "Default": "/home/zenon/.cache/rclone/cache-backend", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "/home/zenon/.cache/rclone/cache-backend", - "ValueStr": "/home/zenon/.cache/rclone/cache-backend", - "Type": "string" - }, - { - "Name": "db_purge", - "Help": "Clear all the cached data for this remote on start.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 2, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "chunk_clean_interval", - "Help": "How often should the cache perform cleanups of the chunk storage.\n\nThe default value should be ok for most people. 
If you find that the\ncache goes over \"cache-chunk-total-size\" too often then try to lower\nthis value to force it to perform cleanups more often.", - "Provider": "", - "Default": 60000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1m0s", - "ValueStr": "1m0s", - "Type": "Duration" - }, - { - "Name": "read_retries", - "Help": "How many times to retry a read from a cache storage.\n\nSince reading from a cache stream is independent from downloading file\ndata, readers can get to a point where there's no more data in the\ncache. Most of the times this can indicate a connectivity issue if\ncache isn't able to provide file data anymore.\n\nFor really slow connections, increase this to a point where the stream is\nable to provide data but your experience will be very stuttering.", - "Provider": "", - "Default": 10, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10", - "ValueStr": "10", - "Type": "int" - }, - { - "Name": "workers", - "Help": "How many workers should run in parallel to download chunks.\n\nHigher values will mean more parallel processing (better CPU needed)\nand more concurrent requests on the cloud provider. This impacts\nseveral aspects like the cloud provider API limits, more stress on the\nhardware that rclone runs on but it also means that streams will be\nmore fluid and data will be available much more faster to readers.\n\n**Note**: If the optional Plex integration is enabled then this\nsetting will adapt to the type of reading performed and the value\nspecified here will be used as a maximum number of workers to use.", - "Provider": "", - "Default": 4, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "4", - "ValueStr": "4", - "Type": "int" - }, - { - "Name": "chunk_no_memory", - "Help": "Disable the in-memory cache for storing chunks during streaming.\n\nBy default, cache will keep file data during streaming in RAM as well\nto provide it to readers as fast as possible.\n\nThis transient data is evicted as soon as it is read and the number of\nchunks stored doesn't exceed the number of workers. 
However, depending\non other settings like \"cache-chunk-size\" and \"cache-workers\" this footprint\ncan increase if there are parallel streams too (multiple files being read\nat the same time).\n\nIf the hardware permits it, use this feature to provide an overall better\nperformance during streaming but it can also be disabled if RAM is not\navailable on the local machine.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "rps", - "Help": "Limits the number of requests per second to the source FS (-1 to disable).\n\nThis setting places a hard limit on the number of requests per second\nthat cache will be doing to the cloud provider remote and try to\nrespect that value by setting waits between reads.\n\nIf you find that you're getting banned or limited on the cloud\nprovider through cache and know that a smaller number of requests per\nsecond will allow you to work with it then you can use this setting\nfor that.\n\nA good balance of all the other settings should make this setting\nuseless but it is available to set for more special cases.\n\n**NOTE**: This will limit the number of requests during streams but\nother API calls to the cloud provider like directory listings will\nstill pass.", - "Provider": "", - "Default": -1, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "-1", - "ValueStr": "-1", - "Type": "int" - }, - { - "Name": "writes", - "Help": "Cache file data on writes through the FS.\n\nIf you need to read files immediately after you upload them through\ncache you can enable this flag to have their data stored in the\ncache store at the same time during upload.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "tmp_upload_path", - "Help": "Directory to keep temporary files until they are uploaded.\n\nThis is the path where cache will use as a temporary storage for new\nfiles that need to be uploaded to the cloud provider.\n\nSpecifying a value will enable this feature. 
Without it, it is\ncompletely disabled and files will be uploaded directly to the cloud\nprovider", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "tmp_wait_time", - "Help": "How long should files be stored in local cache before being uploaded.\n\nThis is the duration that a file must wait in the temporary location\n_cache-tmp-upload-path_ before it is selected for upload.\n\nNote that only one file is uploaded at a time and it can take longer\nto start the upload if a queue formed for this purpose.", - "Provider": "", - "Default": 15000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "15s", - "ValueStr": "15s", - "Type": "Duration" - }, - { - "Name": "db_wait_time", - "Help": "How long to wait for the DB to be available - 0 is unlimited.\n\nOnly one process can have the DB open at any one time, so rclone waits\nfor this duration for the DB to become available before it gives an\nerror.\n\nIf you set it to 0 then it will wait forever.", - "Provider": "", - "Default": 1000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1s", - "ValueStr": "1s", - "Type": "Duration" - } + }, + { + "Name": "set", + "Short": "Set command for updating the drive config parameters", + "Long": "This is a set command which will be used to update the various drive config parameters\n\nUsage Examples:\n\n rclone backend set drive: [-o service_account_file=sa.json] [-o chunk_size=67108864]\n rclone rc backend/command command=set fs=drive: [-o service_account_file=sa.json] [-o chunk_size=67108864]\n", + "Opts": { + "chunk_size": "update the current upload chunk size", + "service_account_file": "update the current service account file" + } + }, + { + "Name": "shortcut", + "Short": "Create shortcuts from files or directories", + "Long": "This command creates shortcuts from files or directories.\n\nUsage:\n\n rclone backend shortcut drive: source_item destination_shortcut\n rclone backend shortcut drive: source_item -o target=drive2: destination_shortcut\n\nIn the first example this creates a shortcut from the \"source_item\"\nwhich can be a file or a directory to the \"destination_shortcut\". The\n\"source_item\" and the \"destination_shortcut\" should be relative paths\nfrom \"drive:\"\n\nIn the second example this creates a shortcut from the \"source_item\"\nrelative to \"drive:\" to the \"destination_shortcut\" relative to\n\"drive2:\". 
This may fail with a permission error if the user\nauthenticated with \"drive2:\" can't read files from \"drive:\".\n", + "Opts": { + "target": "optional target remote for the shortcut destination" + } + }, + { + "Name": "drives", + "Short": "List the Shared Drives available to this account", + "Long": "This command lists the Shared Drives (Team Drives) available to this\naccount.\n\nUsage:\n\n rclone backend [-o config] drives drive:\n\nThis will return a JSON list of objects like this\n\n [\n {\n \"id\": \"0ABCDEF-01234567890\",\n \"kind\": \"drive#teamDrive\",\n \"name\": \"My Drive\"\n },\n {\n \"id\": \"0ABCDEFabcdefghijkl\",\n \"kind\": \"drive#teamDrive\",\n \"name\": \"Test Drive\"\n }\n ]\n\nWith the -o config parameter it will output the list in a format\nsuitable for adding to a config file to make aliases for all the\ndrives found and a combined drive.\n\n [My Drive]\n type = alias\n remote = drive,team_drive=0ABCDEF-01234567890,root_folder_id=:\n\n [Test Drive]\n type = alias\n remote = drive,team_drive=0ABCDEFabcdefghijkl,root_folder_id=:\n\n [AllDrives]\n type = combine\n upstreams = \"My Drive=My Drive:\" \"Test Drive=Test Drive:\"\n\nAdding this to the rclone config file will cause those team drives to\nbe accessible with the aliases shown. Any illegal characters will be\nsubstituted with \"_\" and duplicate names will have numbers suffixed.\nIt will also add a remote called AllDrives which shows all the shared\ndrives combined into one directory tree.\n", + "Opts": null + }, + { + "Name": "untrash", + "Short": "Untrash files and directories", + "Long": "This command untrashes all the files and directories in the directory\npassed in recursively.\n\nUsage:\n\nThis takes an optional directory to trash which make this easier to\nuse via the API.\n\n rclone backend untrash drive:directory\n rclone backend --interactive untrash drive:directory subdir\n\nUse the --interactive/-i or --dry-run flag to see what would be restored before restoring it.\n\nResult:\n\n {\n \"Untrashed\": 17,\n \"Errors\": 0\n }\n", + "Opts": null + }, + { + "Name": "copyid", + "Short": "Copy files by ID", + "Long": "This command copies files by ID\n\nUsage:\n\n rclone backend copyid drive: ID path\n rclone backend copyid drive: ID1 path1 ID2 path2\n\nIt copies the drive file with ID given to the path (an rclone path which\nwill be passed internally to rclone copyto). The ID and path pairs can be\nrepeated.\n\nThe path should end with a / to indicate copy the file as named to\nthis directory. If it doesn't end with a / then the last path\ncomponent will be used as the file name.\n\nIf the destination is a drive backend then server-side copying will be\nattempted if possible.\n\nUse the --interactive/-i or --dry-run flag to see what would be copied before copying.\n", + "Opts": null + }, + { + "Name": "moveid", + "Short": "Move files by ID", + "Long": "This command moves files by ID\n\nUsage:\n\n rclone backend moveid drive: ID path\n rclone backend moveid drive: ID1 path1 ID2 path2\n\nIt moves the drive file with ID given to the path (an rclone path which\nwill be passed internally to rclone moveto).\n\nThe path should end with a / to indicate move the file as named to\nthis directory. 
If it doesn't end with a / then the last path\ncomponent will be used as the file name.\n\nIf the destination is a drive backend then server-side moving will be\nattempted if possible.\n\nUse the --interactive/-i or --dry-run flag to see what would be moved beforehand.\n", + "Opts": null + }, + { + "Name": "exportformats", + "Short": "Dump the export formats for debug purposes", + "Long": "", + "Opts": null + }, + { + "Name": "importformats", + "Short": "Dump the import formats for debug purposes", + "Long": "", + "Opts": null + }, + { + "Name": "query", + "Short": "List files using Google Drive query language", + "Long": "This command lists files based on a query\n\nUsage:\n\n rclone backend query drive: query\n \nThe query syntax is documented at [Google Drive Search query terms and \noperators](https://developers.google.com/drive/api/guides/ref-search-terms).\n\nFor example:\n\n\trclone backend query drive: \"'0ABc9DEFGHIJKLMNop0QRatUVW3X' in parents and name contains 'foo'\"\n\nIf the query contains literal ' or \\ characters, these need to be escaped with\n\\ characters. \"'\" becomes \"\\'\" and \"\\\" becomes \"\\\\\\\", for example to match a \nfile named \"foo ' \\.txt\":\n\n\trclone backend query drive: \"name = 'foo \\' \\\\\\.txt'\"\n\nThe result is a JSON array of matches, for example:\n\n [\n\t{\n\t\t\"createdTime\": \"2017-06-29T19:58:28.537Z\",\n\t\t\"id\": \"0AxBe_CDEF4zkGHI4d0FjYko2QkD\",\n\t\t\"md5Checksum\": \"68518d16be0c6fbfab918be61d658032\",\n\t\t\"mimeType\": \"text/plain\",\n\t\t\"modifiedTime\": \"2024-02-02T10:40:02.874Z\",\n\t\t\"name\": \"foo ' \\\\.txt\",\n\t\t\"parents\": [\n\t\t\t\"0BxAe_BCDE4zkFGZpcWJGek0xbzC\"\n\t\t],\n\t\t\"resourceKey\": \"0-ABCDEFGHIXJQpIGqBJq3MC\",\n\t\t\"sha1Checksum\": \"8f284fa768bfb4e45d076a579ab3905ab6bfa893\",\n\t\t\"size\": \"311\",\n\t\t\"webViewLink\": \"https://drive.google.com/file/d/0AxBe_CDEF4zkGHI4d0FjYko2QkD/view?usp=drivesdk\\u0026resourcekey=0-ABCDEFGHIXJQpIGqBJq3MC\"\n\t}\n ]", + "Opts": null + }, + { + "Name": "rescue", + "Short": "Rescue or delete any orphaned files", + "Long": "This command rescues or deletes any orphaned files or directories.\n\nSometimes files can get orphaned in Google Drive. This means that they\nare no longer in any folder in Google Drive.\n\nThis command finds those files and either rescues them to a directory\nyou specify or deletes them.\n\nUsage:\n\nThis can be used in 3 ways.\n\nFirst, list all orphaned files\n\n rclone backend rescue drive:\n\nSecond rescue all orphaned files to the directory indicated\n\n rclone backend rescue drive: \"relative/path/to/rescue/directory\"\n\ne.g. To rescue all orphans to a directory called \"Orphans\" in the top level\n\n rclone backend rescue drive: Orphans\n\nThird delete all orphaned files to the trash\n\n rclone backend rescue drive: -o delete\n", + "Opts": null + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": { + "btime": { + "Help": "Time of file birth (creation) with mS accuracy. 
Note that this is only writable on fresh uploads - it can't be written for updates.", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999Z07:00", + "ReadOnly": false + }, + "content-type": { + "Help": "The MIME type of the file.", + "Type": "string", + "Example": "text/plain", + "ReadOnly": false + }, + "copy-requires-writer-permission": { + "Help": "Whether the options to copy, print, or download this file, should be disabled for readers and commenters.", + "Type": "boolean", + "Example": "true", + "ReadOnly": false + }, + "description": { + "Help": "A short description of the file.", + "Type": "string", + "Example": "Contract for signing", + "ReadOnly": false + }, + "folder-color-rgb": { + "Help": "The color for a folder or a shortcut to a folder as an RGB hex string.", + "Type": "string", + "Example": "881133", + "ReadOnly": false + }, + "labels": { + "Help": "Labels attached to this file in a JSON dump of Googled drive format. Enable with --drive-metadata-labels.", + "Type": "JSON", + "Example": "[]", + "ReadOnly": false + }, + "mtime": { + "Help": "Time of last modification with mS accuracy.", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999Z07:00", + "ReadOnly": false + }, + "owner": { + "Help": "The owner of the file. Usually an email address. Enable with --drive-metadata-owner.", + "Type": "string", + "Example": "user@example.com", + "ReadOnly": false + }, + "permissions": { + "Help": "Permissions in a JSON dump of Google drive format. On shared drives these will only be present if they aren't inherited. Enable with --drive-metadata-permissions.", + "Type": "JSON", + "Example": "{}", + "ReadOnly": false + }, + "starred": { + "Help": "Whether the user has starred the file.", + "Type": "boolean", + "Example": "false", + "ReadOnly": false + }, + "viewed-by-me": { + "Help": "Whether the file has been viewed by this user.", + "Type": "boolean", + "Example": "true", + "ReadOnly": true + }, + "writers-can-share": { + "Help": "Whether users with only writer permission can modify the file's permissions. 
Not populated and ignored when setting for items in shared drives.", + "Type": "boolean", + "Example": "false", + "ReadOnly": false + } + }, + "Help": "User metadata is stored in the properties field of the drive object.\n\nMetadata is supported on files and directories.\n" + } + }, + { + "Name": "dropbox", + "Description": "Dropbox", + "Prefix": "dropbox", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Upload chunk size (< 150Mi).\n\nAny files larger than this will be uploaded in chunks of this size.\n\nNote that chunks are buffered in memory (one at a time) so rclone can\ndeal with retries. Setting this larger will increase the speed\nslightly (at most 10% for 128 MiB in tests) at the cost of using more\nmemory. It can be set smaller if you are tight on memory.", + "Default": 50331648, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "48Mi", + "ValueStr": "48Mi", + "Type": "SizeSuffix" + }, + { + "Name": "impersonate", + "FieldName": "", + "Help": "Impersonate this user when using a business account.\n\nNote that if you want to use impersonate, you should make sure this\nflag is set when running \"rclone config\" as this will cause rclone to\nrequest the \"members.read\" scope which it won't normally. 
This is\nneeded to lookup a members email address into the internal ID that\ndropbox uses in the API.\n\nUsing the \"members.read\" scope will require a Dropbox Team Admin\nto approve during the OAuth flow.\n\nYou will have to use your own App (setting your own client_id and\nclient_secret) to use this option as currently rclone's default set of\npermissions doesn't include \"members.read\". This can be added once\nv1.55 or later is in use everywhere.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "shared_files", + "FieldName": "", + "Help": "Instructs rclone to work on individual shared files.\n\nIn this mode rclone's features are extremely limited - only list (ls, lsl, etc.) \noperations and read operations (e.g. downloading) are supported in this mode.\nAll other operations will be disabled.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "shared_folders", + "FieldName": "", + "Help": "Instructs rclone to work on shared folders.\n\t\t\t\nWhen this flag is used with no path only the List operation is supported and \nall available shared folders will be listed. If you specify a path the first part \nwill be interpreted as the name of shared folder. Rclone will then try to mount this \nshared to the root namespace. On success shared folder rclone proceeds normally. \nThe shared folder is now pretty much a normal folder and all normal operations \nare supported. 
\n\nNote that we don't unmount the shared folder afterwards so the \n--dropbox-shared-folders can be omitted after the first use of a particular \nshared folder.\n\nSee also --dropbox-root-namespace for an alternative way to work with shared\nfolders.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "pacer_min_sleep", + "FieldName": "", + "Help": "Minimum time to sleep between API calls.", + "Default": 10000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10ms", + "ValueStr": "10ms", + "Type": "Duration" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 52469762, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,BackSlash,Del,RightSpace,InvalidUtf8,Dot", + "ValueStr": "Slash,BackSlash,Del,RightSpace,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "root_namespace", + "FieldName": "", + "Help": "Specify a different Dropbox namespace ID to use as the root for all paths.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "export_formats", + "FieldName": "", + "Help": "Comma separated list of preferred formats for exporting files\n\nCertain Dropbox files can only be accessed by exporting them to another format.\nThese include Dropbox Paper documents.\n\nFor each such file, rclone will choose the first format on this list that Dropbox\nconsiders valid. 
If none is valid, it will choose Dropbox's default format.\n\nKnown formats include: \"html\", \"md\" (markdown)", + "Default": [ + "html", + "md" + ], + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "html,md", + "ValueStr": "html,md", + "Type": "CommaSepList" + }, + { + "Name": "skip_exports", + "FieldName": "", + "Help": "Skip exportable files in all listings.\n\nIf given, exportable files practically become invisible to rclone.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "show_all_exports", + "FieldName": "", + "Help": "Show all exportable files in listings.\n\nAdding this flag will allow all exportable files to be server side copied.\nNote that rclone doesn't add extensions to the exportable file names in this mode.\n\nDo **not** use this flag when trying to download exportable files - rclone\nwill fail to download them.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "batch_mode", + "FieldName": "", + "Help": "Upload file batching sync|async|off.\n\nThis sets the batch mode used by rclone.\n\nFor full info see [the main docs](https://rclone.org/dropbox/#batch-mode)\n\nThis has 3 possible values\n\n- off - no batching\n- sync - batch uploads and check completion (default)\n- async - batch upload and don't check completion\n\nRclone will close any outstanding batches when it exits which may make\na delay on quit.\n", + "Default": "sync", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "sync", + "ValueStr": "sync", + "Type": "string" + }, + { + "Name": "batch_size", + "FieldName": "", + "Help": "Max number of files in upload batch.\n\nThis sets the batch size of files to upload. It has to be less than 1000.\n\nBy default this is 0 which means rclone will calculate the batch size\ndepending on the setting of batch_mode.\n\n- batch_mode: async - default batch_size is 100\n- batch_mode: sync - default batch_size is the same as --transfers\n- batch_mode: off - not in use\n\nRclone will close any outstanding batches when it exits which may make\na delay on quit.\n\nSetting this is a great idea if you are uploading lots of small files\nas it will make them a lot quicker. 
You can use --transfers 32 to\nmaximise throughput.\n", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0", + "ValueStr": "0", + "Type": "int" + }, + { + "Name": "batch_timeout", + "FieldName": "", + "Help": "Max time to allow an idle upload batch before uploading.\n\nIf an upload batch is idle for more than this long then it will be\nuploaded.\n\nThe default for this is 0 which means rclone will choose a sensible\ndefault based on the batch_mode in use.\n\n- batch_mode: async - default batch_timeout is 10s\n- batch_mode: sync - default batch_timeout is 500ms\n- batch_mode: off - not in use\n", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0s", + "ValueStr": "0s", + "Type": "Duration" + }, + { + "Name": "batch_commit_timeout", + "FieldName": "", + "Help": "Max time to wait for a batch to finish committing. (no longer used)", + "Default": 600000000000, + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10m0s", + "ValueStr": "10m0s", + "Type": "Duration" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "fichier", + "Description": "1Fichier", + "Prefix": "fichier", + "Options": [ + { + "Name": "api_key", + "FieldName": "", + "Help": "Your API Key, get it from https://1fichier.com/console/params.pl.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "shared_folder", + "FieldName": "", + "Help": "If you want to download a shared folder, add this parameter.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "file_password", + "FieldName": "", + "Help": "If you want to download a shared file that is password protected, add this parameter.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "folder_password", + "FieldName": "", + "Help": "If you want to list the files in a shared folder that is password protected, add this parameter.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "cdn", + "FieldName": "", + "Help": "Set if you wish to use CDN download links.", + "Default": false, + "Value": null, + "Hide": 0, + 
"Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 52666494, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,SingleQuote,BackQuote,Dollar,BackSlash,Del,Ctl,LeftSpace,RightSpace,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,DoubleQuote,SingleQuote,BackQuote,Dollar,BackSlash,Del,Ctl,LeftSpace,RightSpace,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "filefabric", + "Description": "Enterprise File Fabric", + "Prefix": "filefabric", + "Options": [ + { + "Name": "url", + "FieldName": "", + "Help": "URL of the Enterprise File Fabric to connect to.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "https://storagemadeeasy.com", + "Help": "Storage Made Easy US" + }, + { + "Value": "https://eu.storagemadeeasy.com", + "Help": "Storage Made Easy EU" + }, + { + "Value": "https://yourfabric.smestorage.com", + "Help": "Connect to your Enterprise File Fabric" + } + ], + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "root_folder_id", + "FieldName": "", + "Help": "ID of the root folder.\n\nLeave blank normally.\n\nFill in to make rclone start with directory of a given ID.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "permanent_token", + "FieldName": "", + "Help": "Permanent Authentication Token.\n\nA Permanent Authentication Token can be created in the Enterprise File\nFabric, on the users Dashboard under Security, there is an entry\nyou'll see called \"My Authentication Tokens\". Click the Manage button\nto create one.\n\nThese tokens are normally valid for several years.\n\nFor more info see: https://docs.storagemadeeasy.com/organisationcloud/api-tokens\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "Session Token.\n\nThis is a session token which rclone caches in the config file. 
It is\nusually valid for 1 hour.\n\nDon't set this value - rclone will set it automatically.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_expiry", + "FieldName": "", + "Help": "Token expiry time.\n\nDon't set this value - rclone will set it automatically.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "version", + "FieldName": "", + "Help": "Version read from the file fabric.\n\nDon't set this value - rclone will set it automatically.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50429954, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "filelu", + "Description": "FileLu Cloud Storage", + "Prefix": "filelu", + "Options": [ + { + "Name": "key", + "FieldName": "", + "Help": "Your FileLu Rclone key from My Account", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 536870910, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,SingleQuote,BackQuote,Dollar,Colon,Question,Asterisk,Pipe,Hash,Percent,BackSlash,CrLf,Del,Ctl,LeftSpace,LeftPeriod,LeftTilde,LeftCrLfHtVt,RightSpace,RightPeriod,RightCrLfHtVt,InvalidUtf8,Dot,SquareBracket,Semicolon,Exclamation", + "ValueStr": "Slash,LtGt,DoubleQuote,SingleQuote,BackQuote,Dollar,Colon,Question,Asterisk,Pipe,Hash,Percent,BackSlash,CrLf,Del,Ctl,LeftSpace,LeftPeriod,LeftTilde,LeftCrLfHtVt,RightSpace,RightPeriod,RightCrLfHtVt,InvalidUtf8,Dot,SquareBracket,Semicolon,Exclamation", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + 
"Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "filescom", + "Description": "Files.com", + "Prefix": "filescom", + "Options": [ + { + "Name": "site", + "FieldName": "", + "Help": "Your site subdomain (e.g. mysite) or custom domain (e.g. myfiles.customdomain.com).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "username", + "FieldName": "", + "Help": "The username used to authenticate with Files.com.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password", + "FieldName": "", + "Help": "The password used to authenticate with Files.com.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "api_key", + "FieldName": "", + "Help": "The API key used to authenticate with Files.com.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 60923906, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,BackSlash,Del,Ctl,RightSpace,RightCrLfHtVt,InvalidUtf8,Dot", + "ValueStr": "Slash,BackSlash,Del,Ctl,RightSpace,RightCrLfHtVt,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "ftp", + "Description": "FTP", + "Prefix": "ftp", + "Options": [ + { + "Name": "host", + "FieldName": "", + "Help": "FTP host to connect to.\n\nE.g. 
\"ftp.example.com\".", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "user", + "FieldName": "", + "Help": "FTP username.", + "Default": "vscode", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "vscode", + "ValueStr": "vscode", + "Type": "string" + }, + { + "Name": "port", + "FieldName": "", + "Help": "FTP port number.", + "Default": 21, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "21", + "ValueStr": "21", + "Type": "int" + }, + { + "Name": "pass", + "FieldName": "", + "Help": "FTP password.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "tls", + "FieldName": "", + "Help": "Use Implicit FTPS (FTP over TLS).\n\nWhen using implicit FTP over TLS the client connects using TLS\nright from the start which breaks compatibility with\nnon-TLS-aware servers. This is usually served over port 990 rather\nthan port 21. Cannot be used in combination with explicit FTPS.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "explicit_tls", + "FieldName": "", + "Help": "Use Explicit FTPS (FTP over TLS).\n\nWhen using explicit FTP over TLS the client explicitly requests\nsecurity from the server in order to upgrade a plain text connection\nto an encrypted one. 
Cannot be used in combination with implicit FTPS.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "concurrency", + "FieldName": "", + "Help": "Maximum number of FTP simultaneous connections, 0 for unlimited.\n\nNote that setting this is very likely to cause deadlocks so it should\nbe used with care.\n\nIf you are doing a sync or copy then make sure concurrency is one more\nthan the sum of `--transfers` and `--checkers`.\n\nIf you use `--check-first` then it just needs to be one more than the\nmaximum of `--checkers` and `--transfers`.\n\nSo for `concurrency 3` you'd use `--checkers 2 --transfers 2\n--check-first` or `--checkers 1 --transfers 1`.\n\n", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0", + "ValueStr": "0", + "Type": "int" + }, + { + "Name": "no_check_certificate", + "FieldName": "", + "Help": "Do not verify the TLS certificate of the server.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "disable_epsv", + "FieldName": "", + "Help": "Disable using EPSV even if server advertises support.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "disable_mlsd", + "FieldName": "", + "Help": "Disable using MLSD even if server advertises support.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "disable_utf8", + "FieldName": "", + "Help": "Disable using UTF-8 even if server advertises support.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "writing_mdtm", + "FieldName": "", + "Help": "Use MDTM to set modification time (VsFtpd quirk)", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "force_list_hidden", + "FieldName": "", + "Help": "Use LIST -a to force listing of hidden files and folders. 
This will disable the use of MLSD.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "idle_timeout", + "FieldName": "", + "Help": "Max time before closing idle connections.\n\nIf no connections have been returned to the connection pool in the time\ngiven, rclone will empty the connection pool.\n\nSet to 0 to keep connections indefinitely.\n", + "Default": 60000000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1m0s", + "ValueStr": "1m0s", + "Type": "Duration" + }, + { + "Name": "close_timeout", + "FieldName": "", + "Help": "Maximum time to wait for a response to close.", + "Default": 60000000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1m0s", + "ValueStr": "1m0s", + "Type": "Duration" + }, + { + "Name": "tls_cache_size", + "FieldName": "", + "Help": "Size of TLS session cache for all control and data connections.\n\nTLS cache allows to resume TLS sessions and reuse PSK between connections.\nIncrease if default size is not enough resulting in TLS resumption errors.\nEnabled by default. Use 0 to disable.", + "Default": 32, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "32", + "ValueStr": "32", + "Type": "int" + }, + { + "Name": "disable_tls13", + "FieldName": "", + "Help": "Disable TLS 1.3 (workaround for FTP servers with buggy TLS)", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "shut_timeout", + "FieldName": "", + "Help": "Maximum time to wait for data connection closing status.", + "Default": 60000000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1m0s", + "ValueStr": "1m0s", + "Type": "Duration" + }, + { + "Name": "ask_password", + "FieldName": "", + "Help": "Allow asking for FTP password when needed.\n\nIf this is set and no password is supplied then rclone will ask for a password\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "socks_proxy", + "FieldName": "", + "Help": "Socks 5 proxy host.\n\t\t\nSupports the format user:pass@host:port, user@host:port, host:port.\n\t\t\nExample:\n\t\t\n myUser:myPass@localhost:9005\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "http_proxy", + "FieldName": "", + "Help": "URL for HTTP CONNECT proxy\n\nSet this to a URL for an HTTP proxy which supports the HTTP CONNECT verb.\n", + 
"Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "no_check_upload", + "FieldName": "", + "Help": "Don't check the upload is OK\n\nNormally rclone will try to check the upload exists after it has\nuploaded a file to make sure the size and modification time are as\nexpected.\n\nThis flag stops rclone doing these checks. This enables uploading to\nfolders which are write only.\n\nYou will likely need to use the --inplace flag also if uploading to\na write only folder.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 35749890, + "Value": null, + "Examples": [ + { + "Value": "Asterisk,Ctl,Dot,Slash", + "Help": "ProFTPd can't handle '*' in file names" + }, + { + "Value": "BackSlash,Ctl,Del,Dot,RightSpace,Slash,SquareBracket", + "Help": "PureFTPd can't handle '[]' or '*' in file names" + }, + { + "Value": "Ctl,LeftPeriod,Slash", + "Help": "VsFTPd can't handle file names starting with dot" + } ], - "CommandHelp": [ - { - "Name": "stats", - "Short": "Print stats on the cache backend in JSON format.", - "Long": "", - "Opts": null - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,Del,Ctl,RightSpace,Dot", + "ValueStr": "Slash,Del,Ctl,RightSpace,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "gofile", + "Description": "Gofile", + "Prefix": "gofile", + "Options": [ + { + "Name": "access_token", + "FieldName": "", + "Help": "API Access token\n\nYou can get this from the web control panel.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "root_folder_id", + "FieldName": "", + "Help": "ID of the root folder\n\nLeave this blank normally, rclone will fill it in automatically.\n\nIf you want rclone to be restricted to a particular folder you can\nfill it in - see the docs for more info.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "account_id", + "FieldName": "", + "Help": "Account ID\n\nLeave this blank normally, rclone will fill it in automatically.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": 
false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "list_chunk", + "FieldName": "", + "Help": "Number of items to list in each call", + "Default": 1000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1000", + "ValueStr": "1000", + "Type": "int" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 323331982, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,LeftPeriod,RightPeriod,InvalidUtf8,Dot,Exclamation", + "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,LeftPeriod,RightPeriod,InvalidUtf8,Dot,Exclamation", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "google cloud storage", + "Description": "Google Cloud Storage (this is not Google Drive)", + "Prefix": "gcs", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 
6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "project_number", + "FieldName": "", + "Help": "Project number.\n\nOptional - needed only for list/create/delete buckets - see your developer console.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "user_project", + "FieldName": "", + "Help": "User project.\n\nOptional - needed only for requester pays.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "service_account_file", + "FieldName": "", + "Help": "Service Account Credentials JSON file path.\n\nLeave blank normally.\nNeeded only if you want use SA instead of interactive login.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "service_account_credentials", + "FieldName": "", + "Help": "Service Account Credentials JSON blob.\n\nLeave blank normally.\nNeeded only if you want use SA instead of interactive login.", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "access_token", + "FieldName": "", + "Help": "Short-lived access token.\n\nLeave blank normally.\nNeeded only if you want use short-lived access token instead of interactive login.", + "Default": "", + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "anonymous", + "FieldName": "", + "Help": "Access public buckets and objects without credentials.\n\nSet to 'true' if you just want to download files and don't configure credentials.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "object_acl", + "FieldName": "", + "Help": "Access Control List for new objects.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "authenticatedRead", + "Help": "Object owner gets OWNER access.\nAll Authenticated Users get READER access." + }, + { + "Value": "bucketOwnerFullControl", + "Help": "Object owner gets OWNER access.\nProject team owners get OWNER access." + }, + { + "Value": "bucketOwnerRead", + "Help": "Object owner gets OWNER access.\nProject team owners get READER access." + }, + { + "Value": "private", + "Help": "Object owner gets OWNER access.\nDefault if left blank." 
+ }, + { + "Value": "projectPrivate", + "Help": "Object owner gets OWNER access.\nProject team members get access according to their roles." + }, + { + "Value": "publicRead", + "Help": "Object owner gets OWNER access.\nAll Users get READER access." + } ], - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "chunker", - "Description": "Transparently chunk/split large files", - "Prefix": "chunker", - "Options": [ - { - "Name": "remote", - "Help": "Remote to chunk/unchunk.\n\nNormally should contain a ':' and a path, e.g. \"myremote:path/to/dir\",\n\"myremote:bucket\" or maybe \"myremote:\" (not recommended).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "chunk_size", - "Help": "Files larger than chunk size will be split in chunks.", - "Provider": "", - "Default": 2147483648, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "2Gi", - "ValueStr": "2Gi", - "Type": "SizeSuffix" - }, - { - "Name": "name_format", - "Help": "String format of chunk file names.\n\nThe two placeholders are: base file name (*) and chunk number (#...).\nThere must be one and only one asterisk and one or more consecutive hash characters.\nIf chunk number has less digits than the number of hashes, it is left-padded by zeros.\nIf there are more digits in the number, they are left as is.\nPossible chunk files are ignored if their name does not match given format.", - "Provider": "", - "Default": "*.rclone_chunk.###", - "Value": null, - "ShortOpt": "", - "Hide": 1, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "*.rclone_chunk.###", - "ValueStr": "*.rclone_chunk.###", - "Type": "string" - }, - { - "Name": "start_from", - "Help": "Minimum valid chunk number. 
Usually 0 or 1.\n\nBy default chunk numbers start from 1.", - "Provider": "", - "Default": 1, - "Value": null, - "ShortOpt": "", - "Hide": 1, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1", - "ValueStr": "1", - "Type": "int" - }, - { - "Name": "meta_format", - "Help": "Format of the metadata object or \"none\".\n\nBy default \"simplejson\".\nMetadata is a small JSON file named after the composite file.", - "Provider": "", - "Default": "simplejson", - "Value": null, - "Examples": [ - { - "Value": "none", - "Help": "Do not use metadata files at all.\nRequires hash type \"none\".", - "Provider": "" - }, - { - "Value": "simplejson", - "Help": "Simple JSON supports hash sums and chunk validation.\n\nIt has the following fields: ver, size, nchunks, md5, sha1.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 1, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "simplejson", - "ValueStr": "simplejson", - "Type": "string" - }, - { - "Name": "hash_type", - "Help": "Choose how chunker handles hash sums.\n\nAll modes but \"none\" require metadata.", - "Provider": "", - "Default": "md5", - "Value": null, - "Examples": [ - { - "Value": "none", - "Help": "Pass any hash supported by wrapped remote for non-chunked files.\nReturn nothing otherwise.", - "Provider": "" - }, - { - "Value": "md5", - "Help": "MD5 for composite files.", - "Provider": "" - }, - { - "Value": "sha1", - "Help": "SHA1 for composite files.", - "Provider": "" - }, - { - "Value": "md5all", - "Help": "MD5 for all files.", - "Provider": "" - }, - { - "Value": "sha1all", - "Help": "SHA1 for all files.", - "Provider": "" - }, - { - "Value": "md5quick", - "Help": "Copying a file to chunker will request MD5 from the source.\nFalling back to SHA1 if unsupported.", - "Provider": "" - }, - { - "Value": "sha1quick", - "Help": "Similar to \"md5quick\" but prefers SHA1 over MD5.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "md5", - "ValueStr": "md5", - "Type": "string" - }, - { - "Name": "fail_hard", - "Help": "Choose how chunker should handle files with missing or invalid chunks.", - "Provider": "", - "Default": false, - "Value": null, - "Examples": [ - { - "Value": "true", - "Help": "Report errors and abort current command.", - "Provider": "" - }, - { - "Value": "false", - "Help": "Warn user, skip incomplete file and proceed.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "transactions", - "Help": "Choose how chunker should handle temporary files during transactions.", - "Provider": "", - "Default": "rename", - "Value": null, - "Examples": [ - { - "Value": "rename", - "Help": "Rename temporary files after a successful transaction.", - "Provider": "" - }, - { - "Value": "norename", - "Help": "Leave temporary file names and write transaction ID to metadata file.\nMetadata is required for no rename transactions (meta format cannot be \"none\").\nIf you are using norename transactions you should be careful not to downgrade Rclone\nas older versions of Rclone don't support 
this transaction style and will misinterpret\nfiles manipulated by norename transactions.\nThis method is EXPERIMENTAL, don't use on production systems.", - "Provider": "" - }, - { - "Value": "auto", - "Help": "Rename or norename will be used depending on capabilities of the backend.\nIf meta format is set to \"none\", rename transactions will always be used.\nThis method is EXPERIMENTAL, don't use on production systems.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 1, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "rename", - "ValueStr": "rename", - "Type": "string" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "bucket_acl", + "FieldName": "", + "Help": "Access Control List for new buckets.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "authenticatedRead", + "Help": "Project team owners get OWNER access.\nAll Authenticated Users get READER access." + }, + { + "Value": "private", + "Help": "Project team owners get OWNER access.\nDefault if left blank." + }, + { + "Value": "projectPrivate", + "Help": "Project team members get access according to their roles." + }, + { + "Value": "publicRead", + "Help": "Project team owners get OWNER access.\nAll Users get READER access." + }, + { + "Value": "publicReadWrite", + "Help": "Project team owners get OWNER access.\nAll Users get WRITER access." + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "combine", - "Description": "Combine several remotes into one", - "Prefix": "combine", - "Options": [ - { - "Name": "upstreams", - "Help": "Upstreams for combining\n\nThese should be in the form\n\n dir=remote:path dir2=remote2:path\n\nWhere before the = is specified the root directory and after is the remote to\nput there.\n\nEmbedded spaces can be added using quotes\n\n \"dir=remote:path with space\" \"dir2=remote2:path with space\"\n\n", - "Provider": "", - "Default": null, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "SpaceSepList" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "bucket_policy_only", + "FieldName": "", + "Help": "Access checks should use bucket-level IAM policies.\n\nIf you want to upload objects to a bucket with Bucket Policy Only set\nthen you will need to set this.\n\nWhen it is set, rclone:\n\n- ignores ACLs set on buckets\n- ignores ACLs set on objects\n- creates buckets with Bucket Policy Only set\n\nDocs: https://cloud.google.com/storage/docs/bucket-policy-only\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "location", + "FieldName": "", + "Help": "Location for the newly created buckets.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Empty for default location (US)" + }, + { 
+ "Value": "asia", + "Help": "Multi-regional location for Asia" + }, + { + "Value": "eu", + "Help": "Multi-regional location for Europe" + }, + { + "Value": "us", + "Help": "Multi-regional location for United States" + }, + { + "Value": "asia-east1", + "Help": "Taiwan" + }, + { + "Value": "asia-east2", + "Help": "Hong Kong" + }, + { + "Value": "asia-northeast1", + "Help": "Tokyo" + }, + { + "Value": "asia-northeast2", + "Help": "Osaka" + }, + { + "Value": "asia-northeast3", + "Help": "Seoul" + }, + { + "Value": "asia-south1", + "Help": "Mumbai" + }, + { + "Value": "asia-south2", + "Help": "Delhi" + }, + { + "Value": "asia-southeast1", + "Help": "Singapore" + }, + { + "Value": "asia-southeast2", + "Help": "Jakarta" + }, + { + "Value": "australia-southeast1", + "Help": "Sydney" + }, + { + "Value": "australia-southeast2", + "Help": "Melbourne" + }, + { + "Value": "europe-north1", + "Help": "Finland" + }, + { + "Value": "europe-west1", + "Help": "Belgium" + }, + { + "Value": "europe-west2", + "Help": "London" + }, + { + "Value": "europe-west3", + "Help": "Frankfurt" + }, + { + "Value": "europe-west4", + "Help": "Netherlands" + }, + { + "Value": "europe-west6", + "Help": "Zürich" + }, + { + "Value": "europe-central2", + "Help": "Warsaw" + }, + { + "Value": "us-central1", + "Help": "Iowa" + }, + { + "Value": "us-east1", + "Help": "South Carolina" + }, + { + "Value": "us-east4", + "Help": "Northern Virginia" + }, + { + "Value": "us-west1", + "Help": "Oregon" + }, + { + "Value": "us-west2", + "Help": "California" + }, + { + "Value": "us-west3", + "Help": "Salt Lake City" + }, + { + "Value": "us-west4", + "Help": "Las Vegas" + }, + { + "Value": "northamerica-northeast1", + "Help": "Montréal" + }, + { + "Value": "northamerica-northeast2", + "Help": "Toronto" + }, + { + "Value": "southamerica-east1", + "Help": "São Paulo" + }, + { + "Value": "southamerica-west1", + "Help": "Santiago" + }, + { + "Value": "asia1", + "Help": "Dual region: asia-northeast1 and asia-northeast2." + }, + { + "Value": "eur4", + "Help": "Dual region: europe-north1 and europe-west4." + }, + { + "Value": "nam4", + "Help": "Dual region: us-central1 and us-east1." + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": { - "System": null, - "Help": "Any metadata supported by the underlying remote is read and written." 
+ "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "storage_class", + "FieldName": "", + "Help": "The storage class to use when storing objects in Google Cloud Storage.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Default" + }, + { + "Value": "MULTI_REGIONAL", + "Help": "Multi-regional storage class" + }, + { + "Value": "REGIONAL", + "Help": "Regional storage class" + }, + { + "Value": "NEARLINE", + "Help": "Nearline storage class" + }, + { + "Value": "COLDLINE", + "Help": "Coldline storage class" + }, + { + "Value": "ARCHIVE", + "Help": "Archive storage class" + }, + { + "Value": "DURABLE_REDUCED_AVAILABILITY", + "Help": "Durable reduced availability storage class" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "directory_markers", + "FieldName": "", + "Help": "Upload an empty object with a trailing slash when a new directory is created\n\nEmpty folders are unsupported for bucket based remotes, this option creates an empty\nobject ending with \"/\", to persist the folder.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_check_bucket", + "FieldName": "", + "Help": "If set, don't attempt to check the bucket exists or create it.\n\nThis can be useful when trying to minimise the number of transactions\nrclone does if you know the bucket exists already.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "decompress", + "FieldName": "", + "Help": "If set this will decompress gzip encoded objects.\n\nIt is possible to upload objects to GCS with \"Content-Encoding: gzip\"\nset. Normally rclone will download these files as compressed objects.\n\nIf this flag is set then rclone will decompress these files with\n\"Content-Encoding: gzip\" as they are received. 
This means that rclone\ncan't check the size and hash but the file contents will be decompressed.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for the service.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50348034, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,CrLf,InvalidUtf8,Dot", + "ValueStr": "Slash,CrLf,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "env_auth", + "FieldName": "", + "Help": "Get GCP IAM credentials from runtime (environment variables or instance meta data if no env vars).\n\nOnly applies if service_account_file and service_account_credentials is blank.", + "Default": false, + "Value": null, + "Examples": [ + { + "Value": "false", + "Help": "Enter credentials in the next step." + }, + { + "Value": "true", + "Help": "Get GCP IAM credentials from the environment (env vars or IAM)." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "google photos", + "Description": "Google Photos", + "Prefix": "gphotos", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 
0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "read_only", + "FieldName": "", + "Help": "Set to make the Google Photos backend read only.\n\nIf you choose read only then rclone will only request read only access\nto your photos, otherwise rclone will request full access.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "read_size", + "FieldName": "", + "Help": "Set to read the size of media items.\n\nNormally rclone does not read the size of media items since this takes\nanother transaction. This isn't necessary for syncing. However\nrclone mount needs to know the size of files in advance of reading\nthem, so setting this flag when using rclone mount is recommended if\nyou want to read the media.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "start_year", + "FieldName": "", + "Help": "Year limits the photos to be downloaded to those which are uploaded after the given year.", + "Default": 2000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "2000", + "ValueStr": "2000", + "Type": "int" + }, + { + "Name": "include_archived", + "FieldName": "", + "Help": "Also view and download archived media.\n\nBy default, rclone does not request archived media. 
Thus, when syncing,\narchived media is not visible in directory listings or transferred.\n\nNote that media in albums is always visible and synced, no matter\ntheir archive status.\n\nWith this flag, archived media are always visible in directory\nlistings and transferred.\n\nWithout this flag, archived media will not be visible in directory\nlistings and won't be transferred.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "proxy", + "FieldName": "", + "Help": "Use the gphotosdl proxy for downloading the full resolution images\n\nThe Google API will deliver images and video which aren't full\nresolution, and/or have EXIF data missing.\n\nHowever if you use the gphotosdl proxy then you can download original,\nunchanged images.\n\nThis runs a headless browser in the background.\n\nDownload the software from [gphotosdl](https://github.com/rclone/gphotosdl)\n\nFirst run with\n\n gphotosdl -login\n\nThen once you have logged into google photos close the browser window\nand run\n\n gphotosdl\n\nThen supply the parameter `--gphotos-proxy \"http://localhost:8282\"` to make\nrclone use the proxy.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50348034, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,CrLf,InvalidUtf8,Dot", + "ValueStr": "Slash,CrLf,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "batch_mode", + "FieldName": "", + "Help": "Upload file batching sync|async|off.\n\nThis sets the batch mode used by rclone.\n\nThis has 3 possible values\n\n- off - no batching\n- sync - batch uploads and check completion (default)\n- async - batch upload and don't check completion\n\nRclone will close any outstanding batches when it exits which may make\na delay on quit.\n", + "Default": "sync", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "sync", + "ValueStr": "sync", + "Type": "string" + }, + { + "Name": "batch_size", + "FieldName": "", + "Help": "Max number of files in upload batch.\n\nThis sets the batch size of files to upload. It has to be less than 50.\n\nBy default this is 0 which means rclone will calculate the batch size\ndepending on the setting of batch_mode.\n\n- batch_mode: async - default batch_size is 50\n- batch_mode: sync - default batch_size is the same as --transfers\n- batch_mode: off - not in use\n\nRclone will close any outstanding batches when it exits which may make\na delay on quit.\n\nSetting this is a great idea if you are uploading lots of small files\nas it will make them a lot quicker. 
You can use --transfers 32 to\nmaximise throughput.\n", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0", + "ValueStr": "0", + "Type": "int" + }, + { + "Name": "batch_timeout", + "FieldName": "", + "Help": "Max time to allow an idle upload batch before uploading.\n\nIf an upload batch is idle for more than this long then it will be\nuploaded.\n\nThe default for this is 0 which means rclone will choose a sensible\ndefault based on the batch_mode in use.\n\n- batch_mode: async - default batch_timeout is 10s\n- batch_mode: sync - default batch_timeout is 1s\n- batch_mode: off - not in use\n", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0s", + "ValueStr": "0s", + "Type": "Duration" + }, + { + "Name": "batch_commit_timeout", + "FieldName": "", + "Help": "Max time to wait for a batch to finish committing. (no longer used)", + "Default": 600000000000, + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10m0s", + "ValueStr": "10m0s", + "Type": "Duration" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "hasher", + "Description": "Better checksums for other remotes", + "Prefix": "hasher", + "Options": [ + { + "Name": "remote", + "FieldName": "", + "Help": "Remote to cache checksums for (e.g. 
myRemote:path).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "hashes", + "FieldName": "", + "Help": "Comma separated list of supported checksum types.", + "Default": [ + "md5", + "sha1" + ], + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "md5,sha1", + "ValueStr": "md5,sha1", + "Type": "CommaSepList" + }, + { + "Name": "max_age", + "FieldName": "", + "Help": "Maximum time to keep checksums in cache (0 = no cache, off = cache forever).", + "Default": 9223372036854775807, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "off", + "ValueStr": "off", + "Type": "Duration" + }, + { + "Name": "auto_size", + "FieldName": "", + "Help": "Auto-update checksum for files smaller than this size (disabled by default).", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0", + "ValueStr": "0", + "Type": "SizeSuffix" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "drop", + "Short": "Drop cache", + "Long": "Completely drop checksum cache.\nUsage Example:\n rclone backend drop hasher:\n", + "Opts": null + }, + { + "Name": "dump", + "Short": "Dump the database", + "Long": "Dump cache records covered by the current remote", + "Opts": null + }, + { + "Name": "fulldump", + "Short": "Full dump of the database", + "Long": "Dump all cache records in the database", + "Opts": null + }, + { + "Name": "import", + "Short": "Import a SUM file", + "Long": "Amend hash cache from a SUM file and bind checksums to files by size/time.\nUsage Example:\n rclone backend import hasher:subdir md5 /path/to/sum.md5\n", + "Opts": null + }, + { + "Name": "stickyimport", + "Short": "Perform fast import of a SUM file", + "Long": "Fill hash cache from a SUM file without verifying file fingerprints.\nUsage Example:\n rclone backend stickyimport hasher:subdir md5 remote:path/to/sum.md5\n", + "Opts": null + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": null, + "Help": "Any metadata supported by the underlying remote is read and written." + } + }, + { + "Name": "hdfs", + "Description": "Hadoop distributed file system", + "Prefix": "hdfs", + "Options": [ + { + "Name": "namenode", + "FieldName": "", + "Help": "Hadoop name nodes and ports.\n\nE.g. 
\"namenode-1:8020,namenode-2:8020,...\" to connect to host namenodes at port 8020.", + "Default": [], + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "CommaSepList" + }, + { + "Name": "username", + "FieldName": "", + "Help": "Hadoop user name.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "root", + "Help": "Connect to hdfs as root." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "service_principal_name", + "FieldName": "", + "Help": "Kerberos service principal name for the namenode.\n\nEnables KERBEROS authentication. Specifies the Service Principal Name\n(SERVICE/FQDN) for the namenode. E.g. \\\"hdfs/namenode.hadoop.docker\\\"\nfor namenode running as service 'hdfs' with FQDN 'namenode.hadoop.docker'.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "data_transfer_protection", + "FieldName": "", + "Help": "Kerberos data transfer protection: authentication|integrity|privacy.\n\nSpecifies whether or not authentication, data signature integrity\nchecks, and wire encryption are required when communicating with\nthe datanodes. Possible values are 'authentication', 'integrity'\nand 'privacy'. Used only with KERBEROS enabled.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "privacy", + "Help": "Ensure authentication, integrity and encryption enabled." 
+ } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50430082, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,Colon,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,Colon,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "hidrive", + "Description": "HiDrive", + "Prefix": "hidrive", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "scope_access", + "FieldName": "", + "Help": "Access permissions that rclone should use when requesting access from HiDrive.", + "Default": "rw", + "Value": null, + 
"Examples": [ + { + "Value": "rw", + "Help": "Read and write access to resources." + }, + { + "Value": "ro", + "Help": "Read-only access to resources." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "rw", + "ValueStr": "rw", + "Type": "string" + }, + { + "Name": "scope_role", + "FieldName": "", + "Help": "User-level that rclone should use when requesting access from HiDrive.", + "Default": "user", + "Value": null, + "Examples": [ + { + "Value": "user", + "Help": "User-level access to management permissions.\nThis will be sufficient in most cases." + }, + { + "Value": "admin", + "Help": "Extensive access to management permissions." + }, + { + "Value": "owner", + "Help": "Full access to management permissions." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "user", + "ValueStr": "user", + "Type": "string" + }, + { + "Name": "root_prefix", + "FieldName": "", + "Help": "The root/parent folder for all paths.\n\nFill in to use the specified folder as the parent for all paths given to the remote.\nThis way rclone can use any folder as its starting point.", + "Default": "/", + "Value": null, + "Examples": [ + { + "Value": "/", + "Help": "The topmost directory accessible by rclone.\nThis will be equivalent with \"root\" if rclone uses a regular HiDrive user account." + }, + { + "Value": "root", + "Help": "The topmost directory of the HiDrive user account" + }, + { + "Value": "", + "Help": "This specifies that there is no root-prefix for your paths.\nWhen using this you will always need to specify paths to this remote with a valid parent e.g. \"remote:/path/to/dir\" or \"remote:root/path/to/dir\"." 
+ } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "/", + "ValueStr": "/", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for the service.\n\nThis is the URL that API-calls will be made to.", + "Default": "https://api.hidrive.strato.com/2.1", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "https://api.hidrive.strato.com/2.1", + "ValueStr": "https://api.hidrive.strato.com/2.1", + "Type": "string" + }, + { + "Name": "disable_fetching_member_count", + "FieldName": "", + "Help": "Do not fetch number of objects in directories unless it is absolutely necessary.\n\nRequests may be faster if the number of objects in subdirectories is not fetched.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Chunksize for chunked uploads.\n\nAny files larger than the configured cutoff (or files of unknown size) will be uploaded in chunks of this size.\n\nThe upper limit for this is 2147483647 bytes (about 2.000Gi).\nThat is the maximum amount of bytes a single upload-operation will support.\nSetting this above the upper limit or to a negative value will cause uploads to fail.\n\nSetting this to larger values may increase the upload speed at the cost of using more memory.\nIt can be set to smaller values to save on memory.", + "Default": 50331648, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "48Mi", + "ValueStr": "48Mi", + "Type": "SizeSuffix" + }, + { + "Name": "upload_cutoff", + "FieldName": "", + "Help": "Cutoff/Threshold for chunked uploads.\n\nAny files larger than this will be uploaded in chunks of the configured chunksize.\n\nThe upper limit for this is 2147483647 bytes (about 2.000Gi).\nThat is the maximum amount of bytes a single upload-operation will support.\nSetting this above the upper limit will cause uploads to fail.", + "Default": 100663296, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "96Mi", + "ValueStr": "96Mi", + "Type": "SizeSuffix" + }, + { + "Name": "upload_concurrency", + "FieldName": "", + "Help": "Concurrency for chunked uploads.\n\nThis is the upper limit for how many transfers for the same file are running concurrently.\nSetting this to a value smaller than 1 will cause uploads to deadlock.\n\nIf you are uploading small numbers of large files over high-speed links\nand these uploads do not fully utilize your bandwidth, then increasing\nthis may help to speed up the transfers.", + "Default": 4, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "4", + "ValueStr": "4", + "Type": "int" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", +
"Default": 33554434, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,Dot", + "ValueStr": "Slash,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "http", + "Description": "HTTP", + "Prefix": "http", + "Options": [ + { + "Name": "url", + "FieldName": "", + "Help": "URL of HTTP host to connect to.\n\nE.g. \"https://example.com\", or \"https://user:pass@example.com\" to use a username and password.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "headers", + "FieldName": "", + "Help": "Set HTTP headers for all transactions.\n\nUse this to set additional HTTP headers for all transactions.\n\nThe input format is comma separated list of key,value pairs. Standard\n[CSV encoding](https://godoc.org/encoding/csv) may be used.\n\nFor example, to set a Cookie use 'Cookie,name=value', or '\"Cookie\",\"name=value\"'.\n\nYou can set multiple headers, e.g. '\"Cookie\",\"name=value\",\"Authorization\",\"xxx\"'.", + "Default": [], + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "CommaSepList" + }, + { + "Name": "no_slash", + "FieldName": "", + "Help": "Set this if the site doesn't end directories with /.\n\nUse this if your target website does not use / on the end of\ndirectories.\n\nA / on the end of a path is how rclone normally tells the difference\nbetween files and directories. If this flag is set, then rclone will\ntreat all files with Content-Type: text/html as directories and read\nURLs from them rather than downloading them.\n\nNote that this may cause rclone to confuse genuine HTML files with\ndirectories.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_head", + "FieldName": "", + "Help": "Don't use HEAD requests.\n\nHEAD requests are mainly used to find file sizes in dir listing.\nIf your site is being very slow to load then you can try this option.\nNormally rclone does a HEAD request for each potential file in a\ndirectory listing to:\n\n- find its size\n- check it really exists\n- check to see if it is a directory\n\nIf you set this option, rclone will not do the HEAD request. 
This will mean\nthat directory listings are much quicker, but rclone won't have the times or\nsizes of any files, and some files that don't exist may be in the listing.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_escape", + "FieldName": "", + "Help": "Do not escape URL metacharacters in path names.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "set", + "Short": "Set command for updating the config parameters.", + "Long": "This set command can be used to update the config parameters\nfor a running http backend.\n\nUsage Examples:\n\n rclone backend set remote: [-o opt_name=opt_value] [-o opt_name2=opt_value2]\n rclone rc backend/command command=set fs=remote: [-o opt_name=opt_value] [-o opt_name2=opt_value2]\n rclone rc backend/command command=set fs=remote: -o url=https://example.com\n\nThe option keys are named as they are in the config file.\n\nThis rebuilds the connection to the http backend when it is called with\nthe new parameters. Only new parameters need be passed as the values\nwill default to those currently in use.\n\nIt doesn't return anything.\n", + "Opts": null + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "iclouddrive", + "Description": "iCloud Drive", + "Prefix": "iclouddrive", + "Options": [ + { + "Name": "apple_id", + "FieldName": "", + "Help": "Apple ID.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password", + "FieldName": "", + "Help": "Password.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "trust_token", + "FieldName": "", + "Help": "Trust token (internal use)", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "cookies", + "FieldName": "", + "Help": "cookies (internal use only)", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_id", + "FieldName": "", + "Help": "Client id", + "Default": "d39ba9916b7251055b22c7f910e2ea796ee65e98b2ddecea8f5dde8d9d1a815d", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + 
"NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "d39ba9916b7251055b22c7f910e2ea796ee65e98b2ddecea8f5dde8d9d1a815d", + "ValueStr": "d39ba9916b7251055b22c7f910e2ea796ee65e98b2ddecea8f5dde8d9d1a815d", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50438146, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "imagekit", + "Description": "ImageKit.io", + "Prefix": "imagekit", + "Options": [ + { + "Name": "endpoint", + "FieldName": "", + "Help": "You can find your ImageKit.io URL endpoint in your [dashboard](https://imagekit.io/dashboard/developer/api-keys)", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "public_key", + "FieldName": "", + "Help": "You can find your ImageKit.io public key in your [dashboard](https://imagekit.io/dashboard/developer/api-keys)", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "private_key", + "FieldName": "", + "Help": "You can find your ImageKit.io private key in your [dashboard](https://imagekit.io/dashboard/developer/api-keys)", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "only_signed", + "FieldName": "", + "Help": "If you have configured `Restrict unsigned image URLs` in your dashboard settings, set this to true.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "versions", + "FieldName": "", + "Help": "Include old versions in directory listings.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "upload_tags", + "FieldName": "", + "Help": "Tags to add to the uploaded files, e.g. 
\"tag1,tag2\".", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 117553486, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,Dollar,Question,Hash,Percent,BackSlash,Del,Ctl,InvalidUtf8,Dot,SquareBracket", + "ValueStr": "Slash,LtGt,DoubleQuote,Dollar,Question,Hash,Percent,BackSlash,Del,Ctl,InvalidUtf8,Dot,SquareBracket", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": { + "aws-tags": { + "Help": "AI generated tags by AWS Rekognition associated with the image", + "Type": "string", + "Example": "tag1,tag2", + "ReadOnly": true + }, + "btime": { + "Help": "Time of file birth (creation) read from Last-Modified header", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z07:00", + "ReadOnly": true + }, + "custom-coordinates": { + "Help": "Custom coordinates of the file", + "Type": "string", + "Example": "0,0,100,100", + "ReadOnly": true + }, + "file-type": { + "Help": "Type of the file", + "Type": "string", + "Example": "image", + "ReadOnly": true + }, + "google-tags": { + "Help": "AI generated tags by Google Cloud Vision associated with the image", + "Type": "string", + "Example": "tag1,tag2", + "ReadOnly": true + }, + "has-alpha": { + "Help": "Whether the image has alpha channel or not", + "Type": "bool", + "Example": "", + "ReadOnly": true + }, + "height": { + "Help": "Height of the image or video in pixels", + "Type": "int", + "Example": "", + "ReadOnly": true + }, + "is-private-file": { + "Help": "Whether the file is private or not", + "Type": "bool", + "Example": "", + "ReadOnly": true + }, + "size": { + "Help": "Size of the object in bytes", + "Type": "int64", + "Example": "", + "ReadOnly": true + }, + "tags": { + "Help": "Tags associated with the file", + "Type": "string", + "Example": "tag1,tag2", + "ReadOnly": true + }, + "width": { + "Help": "Width of the image or video in pixels", + "Type": "int", + "Example": "", + "ReadOnly": true + } + }, + "Help": "Any metadata supported by the underlying remote is read and written." 
+ } + }, + { + "Name": "internetarchive", + "Description": "Internet Archive", + "Prefix": "internetarchive", + "Options": [ + { + "Name": "access_key_id", + "FieldName": "", + "Help": "IAS3 Access Key.\n\nLeave blank for anonymous access.\nYou can find one here: https://archive.org/account/s3.php", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "secret_access_key", + "FieldName": "", + "Help": "IAS3 Secret Key (password).\n\nLeave blank for anonymous access.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "IAS3 Endpoint.\n\nLeave blank for default value.", + "Default": "https://s3.us.archive.org", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "https://s3.us.archive.org", + "ValueStr": "https://s3.us.archive.org", + "Type": "string" + }, + { + "Name": "front_endpoint", + "FieldName": "", + "Help": "Host of InternetArchive Frontend.\n\nLeave blank for default value.", + "Default": "https://archive.org", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "https://archive.org", + "ValueStr": "https://archive.org", + "Type": "string" + }, + { + "Name": "item_metadata", + "FieldName": "", + "Help": "Metadata to be set on the IA item, this is different from file-level metadata that can be set using --metadata-set.\nFormat is key=value and the 'x-archive-meta-' prefix is automatically added.", + "Default": [], + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "[]", + "ValueStr": "", + "Type": "stringArray" + }, + { + "Name": "item_derive", + "FieldName": "", + "Help": "Whether to trigger derive on the IA item or not. 
If set to false, the item will not be derived by IA upon upload.\nThe derive process produces a number of secondary files from an upload to make an upload more usable on the web.\nSetting this to false is useful for uploading files that are already in a format that IA can display or reduce burden on IA's infrastructure.", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "disable_checksum", + "FieldName": "", + "Help": "Don't ask the server to test against MD5 checksum calculated by rclone.\nNormally rclone will calculate the MD5 checksum of the input before\nuploading it so it can ask the server to check the object against checksum.\nThis is great for data integrity checking but can cause long delays for\nlarge files to start uploading.", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "wait_archive", + "FieldName": "", + "Help": "Timeout for waiting the server's processing tasks (specifically archive and book_op) to finish.\nOnly enable if you need to be guaranteed to be reflected after write operations.\n0 to disable waiting. No errors to be thrown in case of timeout.", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0s", + "ValueStr": "0s", + "Type": "Duration" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50446342, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,CrLf,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,CrLf,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": { + "crc32": { + "Help": "CRC32 calculated by Internet Archive", + "Type": "string", + "Example": "01234567", + "ReadOnly": true + }, + "format": { + "Help": "Name of format identified by Internet Archive", + "Type": "string", + "Example": "Comma-Separated Values", + "ReadOnly": true + }, + "md5": { + "Help": "MD5 hash calculated by Internet Archive", + "Type": "string", + "Example": "01234567012345670123456701234567", + "ReadOnly": true + }, + "mtime": { + "Help": "Time of last modification, managed by Rclone", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z", + "ReadOnly": true + }, + "name": { + "Help": "Full file path, without the bucket part", + "Type": "filename", + "Example": "backend/internetarchive/internetarchive.go", + "ReadOnly": true + }, + "old_version": { + "Help": "Whether the file was replaced and moved by keep-old-version flag", + 
"Type": "boolean", + "Example": "true", + "ReadOnly": true + }, + "rclone-ia-mtime": { + "Help": "Time of last modification, managed by Internet Archive", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z", + "ReadOnly": false + }, + "rclone-mtime": { + "Help": "Time of last modification, managed by Rclone", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z", + "ReadOnly": false + }, + "rclone-update-track": { + "Help": "Random value used by Rclone for tracking changes inside Internet Archive", + "Type": "string", + "Example": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + "ReadOnly": false + }, + "sha1": { + "Help": "SHA1 hash calculated by Internet Archive", + "Type": "string", + "Example": "0123456701234567012345670123456701234567", + "ReadOnly": true + }, + "size": { + "Help": "File size in bytes", + "Type": "decimal number", + "Example": "123456", + "ReadOnly": true + }, + "source": { + "Help": "The source of the file", + "Type": "string", + "Example": "original", + "ReadOnly": true + }, + "summation": { + "Help": "Check https://forum.rclone.org/t/31922 for how it is used", + "Type": "string", + "Example": "md5", + "ReadOnly": true + }, + "viruscheck": { + "Help": "The last time viruscheck process was run for the file (?)", + "Type": "unixtime", + "Example": "1654191352", + "ReadOnly": true + } + }, + "Help": "Metadata fields provided by Internet Archive.\nIf there are multiple values for a key, only the first one is returned.\nThis is a limitation of Rclone, that supports one value per one key.\n\nOwner is able to add custom keys. Metadata feature grabs all the keys including them.\n" + } + }, + { + "Name": "jottacloud", + "Description": "Jottacloud", + "Prefix": "jottacloud", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use 
client credentials OAuth flow.\n\nThis will use the OAuth2 client credentials flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "md5_memory_limit", + "FieldName": "", + "Help": "Files bigger than this will be cached on disk to calculate the MD5 if required.", + "Default": 10485760, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10Mi", + "ValueStr": "10Mi", + "Type": "SizeSuffix" + }, + { + "Name": "trashed_only", + "FieldName": "", + "Help": "Only show files that are in the trash.\n\nThis will show trashed files in their original directory structure.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "hard_delete", + "FieldName": "", + "Help": "Delete files permanently rather than putting them into the trash.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "upload_resume_limit", + "FieldName": "", + "Help": "Files bigger than this can be resumed if the upload fails.", + "Default": 10485760, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10Mi", + "ValueStr": "10Mi", + "Type": "SizeSuffix" + }, + { + "Name": "no_versions", + "FieldName": "", + "Help": "Avoid server side versioning by deleting files and recreating files instead of overwriting them.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50431886, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": { + "btime": { + "Help": "Time of file birth (creation), read from rclone metadata", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z07:00", + "ReadOnly": false + }, + "content-type": { + "Help": 
"MIME type, also known as media type", + "Type": "string", + "Example": "text/plain", + "ReadOnly": true + }, + "mtime": { + "Help": "Time of last modification, read from rclone metadata", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z07:00", + "ReadOnly": false + }, + "utime": { + "Help": "Time of last upload, when current revision was created, generated by backend", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z07:00", + "ReadOnly": true } - }, - { - "Name": "compress", - "Description": "Compress a remote", - "Prefix": "compress", - "Options": [ - { - "Name": "remote", - "Help": "Remote to compress.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "mode", - "Help": "Compression mode.", - "Provider": "", - "Default": "gzip", - "Value": null, - "Examples": [ - { - "Value": "gzip", - "Help": "Standard gzip compression with fastest parameters.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "gzip", - "ValueStr": "gzip", - "Type": "string" - }, - { - "Name": "level", - "Help": "GZIP compression level (-2 to 9).\n\nGenerally -1 (default, equivalent to 5) is recommended.\nLevels 1 to 9 increase compression at the cost of speed. Going past 6 \ngenerally offers very little return.\n\nLevel -2 uses Huffman encoding only. Only use if you know what you\nare doing.\nLevel 0 turns off compression.", - "Provider": "", - "Default": -1, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "-1", - "ValueStr": "-1", - "Type": "int" - }, - { - "Name": "ram_cache_limit", - "Help": "Some remotes don't allow the upload of files with unknown size.\nIn this case the compressed file will need to be cached to determine\nit's size.\n\nFiles smaller than this limit will be cached in RAM, files larger than \nthis limit will be cached on disk.", - "Provider": "", - "Default": 20971520, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "20Mi", - "ValueStr": "20Mi", - "Type": "SizeSuffix" - } + }, + "Help": "Jottacloud has limited support for metadata, currently an extended set of timestamps." 
+ } + }, + { + "Name": "koofr", + "Description": "Koofr, Digi Storage and other Koofr-compatible storage providers", + "Prefix": "koofr", + "Options": [ + { + "Name": "provider", + "FieldName": "", + "Help": "Choose your storage provider.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "koofr", + "Help": "Koofr, https://app.koofr.net/" + }, + { + "Value": "digistorage", + "Help": "Digi Storage, https://storage.rcs-rds.ro/" + }, + { + "Value": "other", + "Help": "Any other Koofr API compatible storage service" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "The Koofr API endpoint to use.", + "Provider": "other", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "mountid", + "FieldName": "", + "Help": "Mount ID of the mount to use.\n\nIf omitted, the primary mount is used.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "setmtime", + "FieldName": "", + "Help": "Does the backend support setting modification time.\n\nSet this to false if you use a mount ID that points to a Dropbox or Amazon Drive backend.", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "user", + "FieldName": "", + "Help": "Your user name.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password", + "FieldName": "", + "Help": "Your password for rclone (generate one at https://app.koofr.net/app/admin/preferences/password).", + "Provider": "koofr", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password", + "FieldName": "", + "Help": "Your password for rclone (generate one at https://storage.rcs-rds.ro/app/admin/preferences/password).", + "Provider": "digistorage", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password", + "FieldName": "", + "Help": "Your password for rclone (generate one at your service's settings page).", + "Provider": "other", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the 
[encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50438146, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "linkbox", + "Description": "Linkbox", + "Prefix": "linkbox", + "Options": [ + { + "Name": "token", + "FieldName": "", + "Help": "Token from https://www.linkbox.to/admin/account", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "local", + "Description": "Local Disk", + "Prefix": "local", + "Options": [ + { + "Name": "nounc", + "FieldName": "", + "Help": "Disable UNC (long path names) conversion on Windows.", + "Default": false, + "Value": null, + "Examples": [ + { + "Value": "true", + "Help": "Disables long file names." 
+ } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "copy_links", + "FieldName": "", + "Help": "Follow symlinks and copy the pointed to item.", + "Default": false, + "Value": null, + "ShortOpt": "L", + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": true, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "links", + "FieldName": "", + "Help": "Translate symlinks to/from regular files with a '.rclonelink' extension for the local backend.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "skip_links", + "FieldName": "", + "Help": "Don't warn about skipped symlinks.\n\nThis flag disables warning messages on skipped symlinks or junction\npoints, as you explicitly acknowledge that they should be skipped.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": true, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "zero_size_links", + "FieldName": "", + "Help": "Assume the Stat size of links is zero (and read them instead) (deprecated).\n\nRclone used to use the Stat size of links as the link size, but this fails in quite a few places:\n\n- Windows\n- On some virtual filesystems (such as LucidLink)\n- Android\n\nSo rclone now always reads the link.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "unicode_normalization", + "FieldName": "", + "Help": "Apply unicode NFC normalization to paths and filenames.\n\nThis flag can be used to normalize file names into unicode NFC form\nthat are read from the local filesystem.\n\nRclone does not normally touch the encoding of file names it reads from\nthe file system.\n\nThis can be useful when using macOS as it normally provides decomposed (NFD)\nunicode which in some languages (e.g. Korean) doesn't display properly on\nsome OSes.\n\nNote that rclone compares filenames with unicode normalization in the sync\nroutine so this flag shouldn't normally be used.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_check_updated", + "FieldName": "", + "Help": "Don't check to see if the files change during upload.\n\nNormally rclone checks the size and modification time of files as they\nare being uploaded and aborts with a message which starts \"can't copy -\nsource file is being updated\" if the file changes during upload.\n\nHowever on some file systems this modification time check may fail (e.g.\n[Glusterfs #2206](https://github.com/rclone/rclone/issues/2206)) so this\ncheck can be disabled with this flag.\n\nIf this flag is set, rclone will use its best efforts to transfer 
a\nfile which is being updated. If the file is only having things\nappended to it (e.g. a log) then rclone will transfer the log file with\nthe size it had the first time rclone saw it.\n\nIf the file is being modified throughout (not just appended to) then\nthe transfer may fail with a hash check failure.\n\nIn detail, once the file has had stat() called on it for the first\ntime we:\n\n- Only transfer the size that stat gave\n- Only checksum the size that stat gave\n- Don't update the stat info for the file\n\n**NB** do not use this flag on a Windows Volume Shadow (VSS). For some\nunknown reason, files in a VSS sometimes show different sizes from the\ndirectory listing (where the initial stat value comes from on Windows)\nand when stat is called on them directly. Other copy tools always use\nthe direct stat value and setting this flag will disable that.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "one_file_system", + "FieldName": "", + "Help": "Don't cross filesystem boundaries (unix/macOS only).", + "Default": false, + "Value": null, + "ShortOpt": "x", + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": true, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "case_sensitive", + "FieldName": "", + "Help": "Force the filesystem to report itself as case sensitive.\n\nNormally the local backend declares itself as case insensitive on\nWindows/macOS and case sensitive for everything else. Use this flag\nto override the default choice.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "case_insensitive", + "FieldName": "", + "Help": "Force the filesystem to report itself as case insensitive.\n\nNormally the local backend declares itself as case insensitive on\nWindows/macOS and case sensitive for everything else. Use this flag\nto override the default choice.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_clone", + "FieldName": "", + "Help": "Disable reflink cloning for server-side copies.\n\nNormally, for local-to-local transfers, rclone will \"clone\" the file when\npossible, and fall back to \"copying\" only when cloning is not supported.\n\nCloning creates a shallow copy (or \"reflink\") which initially shares blocks with\nthe original file. Unlike a \"hardlink\", the two files are independent and\nneither will affect the other if subsequently modified.\n\nCloning is usually preferable to copying, as it is much faster and is\ndeduplicated by default (i.e. having two identical files does not consume more\nstorage than having just one.) 
However, for use cases where data redundancy is\npreferable, --local-no-clone can be used to disable cloning and force \"deep\" copies.\n\nCurrently, cloning is only supported when using APFS on macOS (support for other\nplatforms may be added in the future.)", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_preallocate", + "FieldName": "", + "Help": "Disable preallocation of disk space for transferred files.\n\nPreallocation of disk space helps prevent filesystem fragmentation.\nHowever, some virtual filesystem layers (such as Google Drive File\nStream) may incorrectly set the actual file size equal to the\npreallocated space, causing checksum and file size checks to fail.\nUse this flag to disable preallocation.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_sparse", + "FieldName": "", + "Help": "Disable sparse files for multi-thread downloads.\n\nOn Windows platforms rclone will make sparse files when doing\nmulti-thread downloads. This avoids long pauses on large files where\nthe OS zeros the file. However sparse files may be undesirable as they\ncause disk fragmentation and can be slow to work with.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_set_modtime", + "FieldName": "", + "Help": "Disable setting modtime.\n\nNormally rclone updates modification time of files after they are done\nuploading. This can cause permissions issues on Linux platforms when \nthe user rclone is running as does not own the file uploaded, such as\nwhen copying to a CIFS mount owned by another user. If this option is \nenabled, rclone will no longer update the modtime after copying a file.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "time_type", + "FieldName": "", + "Help": "Set what kind of time is returned.\n\nNormally rclone does all operations on the mtime or Modification time.\n\nIf you set this flag then rclone will return the Modified time as whatever\nyou set here. So if you use \"rclone lsl --local-time-type ctime\" then\nyou will see ctimes in the listing.\n\nIf the OS doesn't support returning the time_type specified then rclone\nwill silently replace it with the modification time which all OSes support.\n\n- mtime is supported by all OSes\n- atime is supported on all OSes except: plan9, js\n- btime is only supported on: Windows, macOS, freebsd, netbsd\n- ctime is supported on all OSes except: Windows, plan9, js\n\nNote that setting the time will still set the modified time so this is\nonly useful for reading.\n", + "Default": 0, + "Value": null, + "Examples": [ + { + "Value": "mtime", + "Help": "The last modification time." + }, + { + "Value": "atime", + "Help": "The last access time." 
+ }, + { + "Value": "btime", + "Help": "The creation time." + }, + { + "Value": "ctime", + "Help": "The last status change time." + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": { - "System": null, - "Help": "Any metadata supported by the underlying remote is read and written." + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "mtime", + "ValueStr": "mtime", + "Type": "mtime|atime|btime|ctime" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 33554434, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,Dot", + "ValueStr": "Slash,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "noop", + "Short": "A null operation for testing backend commands", + "Long": "This is a test command which has some options\nyou can try to change the output.", + "Opts": { + "echo": "echo the input arguments", + "error": "return an error based on option value" } - }, - { - "Name": "drive", - "Description": "Google Drive", - "Prefix": "drive", - "Options": [ - { - "Name": "client_id", - "Help": "Google Application Client Id\nSetting your own is recommended.\nSee https://rclone.org/drive/#making-your-own-client-id for how to create your own.\nIf you leave this blank, it will use an internal key which is low performance.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": 
false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "scope", - "Help": "Scope that rclone should use when requesting access from drive.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "drive", - "Help": "Full access all files, excluding Application Data Folder.", - "Provider": "" - }, - { - "Value": "drive.readonly", - "Help": "Read-only access to file metadata and file contents.", - "Provider": "" - }, - { - "Value": "drive.file", - "Help": "Access to files created by rclone only.\nThese are visible in the drive website.\nFile authorization is revoked when the user deauthorizes the app.", - "Provider": "" - }, - { - "Value": "drive.appfolder", - "Help": "Allows read and write access to the Application Data folder.\nThis is not visible in the drive website.", - "Provider": "" - }, - { - "Value": "drive.metadata.readonly", - "Help": "Allows read-only access to file metadata but\ndoes not allow any access to read or download file content.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "root_folder_id", - "Help": "ID of the root folder.\nLeave blank normally.\n\nFill in to access \"Computers\" folders (see docs), or for rclone to use\na non root folder as its starting point.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "service_account_file", - "Help": "Service Account Credentials JSON file path.\n\nLeave blank normally.\nNeeded only if you want use SA instead of interactive login.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "service_account_credentials", - "Help": "Service Account Credentials JSON blob.\n\nLeave blank normally.\nNeeded only if you want use SA instead of interactive login.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 2, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "team_drive", - "Help": "ID of the Shared Drive (Team Drive).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 2, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_owner_only", - "Help": "Only consider files owned by the authenticated user.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", 
- "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "use_trash", - "Help": "Send files to the trash instead of deleting permanently.\n\nDefaults to true, namely sending files to the trash.\nUse `--drive-use-trash=false` to delete files permanently instead.", - "Provider": "", - "Default": true, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "copy_shortcut_content", - "Help": "Server side copy contents of shortcuts instead of the shortcut.\n\nWhen doing server side copies, normally rclone will copy shortcuts as\nshortcuts.\n\nIf this flag is used then rclone will copy the contents of shortcuts\nrather than shortcuts themselves when doing server side copies.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "skip_gdocs", - "Help": "Skip google documents in all listings.\n\nIf given, gdocs practically become invisible to rclone.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "skip_checksum_gphotos", - "Help": "Skip MD5 checksum on Google photos and videos only.\n\nUse this if you get checksum errors when transferring Google photos or\nvideos.\n\nSetting this flag will cause Google photos and videos to return a\nblank MD5 checksum.\n\nGoogle photos are identified by being in the \"photos\" space.\n\nCorrupted checksums are caused by Google modifying the image/video but\nnot updating the checksum.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "shared_with_me", - "Help": "Only show files that are shared with me.\n\nInstructs rclone to operate on your \"Shared with me\" folder (where\nGoogle Drive lets you access the files and folders others have shared\nwith you).\n\nThis works both with the \"list\" (lsd, lsl, etc.) 
and the \"copy\"\ncommands (copy, sync, etc.), and with all other commands too.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "trashed_only", - "Help": "Only show files that are in the trash.\n\nThis will show trashed files in their original directory structure.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "starred_only", - "Help": "Only show files that are starred.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "formats", - "Help": "Deprecated: See export_formats.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 2, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "export_formats", - "Help": "Comma separated list of preferred formats for downloading Google docs.", - "Provider": "", - "Default": "docx,xlsx,pptx,svg", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "docx,xlsx,pptx,svg", - "ValueStr": "docx,xlsx,pptx,svg", - "Type": "string" - }, - { - "Name": "import_formats", - "Help": "Comma separated list of preferred formats for uploading Google docs.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "allow_import_name_change", - "Help": "Allow the filetype to change when uploading Google docs.\n\nE.g. file.doc to file.docx. This will confuse sync and reupload every time.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "use_created_date", - "Help": "Use file created date instead of modified date.\n\nUseful when downloading data and you want the creation date used in\nplace of the last modified date.\n\n**WARNING**: This flag may have some unexpected consequences.\n\nWhen uploading to your drive all files will be overwritten unless they\nhaven't been modified since their creation. And the inverse will occur\nwhile downloading. This side effect can be avoided by using the\n\"--checksum\" flag.\n\nThis feature was implemented to retain photos capture date as recorded\nby google photos. You will first need to check the \"Create a Google\nPhotos folder\" option in your google drive settings. 
You can then copy\nor move the photos locally and use the date the image was taken\n(created) set as the modification date.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 2, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "use_shared_date", - "Help": "Use date file was shared instead of modified date.\n\nNote that, as with \"--drive-use-created-date\", this flag may have\nunexpected consequences when uploading/downloading files.\n\nIf both this flag and \"--drive-use-created-date\" are set, the created\ndate is used.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 2, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "list_chunk", - "Help": "Size of listing chunk 100-1000, 0 to disable.", - "Provider": "", - "Default": 1000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1000", - "ValueStr": "1000", - "Type": "int" - }, - { - "Name": "impersonate", - "Help": "Impersonate this user when using a service account.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "alternate_export", - "Help": "Deprecated: No longer needed.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "upload_cutoff", - "Help": "Cutoff for switching to chunked upload.", - "Provider": "", - "Default": 8388608, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "8Mi", - "ValueStr": "8Mi", - "Type": "SizeSuffix" - }, - { - "Name": "chunk_size", - "Help": "Upload chunk size.\n\nMust a power of 2 \u003e= 256k.\n\nMaking this larger will improve performance, but note that each chunk\nis buffered in memory one per transfer.\n\nReducing this will reduce memory usage but decrease performance.", - "Provider": "", - "Default": 8388608, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "8Mi", - "ValueStr": "8Mi", - "Type": "SizeSuffix" - }, - { - "Name": "acknowledge_abuse", - "Help": "Set to allow files which return cannotDownloadAbusiveFile to be downloaded.\n\nIf downloading a file returns the error \"This file has been identified\nas malware or spam and cannot be downloaded\" with the error code\n\"cannotDownloadAbusiveFile\" then supply this flag to rclone to\nindicate you acknowledge the risks of downloading the file and rclone\nwill download it anyway.\n\nNote that if you are using service account it will need 
Manager\npermission (not Content Manager) to for this flag to work. If the SA\ndoes not have the right permission, Google will just ignore the flag.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "keep_revision_forever", - "Help": "Keep new head revision of each file forever.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "size_as_quota", - "Help": "Show sizes as storage quota usage, not actual size.\n\nShow the size of a file as the storage quota used. This is the\ncurrent version plus any older versions that have been set to keep\nforever.\n\n**WARNING**: This flag may have some unexpected consequences.\n\nIt is not recommended to set this flag in your config - the\nrecommended usage is using the flag form --drive-size-as-quota when\ndoing rclone ls/lsl/lsf/lsjson/etc only.\n\nIf you do use this flag for syncing (not recommended) then you will\nneed to use --ignore size also.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 2, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "v2_download_min_size", - "Help": "If Object's are greater, use drive v2 API to download.", - "Provider": "", - "Default": -1, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "off", - "ValueStr": "off", - "Type": "SizeSuffix" - }, - { - "Name": "pacer_min_sleep", - "Help": "Minimum time to sleep between API calls.", - "Provider": "", - "Default": 100000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "100ms", - "ValueStr": "100ms", - "Type": "Duration" - }, - { - "Name": "pacer_burst", - "Help": "Number of API calls to allow without sleeping.", - "Provider": "", - "Default": 100, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "100", - "ValueStr": "100", - "Type": "int" - }, - { - "Name": "server_side_across_configs", - "Help": "Deprecated: use --server-side-across-configs instead.\n\nAllow server-side operations (e.g. copy) to work across different drive configs.\n\nThis can be useful if you wish to do a server-side copy between two\ndifferent Google drives. 
Note that this isn't enabled by default\nbecause it isn't easy to tell if it will work between any two\nconfigurations.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "disable_http2", - "Help": "Disable drive using http2.\n\nThere is currently an unsolved issue with the google drive backend and\nHTTP/2. HTTP/2 is therefore disabled by default for the drive backend\nbut can be re-enabled here. When the issue is solved this flag will\nbe removed.\n\nSee: https://github.com/rclone/rclone/issues/3631\n\n", - "Provider": "", - "Default": true, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "stop_on_upload_limit", - "Help": "Make upload limit errors be fatal.\n\nAt the time of writing it is only possible to upload 750 GiB of data to\nGoogle Drive a day (this is an undocumented limit). When this limit is\nreached Google Drive produces a slightly different error message. When\nthis flag is set it causes these errors to be fatal. These will stop\nthe in-progress sync.\n\nNote that this detection is relying on error message strings which\nGoogle don't document so it may break in the future.\n\nSee: https://github.com/rclone/rclone/issues/3857\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "stop_on_download_limit", - "Help": "Make download limit errors be fatal.\n\nAt the time of writing it is only possible to download 10 TiB of data from\nGoogle Drive a day (this is an undocumented limit). When this limit is\nreached Google Drive produces a slightly different error message. When\nthis flag is set it causes these errors to be fatal. 
These will stop\nthe in-progress sync.\n\nNote that this detection is relying on error message strings which\nGoogle don't document so it may break in the future.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "skip_shortcuts", - "Help": "If set skip shortcut files.\n\nNormally rclone dereferences shortcut files making them appear as if\nthey are the original file (see [the shortcuts section](#shortcuts)).\nIf this flag is set then rclone will ignore shortcut files completely.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "skip_dangling_shortcuts", - "Help": "If set skip dangling shortcut files.\n\nIf this is set then rclone will not show any dangling shortcuts in listings.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "resource_key", - "Help": "Resource key for accessing a link-shared file.\n\nIf you need to access files shared with a link like this\n\n https://drive.google.com/drive/folders/XXX?resourcekey=YYY\u0026usp=sharing\n\nThen you will need to use the first part \"XXX\" as the \"root_folder_id\"\nand the second part \"YYY\" as the \"resource_key\" otherwise you will get\n404 not found errors when trying to access the directory.\n\nSee: https://developers.google.com/drive/api/guides/resource-keys\n\nThis resource key requirement only applies to a subset of old files.\n\nNote also that opening the folder once in the web interface (with the\nuser you've authenticated rclone with) seems to be enough so that the\nresource key is no needed.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 16777216, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "InvalidUtf8", - "ValueStr": "InvalidUtf8", - "Type": "MultiEncoder" - }, - { - "Name": "env_auth", - "Help": "Get IAM credentials from runtime (environment variables or instance meta data if no env vars).\n\nOnly applies if service_account_file and service_account_credentials is blank.", - "Provider": "", - "Default": false, - "Value": null, - "Examples": [ - { - "Value": "false", - "Help": "Enter credentials in the next step.", - "Provider": "" - }, - { - "Value": "true", - "Help": "Get GCP IAM credentials from the environment (env vars or IAM).", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": 
false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - } + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": { + "atime": { + "Help": "Time of last access", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z07:00", + "ReadOnly": false + }, + "btime": { + "Help": "Time of file birth (creation)", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z07:00", + "ReadOnly": false + }, + "gid": { + "Help": "Group ID of owner", + "Type": "decimal number", + "Example": "500", + "ReadOnly": false + }, + "mode": { + "Help": "File type and mode", + "Type": "octal, unix style", + "Example": "0100664", + "ReadOnly": false + }, + "mtime": { + "Help": "Time of last modification", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z07:00", + "ReadOnly": false + }, + "rdev": { + "Help": "Device ID (if special file)", + "Type": "hexadecimal", + "Example": "1abc", + "ReadOnly": false + }, + "uid": { + "Help": "User ID of owner", + "Type": "decimal number", + "Example": "500", + "ReadOnly": false + } + }, + "Help": "Depending on which OS is in use the local backend may return only some\nof the system metadata. Setting system metadata is supported on all\nOSes but setting user metadata is only supported on linux, freebsd,\nnetbsd, macOS and Solaris. It is **not** supported on Windows yet\n([see pkg/attrs#47](https://github.com/pkg/xattr/issues/47)).\n\nUser metadata is stored as extended attributes (which may not be\nsupported by all file systems) under the \"user.*\" prefix.\n\nMetadata is supported on files and directories.\n" + } + }, + { + "Name": "mailru", + "Description": "Mail.ru Cloud", + "Prefix": "mailru", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": 
"client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "user", + "FieldName": "", + "Help": "User name (usually email).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "pass", + "FieldName": "", + "Help": "Password.\n\nThis must be an app password - rclone will not work with your normal\npassword. See the Configuration section in the docs for how to make an\napp password.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "speedup_enable", + "FieldName": "", + "Help": "Skip full upload if there is another file with same data hash.\n\nThis feature is called \"speedup\" or \"put by hash\". It is especially efficient\nin case of generally available files like popular books, video or audio clips,\nbecause files are searched by hash in all accounts of all mailru users.\nIt is meaningless and ineffective if source file is unique or encrypted.\nPlease note that rclone may need local memory and disk space to calculate\ncontent hash in advance and decide whether full upload is required.\nAlso, if rclone does not know file size in advance (e.g. in case of\nstreaming or partial uploads), it will not even try this optimization.", + "Default": true, + "Value": null, + "Examples": [ + { + "Value": "true", + "Help": "Enable" + }, + { + "Value": "false", + "Help": "Disable" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "speedup_file_patterns", + "FieldName": "", + "Help": "Comma separated list of file name patterns eligible for speedup (put by hash).\n\nPatterns are case insensitive and can contain '*' or '?' meta characters.", + "Default": "*.mkv,*.avi,*.mp4,*.mp3,*.zip,*.gz,*.rar,*.pdf", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Empty list completely disables speedup (put by hash)." + }, + { + "Value": "*", + "Help": "All files will be attempted for speedup." + }, + { + "Value": "*.mkv,*.avi,*.mp4,*.mp3", + "Help": "Only common audio/video files will be tried for put by hash." + }, + { + "Value": "*.zip,*.gz,*.rar,*.pdf", + "Help": "Only common archives or PDF books will be tried for speedup." 
+ } ], - "CommandHelp": [ - { - "Name": "get", - "Short": "Get command for fetching the drive config parameters", - "Long": "This is a get command which will be used to fetch the various drive config parameters\n\nUsage Examples:\n\n rclone backend get drive: [-o service_account_file] [-o chunk_size]\n rclone rc backend/command command=get fs=drive: [-o service_account_file] [-o chunk_size]\n", - "Opts": { - "chunk_size": "show the current upload chunk size", - "service_account_file": "show the current service account file" - } - }, - { - "Name": "set", - "Short": "Set command for updating the drive config parameters", - "Long": "This is a set command which will be used to update the various drive config parameters\n\nUsage Examples:\n\n rclone backend set drive: [-o service_account_file=sa.json] [-o chunk_size=67108864]\n rclone rc backend/command command=set fs=drive: [-o service_account_file=sa.json] [-o chunk_size=67108864]\n", - "Opts": { - "chunk_size": "update the current upload chunk size", - "service_account_file": "update the current service account file" - } - }, - { - "Name": "shortcut", - "Short": "Create shortcuts from files or directories", - "Long": "This command creates shortcuts from files or directories.\n\nUsage:\n\n rclone backend shortcut drive: source_item destination_shortcut\n rclone backend shortcut drive: source_item -o target=drive2: destination_shortcut\n\nIn the first example this creates a shortcut from the \"source_item\"\nwhich can be a file or a directory to the \"destination_shortcut\". The\n\"source_item\" and the \"destination_shortcut\" should be relative paths\nfrom \"drive:\"\n\nIn the second example this creates a shortcut from the \"source_item\"\nrelative to \"drive:\" to the \"destination_shortcut\" relative to\n\"drive2:\". This may fail with a permission error if the user\nauthenticated with \"drive2:\" can't read files from \"drive:\".\n", - "Opts": { - "target": "optional target remote for the shortcut destination" - } - }, - { - "Name": "drives", - "Short": "List the Shared Drives available to this account", - "Long": "This command lists the Shared Drives (Team Drives) available to this\naccount.\n\nUsage:\n\n rclone backend [-o config] drives drive:\n\nThis will return a JSON list of objects like this\n\n [\n {\n \"id\": \"0ABCDEF-01234567890\",\n \"kind\": \"drive#teamDrive\",\n \"name\": \"My Drive\"\n },\n {\n \"id\": \"0ABCDEFabcdefghijkl\",\n \"kind\": \"drive#teamDrive\",\n \"name\": \"Test Drive\"\n }\n ]\n\nWith the -o config parameter it will output the list in a format\nsuitable for adding to a config file to make aliases for all the\ndrives found and a combined drive.\n\n [My Drive]\n type = alias\n remote = drive,team_drive=0ABCDEF-01234567890,root_folder_id=:\n\n [Test Drive]\n type = alias\n remote = drive,team_drive=0ABCDEFabcdefghijkl,root_folder_id=:\n\n [AllDrives]\n type = combine\n upstreams = \"My Drive=My Drive:\" \"Test Drive=Test Drive:\"\n\nAdding this to the rclone config file will cause those team drives to\nbe accessible with the aliases shown. 
Any illegal characters will be\nsubstituted with \"_\" and duplicate names will have numbers suffixed.\nIt will also add a remote called AllDrives which shows all the shared\ndrives combined into one directory tree.\n", - "Opts": null - }, - { - "Name": "untrash", - "Short": "Untrash files and directories", - "Long": "This command untrashes all the files and directories in the directory\npassed in recursively.\n\nUsage:\n\nThis takes an optional directory to trash which make this easier to\nuse via the API.\n\n rclone backend untrash drive:directory\n rclone backend --interactive untrash drive:directory subdir\n\nUse the --interactive/-i or --dry-run flag to see what would be restored before restoring it.\n\nResult:\n\n {\n \"Untrashed\": 17,\n \"Errors\": 0\n }\n", - "Opts": null - }, - { - "Name": "copyid", - "Short": "Copy files by ID", - "Long": "This command copies files by ID\n\nUsage:\n\n rclone backend copyid drive: ID path\n rclone backend copyid drive: ID1 path1 ID2 path2\n\nIt copies the drive file with ID given to the path (an rclone path which\nwill be passed internally to rclone copyto). The ID and path pairs can be\nrepeated.\n\nThe path should end with a / to indicate copy the file as named to\nthis directory. If it doesn't end with a / then the last path\ncomponent will be used as the file name.\n\nIf the destination is a drive backend then server-side copying will be\nattempted if possible.\n\nUse the --interactive/-i or --dry-run flag to see what would be copied before copying.\n", - "Opts": null - }, - { - "Name": "exportformats", - "Short": "Dump the export formats for debug purposes", - "Long": "", - "Opts": null - }, - { - "Name": "importformats", - "Short": "Dump the import formats for debug purposes", - "Long": "", - "Opts": null - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "*.mkv,*.avi,*.mp4,*.mp3,*.zip,*.gz,*.rar,*.pdf", + "ValueStr": "*.mkv,*.avi,*.mp4,*.mp3,*.zip,*.gz,*.rar,*.pdf", + "Type": "string" + }, + { + "Name": "speedup_max_disk", + "FieldName": "", + "Help": "This option allows you to disable speedup (put by hash) for large files.\n\nReason is that preliminary hashing can exhaust your RAM or disk space.", + "Default": 3221225472, + "Value": null, + "Examples": [ + { + "Value": "0", + "Help": "Completely disable speedup (put by hash)." + }, + { + "Value": "1G", + "Help": "Files larger than 1Gb will be uploaded directly." + }, + { + "Value": "3G", + "Help": "Choose this option if you have less than 3Gb free on local disk." 
+ } ], - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "dropbox", - "Description": "Dropbox", - "Prefix": "dropbox", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "chunk_size", - "Help": "Upload chunk size (\u003c 150Mi).\n\nAny files larger than this will be uploaded in chunks of this size.\n\nNote that chunks are buffered in memory (one at a time) so rclone can\ndeal with retries. Setting this larger will increase the speed\nslightly (at most 10% for 128 MiB in tests) at the cost of using more\nmemory. It can be set smaller if you are tight on memory.", - "Provider": "", - "Default": 50331648, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "48Mi", - "ValueStr": "48Mi", - "Type": "SizeSuffix" - }, - { - "Name": "impersonate", - "Help": "Impersonate this user when using a business account.\n\nNote that if you want to use impersonate, you should make sure this\nflag is set when running \"rclone config\" as this will cause rclone to\nrequest the \"members.read\" scope which it won't normally. This is\nneeded to lookup a members email address into the internal ID that\ndropbox uses in the API.\n\nUsing the \"members.read\" scope will require a Dropbox Team Admin\nto approve during the OAuth flow.\n\nYou will have to use your own App (setting your own client_id and\nclient_secret) to use this option as currently rclone's default set of\npermissions doesn't include \"members.read\". 
This can be added once\nv1.55 or later is in use everywhere.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "shared_files", - "Help": "Instructs rclone to work on individual shared files.\n\nIn this mode rclone's features are extremely limited - only list (ls, lsl, etc.) \noperations and read operations (e.g. downloading) are supported in this mode.\nAll other operations will be disabled.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "shared_folders", - "Help": "Instructs rclone to work on shared folders.\n\t\t\t\nWhen this flag is used with no path only the List operation is supported and \nall available shared folders will be listed. If you specify a path the first part \nwill be interpreted as the name of shared folder. Rclone will then try to mount this \nshared to the root namespace. On success shared folder rclone proceeds normally. \nThe shared folder is now pretty much a normal folder and all normal operations \nare supported. \n\nNote that we don't unmount the shared folder afterwards so the \n--dropbox-shared-folders can be omitted after the first use of a particular \nshared folder.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "batch_mode", - "Help": "Upload file batching sync|async|off.\n\nThis sets the batch mode used by rclone.\n\nFor full info see [the main docs](https://rclone.org/dropbox/#batch-mode)\n\nThis has 3 possible values\n\n- off - no batching\n- sync - batch uploads and check completion (default)\n- async - batch upload and don't check completion\n\nRclone will close any outstanding batches when it exits which may make\na delay on quit.\n", - "Provider": "", - "Default": "sync", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "sync", - "ValueStr": "sync", - "Type": "string" - }, - { - "Name": "batch_size", - "Help": "Max number of files in upload batch.\n\nThis sets the batch size of files to upload. It has to be less than 1000.\n\nBy default this is 0 which means rclone which calculate the batch size\ndepending on the setting of batch_mode.\n\n- batch_mode: async - default batch_size is 100\n- batch_mode: sync - default batch_size is the same as --transfers\n- batch_mode: off - not in use\n\nRclone will close any outstanding batches when it exits which may make\na delay on quit.\n\nSetting this is a great idea if you are uploading lots of small files\nas it will make them a lot quicker. 
You can use --transfers 32 to\nmaximise throughput.\n", - "Provider": "", - "Default": 0, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "0", - "ValueStr": "0", - "Type": "int" - }, - { - "Name": "batch_timeout", - "Help": "Max time to allow an idle upload batch before uploading.\n\nIf an upload batch is idle for more than this long then it will be\nuploaded.\n\nThe default for this is 0 which means rclone will choose a sensible\ndefault based on the batch_mode in use.\n\n- batch_mode: async - default batch_timeout is 10s\n- batch_mode: sync - default batch_timeout is 500ms\n- batch_mode: off - not in use\n", - "Provider": "", - "Default": 0, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "0s", - "ValueStr": "0s", - "Type": "Duration" - }, - { - "Name": "batch_commit_timeout", - "Help": "Max time to wait for a batch to finish committing", - "Provider": "", - "Default": 600000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10m0s", - "ValueStr": "10m0s", - "Type": "Duration" - }, - { - "Name": "pacer_min_sleep", - "Help": "Minimum time to sleep between API calls.", - "Provider": "", - "Default": 10000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10ms", - "ValueStr": "10ms", - "Type": "Duration" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 52469762, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,BackSlash,Del,RightSpace,InvalidUtf8,Dot", - "ValueStr": "Slash,BackSlash,Del,RightSpace,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "3Gi", + "ValueStr": "3Gi", + "Type": "SizeSuffix" + }, + { + "Name": "speedup_max_memory", + "FieldName": "", + "Help": "Files larger than the size given below will always be hashed on disk.", + "Default": 33554432, + "Value": null, + "Examples": [ + { + "Value": "0", + "Help": "Preliminary hashing will always be done in a temporary disk location." + }, + { + "Value": "32M", + "Help": "Do not dedicate more than 32Mb RAM for preliminary hashing." + }, + { + "Value": "256M", + "Help": "You have at most 256Mb RAM free for hash calculations." 
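The removed Dropbox `batch_size` and `batch_timeout` help texts above spell out how the effective defaults follow from `batch_mode`. A sketch of that resolution rule, purely illustrative (the function name is invented; the values are taken verbatim from the help texts):

    # Illustrative resolution of the Dropbox batch defaults described above;
    # not rclone's actual implementation.
    def resolve_dropbox_batch_defaults(batch_mode: str, transfers: int) -> tuple[int, str]:
        """Return the (batch_size, batch_timeout) implied by batch_mode."""
        if batch_mode == "async":
            return 100, "10s"          # async: batch_size 100, batch_timeout 10s
        if batch_mode == "sync":
            return transfers, "500ms"  # sync: batch_size equals --transfers
        if batch_mode == "off":
            return 0, "0s"             # off: batching not in use
        raise ValueError(f"unknown batch_mode: {batch_mode!r}")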
+ } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "fichier", - "Description": "1Fichier", - "Prefix": "fichier", - "Options": [ - { - "Name": "api_key", - "Help": "Your API Key, get it from https://1fichier.com/console/params.pl.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "shared_folder", - "Help": "If you want to download a shared folder, add this parameter.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "file_password", - "Help": "If you want to download a shared file that is password protected, add this parameter.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "folder_password", - "Help": "If you want to list the files in a shared folder that is password protected, add this parameter.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "cdn", - "Help": "Set if you wish to use CDN download links.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 52666494, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,LtGt,DoubleQuote,SingleQuote,BackQuote,Dollar,BackSlash,Del,Ctl,LeftSpace,RightSpace,InvalidUtf8,Dot", - "ValueStr": "Slash,LtGt,DoubleQuote,SingleQuote,BackQuote,Dollar,BackSlash,Del,Ctl,LeftSpace,RightSpace,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "32Mi", + "ValueStr": "32Mi", + "Type": "SizeSuffix" + }, + { + "Name": "check_hash", + "FieldName": "", + "Help": "What should copy do if file checksum is mismatched or invalid.", + "Default": true, + "Value": null, + "Examples": [ + { + "Value": "true", + "Help": "Fail with error." + }, + { + "Value": "false", + "Help": "Ignore and continue." 
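Taken together, the `speedup_enable`, `speedup_file_patterns`, `speedup_max_disk` and `speedup_max_memory` options above imply a small per-upload decision procedure. A sketch of that logic, assuming the defaults shown in the schema (the function is illustrative, not rclone's implementation):

    # Illustrative eligibility logic for the mail.ru "speedup" (put by hash)
    # options above, mirroring their help texts; not rclone's actual code.
    from fnmatch import fnmatchcase

    GiB = 1 << 30
    MiB = 1 << 20

    def speedup_plan(
        name: str,
        size: int,
        patterns: str = "*.mkv,*.avi,*.mp4,*.mp3,*.zip,*.gz,*.rar,*.pdf",
        max_disk: int = 3 * GiB,     # speedup_max_disk default (3Gi)
        max_memory: int = 32 * MiB,  # speedup_max_memory default (32Mi)
    ) -> str:
        """Return 'direct', 'hash_in_memory' or 'hash_on_disk' for an upload."""
        eligible = any(
            fnmatchcase(name.lower(), pat.strip().lower())  # patterns are case-insensitive
            for pat in patterns.split(",")
            if pat.strip()
        )
        if not eligible or size > max_disk:
            return "direct"          # not matched, or too large: skip put by hash
        if size <= max_memory:
            return "hash_in_memory"  # small enough to hash in RAM
        return "hash_on_disk"        # hash in a temporary disk location

    # speedup_plan("talk.mp4", 500 * MiB) -> "hash_on_disk"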
+ } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "filefabric", - "Description": "Enterprise File Fabric", - "Prefix": "filefabric", - "Options": [ - { - "Name": "url", - "Help": "URL of the Enterprise File Fabric to connect to.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "https://storagemadeeasy.com", - "Help": "Storage Made Easy US", - "Provider": "" - }, - { - "Value": "https://eu.storagemadeeasy.com", - "Help": "Storage Made Easy EU", - "Provider": "" - }, - { - "Value": "https://yourfabric.smestorage.com", - "Help": "Connect to your Enterprise File Fabric", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "root_folder_id", - "Help": "ID of the root folder.\n\nLeave blank normally.\n\nFill in to make rclone start with directory of a given ID.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "permanent_token", - "Help": "Permanent Authentication Token.\n\nA Permanent Authentication Token can be created in the Enterprise File\nFabric, on the users Dashboard under Security, there is an entry\nyou'll see called \"My Authentication Tokens\". Click the Manage button\nto create one.\n\nThese tokens are normally valid for several years.\n\nFor more info see: https://docs.storagemadeeasy.com/organisationcloud/api-tokens\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "Session Token.\n\nThis is a session token which rclone caches in the config file. 
It is\nusually valid for 1 hour.\n\nDon't set this value - rclone will set it automatically.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_expiry", - "Help": "Token expiry time.\n\nDon't set this value - rclone will set it automatically.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "version", - "Help": "Version read from the file fabric.\n\nDon't set this value - rclone will set it automatically.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50429954, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,Del,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,Del,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "user_agent", + "FieldName": "", + "Help": "HTTP user agent used internally by client.\n\nDefaults to \"rclone/VERSION\" or \"--user-agent\" provided on command line.", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "quirks", + "FieldName": "", + "Help": "Comma separated list of internal maintenance flags.\n\nThis option must not be used by an ordinary user. It is intended only to\nfacilitate remote troubleshooting of backend issues. 
Strict meaning of\nflags is not documented and not guaranteed to persist between releases.\nQuirks will be removed when the backend grows stable.\nSupported quirks: atomicmkdir binlist unknowndirs", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50440078, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "mega", + "Description": "Mega", + "Prefix": "mega", + "Options": [ + { + "Name": "user", + "FieldName": "", + "Help": "User name.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "pass", + "FieldName": "", + "Help": "Password.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "debug", + "FieldName": "", + "Help": "Output more debug from Mega.\n\nIf this flag is set (along with -vv) it will print further debugging\ninformation from the mega backend.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "hard_delete", + "FieldName": "", + "Help": "Delete files permanently rather than putting them into the trash.\n\nNormally the mega backend will put all deletions into the trash rather\nthan permanently deleting them. 
If you specify this then rclone will\npermanently delete objects instead.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_https", + "FieldName": "", + "Help": "Use HTTPS for transfers.\n\nMEGA uses plain text HTTP connections by default.\nSome ISPs throttle HTTP connections, this causes transfers to become very slow.\nEnabling this will force MEGA to use HTTPS for all transfers.\nHTTPS is normally not necessary since all data is already encrypted anyway.\nEnabling it will increase CPU usage and add network overhead.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50331650, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,InvalidUtf8,Dot", + "ValueStr": "Slash,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "memory", + "Description": "In memory object storage system.", + "Prefix": "memory", + "Options": [ + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "netstorage", + "Description": "Akamai NetStorage", + "Prefix": "netstorage", + "Options": [ + { + "Name": "protocol", + "FieldName": "", + "Help": "Select between HTTP or HTTPS protocol.\n\nMost users should choose HTTPS, which is the default.\nHTTP is provided primarily for debugging purposes.", + "Default": "https", + "Value": null, + "Examples": [ + { + "Value": "http", + "Help": "HTTP protocol" + }, + { + "Value": "https", + "Help": "HTTPS protocol" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "ftp", - "Description": "FTP", - "Prefix": "ftp", - "Options": [ - { - "Name": "host", - "Help": "FTP host to connect to.\n\nE.g. 
\"ftp.example.com\".", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "user", - "Help": "FTP username.", - "Provider": "", - "Default": "zenon", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "zenon", - "ValueStr": "zenon", - "Type": "string" - }, - { - "Name": "port", - "Help": "FTP port number.", - "Provider": "", - "Default": 21, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "21", - "ValueStr": "21", - "Type": "int" - }, - { - "Name": "pass", - "Help": "FTP password.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "tls", - "Help": "Use Implicit FTPS (FTP over TLS).\n\nWhen using implicit FTP over TLS the client connects using TLS\nright from the start which breaks compatibility with\nnon-TLS-aware servers. This is usually served over port 990 rather\nthan port 21. Cannot be used in combination with explicit FTPS.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "explicit_tls", - "Help": "Use Explicit FTPS (FTP over TLS).\n\nWhen using explicit FTP over TLS the client explicitly requests\nsecurity from the server in order to upgrade a plain text connection\nto an encrypted one. 
Cannot be used in combination with implicit FTPS.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "concurrency", - "Help": "Maximum number of FTP simultaneous connections, 0 for unlimited.\n\nNote that setting this is very likely to cause deadlocks so it should\nbe used with care.\n\nIf you are doing a sync or copy then make sure concurrency is one more\nthan the sum of `--transfers` and `--checkers`.\n\nIf you use `--check-first` then it just needs to be one more than the\nmaximum of `--checkers` and `--transfers`.\n\nSo for `concurrency 3` you'd use `--checkers 2 --transfers 2\n--check-first` or `--checkers 1 --transfers 1`.\n\n", - "Provider": "", - "Default": 0, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "0", - "ValueStr": "0", - "Type": "int" - }, - { - "Name": "no_check_certificate", - "Help": "Do not verify the TLS certificate of the server.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "disable_epsv", - "Help": "Disable using EPSV even if server advertises support.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "disable_mlsd", - "Help": "Disable using MLSD even if server advertises support.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "disable_utf8", - "Help": "Disable using UTF-8 even if server advertises support.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "writing_mdtm", - "Help": "Use MDTM to set modification time (VsFtpd quirk)", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "force_list_hidden", - "Help": "Use LIST -a to force listing of hidden files and folders. 
This will disable the use of MLSD.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "idle_timeout", - "Help": "Max time before closing idle connections.\n\nIf no connections have been returned to the connection pool in the time\ngiven, rclone will empty the connection pool.\n\nSet to 0 to keep connections indefinitely.\n", - "Provider": "", - "Default": 60000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1m0s", - "ValueStr": "1m0s", - "Type": "Duration" - }, - { - "Name": "close_timeout", - "Help": "Maximum time to wait for a response to close.", - "Provider": "", - "Default": 60000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1m0s", - "ValueStr": "1m0s", - "Type": "Duration" - }, - { - "Name": "tls_cache_size", - "Help": "Size of TLS session cache for all control and data connections.\n\nTLS cache allows to resume TLS sessions and reuse PSK between connections.\nIncrease if default size is not enough resulting in TLS resumption errors.\nEnabled by default. Use 0 to disable.", - "Provider": "", - "Default": 32, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "32", - "ValueStr": "32", - "Type": "int" - }, - { - "Name": "disable_tls13", - "Help": "Disable TLS 1.3 (workaround for FTP servers with buggy TLS)", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "shut_timeout", - "Help": "Maximum time to wait for data connection closing status.", - "Provider": "", - "Default": 60000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1m0s", - "ValueStr": "1m0s", - "Type": "Duration" - }, - { - "Name": "ask_password", - "Help": "Allow asking for FTP password when needed.\n\nIf this is set and no password is supplied then rclone will ask for a password\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 35749890, - "Value": null, - "Examples": [ - { - "Value": "Asterisk,Ctl,Dot,Slash", - "Help": "ProFTPd can't handle '*' in file names", - "Provider": "" - }, - { - "Value": "BackSlash,Ctl,Del,Dot,RightSpace,Slash,SquareBracket", - "Help": "PureFTPd can't handle '[]' or '*' in file names", - "Provider": 
"" - }, - { - "Value": "Ctl,LeftPeriod,Slash", - "Help": "VsFTPd can't handle file names starting with dot", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,Del,Ctl,RightSpace,Dot", - "ValueStr": "Slash,Del,Ctl,RightSpace,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "https", + "ValueStr": "https", + "Type": "string" + }, + { + "Name": "host", + "FieldName": "", + "Help": "Domain+path of NetStorage host to connect to.\n\nFormat should be `/`", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "account", + "FieldName": "", + "Help": "Set the NetStorage account name", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "secret", + "FieldName": "", + "Help": "Set the NetStorage account secret/G2O key for authentication.\n\nPlease choose the 'y' option to set your own password then enter your secret.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "du", + "Short": "Return disk usage information for a specified directory", + "Long": "The usage information returned, includes the targeted directory as well as all\nfiles stored in any sub-directories that may exist.", + "Opts": null + }, + { + "Name": "symlink", + "Short": "You can create a symbolic link in ObjectStore with the symlink action.", + "Long": "The desired path location (including applicable sub-directories) ending in\nthe object that will be the target of the symlink (for example, /links/mylink).\nInclude the file extension for the object, if applicable.\n`rclone backend symlink `", + "Opts": null + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "onedrive", + "Description": "Microsoft OneDrive", + "Prefix": "onedrive", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + 
"DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Choose national cloud region for OneDrive.", + "Default": "global", + "Value": null, + "Examples": [ + { + "Value": "global", + "Help": "Microsoft Cloud Global" + }, + { + "Value": "us", + "Help": "Microsoft Cloud for US Government" + }, + { + "Value": "de", + "Help": "Microsoft Cloud Germany (deprecated - try global region first)." 
+ }, + { + "Value": "cn", + "Help": "Azure and Office 365 operated by Vnet Group in China" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "google cloud storage", - "Description": "Google Cloud Storage (this is not Google Drive)", - "Prefix": "gcs", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "project_number", - "Help": "Project number.\n\nOptional - needed only for list/create/delete buckets - see your developer console.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "user_project", - "Help": "User project.\n\nOptional - needed only for requester pays.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "service_account_file", - "Help": "Service Account Credentials JSON file path.\n\nLeave blank normally.\nNeeded only if you want use SA instead of interactive login.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "service_account_credentials", - 
"Help": "Service Account Credentials JSON blob.\n\nLeave blank normally.\nNeeded only if you want use SA instead of interactive login.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "anonymous", - "Help": "Access public buckets and objects without credentials.\n\nSet to 'true' if you just want to download files and don't configure credentials.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "object_acl", - "Help": "Access Control List for new objects.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "authenticatedRead", - "Help": "Object owner gets OWNER access.\nAll Authenticated Users get READER access.", - "Provider": "" - }, - { - "Value": "bucketOwnerFullControl", - "Help": "Object owner gets OWNER access.\nProject team owners get OWNER access.", - "Provider": "" - }, - { - "Value": "bucketOwnerRead", - "Help": "Object owner gets OWNER access.\nProject team owners get READER access.", - "Provider": "" - }, - { - "Value": "private", - "Help": "Object owner gets OWNER access.\nDefault if left blank.", - "Provider": "" - }, - { - "Value": "projectPrivate", - "Help": "Object owner gets OWNER access.\nProject team members get access according to their roles.", - "Provider": "" - }, - { - "Value": "publicRead", - "Help": "Object owner gets OWNER access.\nAll Users get READER access.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "bucket_acl", - "Help": "Access Control List for new buckets.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "authenticatedRead", - "Help": "Project team owners get OWNER access.\nAll Authenticated Users get READER access.", - "Provider": "" - }, - { - "Value": "private", - "Help": "Project team owners get OWNER access.\nDefault if left blank.", - "Provider": "" - }, - { - "Value": "projectPrivate", - "Help": "Project team members get access according to their roles.", - "Provider": "" - }, - { - "Value": "publicRead", - "Help": "Project team owners get OWNER access.\nAll Users get READER access.", - "Provider": "" - }, - { - "Value": "publicReadWrite", - "Help": "Project team owners get OWNER access.\nAll Users get WRITER access.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "bucket_policy_only", - "Help": "Access checks should use bucket-level IAM policies.\n\nIf you want to upload objects to a bucket with Bucket Policy Only set\nthen you will need to set this.\n\nWhen it is set, rclone:\n\n- ignores ACLs set on buckets\n- ignores ACLs set on objects\n- creates buckets with Bucket Policy Only set\n\nDocs: https://cloud.google.com/storage/docs/bucket-policy-only\n", - 
"Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "location", - "Help": "Location for the newly created buckets.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Empty for default location (US)", - "Provider": "" - }, - { - "Value": "asia", - "Help": "Multi-regional location for Asia", - "Provider": "" - }, - { - "Value": "eu", - "Help": "Multi-regional location for Europe", - "Provider": "" - }, - { - "Value": "us", - "Help": "Multi-regional location for United States", - "Provider": "" - }, - { - "Value": "asia-east1", - "Help": "Taiwan", - "Provider": "" - }, - { - "Value": "asia-east2", - "Help": "Hong Kong", - "Provider": "" - }, - { - "Value": "asia-northeast1", - "Help": "Tokyo", - "Provider": "" - }, - { - "Value": "asia-northeast2", - "Help": "Osaka", - "Provider": "" - }, - { - "Value": "asia-northeast3", - "Help": "Seoul", - "Provider": "" - }, - { - "Value": "asia-south1", - "Help": "Mumbai", - "Provider": "" - }, - { - "Value": "asia-south2", - "Help": "Delhi", - "Provider": "" - }, - { - "Value": "asia-southeast1", - "Help": "Singapore", - "Provider": "" - }, - { - "Value": "asia-southeast2", - "Help": "Jakarta", - "Provider": "" - }, - { - "Value": "australia-southeast1", - "Help": "Sydney", - "Provider": "" - }, - { - "Value": "australia-southeast2", - "Help": "Melbourne", - "Provider": "" - }, - { - "Value": "europe-north1", - "Help": "Finland", - "Provider": "" - }, - { - "Value": "europe-west1", - "Help": "Belgium", - "Provider": "" - }, - { - "Value": "europe-west2", - "Help": "London", - "Provider": "" - }, - { - "Value": "europe-west3", - "Help": "Frankfurt", - "Provider": "" - }, - { - "Value": "europe-west4", - "Help": "Netherlands", - "Provider": "" - }, - { - "Value": "europe-west6", - "Help": "Zürich", - "Provider": "" - }, - { - "Value": "europe-central2", - "Help": "Warsaw", - "Provider": "" - }, - { - "Value": "us-central1", - "Help": "Iowa", - "Provider": "" - }, - { - "Value": "us-east1", - "Help": "South Carolina", - "Provider": "" - }, - { - "Value": "us-east4", - "Help": "Northern Virginia", - "Provider": "" - }, - { - "Value": "us-west1", - "Help": "Oregon", - "Provider": "" - }, - { - "Value": "us-west2", - "Help": "California", - "Provider": "" - }, - { - "Value": "us-west3", - "Help": "Salt Lake City", - "Provider": "" - }, - { - "Value": "us-west4", - "Help": "Las Vegas", - "Provider": "" - }, - { - "Value": "northamerica-northeast1", - "Help": "Montréal", - "Provider": "" - }, - { - "Value": "northamerica-northeast2", - "Help": "Toronto", - "Provider": "" - }, - { - "Value": "southamerica-east1", - "Help": "São Paulo", - "Provider": "" - }, - { - "Value": "southamerica-west1", - "Help": "Santiago", - "Provider": "" - }, - { - "Value": "asia1", - "Help": "Dual region: asia-northeast1 and asia-northeast2.", - "Provider": "" - }, - { - "Value": "eur4", - "Help": "Dual region: europe-north1 and europe-west4.", - "Provider": "" - }, - { - "Value": "nam4", - "Help": "Dual region: us-central1 and us-east1.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - 
"Name": "storage_class", - "Help": "The storage class to use when storing objects in Google Cloud Storage.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Default", - "Provider": "" - }, - { - "Value": "MULTI_REGIONAL", - "Help": "Multi-regional storage class", - "Provider": "" - }, - { - "Value": "REGIONAL", - "Help": "Regional storage class", - "Provider": "" - }, - { - "Value": "NEARLINE", - "Help": "Nearline storage class", - "Provider": "" - }, - { - "Value": "COLDLINE", - "Help": "Coldline storage class", - "Provider": "" - }, - { - "Value": "ARCHIVE", - "Help": "Archive storage class", - "Provider": "" - }, - { - "Value": "DURABLE_REDUCED_AVAILABILITY", - "Help": "Durable reduced availability storage class", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "directory_markers", - "Help": "Upload an empty object with a trailing slash when a new directory is created\n\nEmpty folders are unsupported for bucket based remotes, this option creates an empty\nobject ending with \"/\", to persist the folder.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_check_bucket", - "Help": "If set, don't attempt to check the bucket exists or create it.\n\nThis can be useful when trying to minimise the number of transactions\nrclone does if you know the bucket exists already.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "decompress", - "Help": "If set this will decompress gzip encoded objects.\n\nIt is possible to upload objects to GCS with \"Content-Encoding: gzip\"\nset. Normally rclone will download these files as compressed objects.\n\nIf this flag is set then rclone will decompress these files with\n\"Content-Encoding: gzip\" as they are received. 
This means that rclone\ncan't check the size and hash but the file contents will be decompressed.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "endpoint", - "Help": "Endpoint for the service.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50348034, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,CrLf,InvalidUtf8,Dot", - "ValueStr": "Slash,CrLf,InvalidUtf8,Dot", - "Type": "MultiEncoder" - }, - { - "Name": "env_auth", - "Help": "Get GCP IAM credentials from runtime (environment variables or instance meta data if no env vars).\n\nOnly applies if service_account_file and service_account_credentials is blank.", - "Provider": "", - "Default": false, - "Value": null, - "Examples": [ - { - "Value": "false", - "Help": "Enter credentials in the next step.", - "Provider": "" - }, - { - "Value": "true", - "Help": "Get GCP IAM credentials from the environment (env vars or IAM).", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "global", + "ValueStr": "global", + "Type": "string" + }, + { + "Name": "upload_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to chunked upload.\n\nAny files larger than this will be uploaded in chunks of chunk_size.\n\nThis is disabled by default as uploading using single part uploads\ncauses rclone to use twice the storage on Onedrive business as when\nrclone sets the modification time after the upload Onedrive creates a\nnew version.\n\nSee: https://github.com/rclone/rclone/issues/1716\n", + "Default": -1, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "off", + "ValueStr": "off", + "Type": "SizeSuffix" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Chunk size to upload files with - must be multiple of 320k (327,680 bytes).\n\nAbove this size files will be chunked - must be multiple of 320k (327,680 bytes) and\nshould not exceed 250M (262,144,000 bytes) else you may encounter \\\"Microsoft.SharePoint.Client.InvalidClientQueryException: The request message is too big.\\\"\nNote that the chunks will be buffered into memory.", + "Default": 10485760, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10Mi", + 
"ValueStr": "10Mi", + "Type": "SizeSuffix" + }, + { + "Name": "drive_id", + "FieldName": "", + "Help": "The ID of the drive to use.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "drive_type", + "FieldName": "", + "Help": "The type of the drive (personal | business | documentLibrary).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "root_folder_id", + "FieldName": "", + "Help": "ID of the root folder.\n\nThis isn't normally needed, but in special circumstances you might\nknow the folder ID that you wish to access but not be able to get\nthere through a path traversal.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "access_scopes", + "FieldName": "", + "Help": "Set scopes to be requested by rclone.\n\nChoose or manually enter a custom space separated list with all scopes, that rclone should request.\n", + "Default": [ + "Files.Read", + "Files.ReadWrite", + "Files.Read.All", + "Files.ReadWrite.All", + "Sites.Read.All", + "offline_access" ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "google photos", - "Description": "Google Photos", - "Prefix": "gphotos", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", 
- "Type": "string" - }, - { - "Name": "read_only", - "Help": "Set to make the Google Photos backend read only.\n\nIf you choose read only then rclone will only request read only access\nto your photos, otherwise rclone will request full access.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "read_size", - "Help": "Set to read the size of media items.\n\nNormally rclone does not read the size of media items since this takes\nanother transaction. This isn't necessary for syncing. However\nrclone mount needs to know the size of files in advance of reading\nthem, so setting this flag when using rclone mount is recommended if\nyou want to read the media.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "start_year", - "Help": "Year limits the photos to be downloaded to those which are uploaded after the given year.", - "Provider": "", - "Default": 2000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "2000", - "ValueStr": "2000", - "Type": "int" - }, - { - "Name": "include_archived", - "Help": "Also view and download archived media.\n\nBy default, rclone does not request archived media. Thus, when syncing,\narchived media is not visible in directory listings or transferred.\n\nNote that media in albums is always visible and synced, no matter\ntheir archive status.\n\nWith this flag, archived media are always visible in directory\nlistings and transferred.\n\nWithout this flag, archived media will not be visible in directory\nlistings and won't be transferred.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50348034, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,CrLf,InvalidUtf8,Dot", - "ValueStr": "Slash,CrLf,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Value": null, + "Examples": [ + { + "Value": "Files.Read Files.ReadWrite Files.Read.All Files.ReadWrite.All Sites.Read.All offline_access", + "Help": "Read and write access to all resources" + }, + { + "Value": "Files.Read Files.Read.All Sites.Read.All offline_access", + "Help": "Read only access to all resources" + }, + { + "Value": "Files.Read Files.ReadWrite Files.Read.All Files.ReadWrite.All offline_access", + "Help": "Read and write access to all resources, without the ability to browse SharePoint sites. 
\nSame as if disable_site_permission was set to true"
+          }
        ],
-      "CommandHelp": null,
-      "Aliases": null,
-      "Hide": false,
-      "MetadataInfo": null
-    },
-    {
-      "Name": "hasher",
-      "Description": "Better checksums for other remotes",
-      "Prefix": "hasher",
-      "Options": [
-        {
-          "Name": "remote",
-          "Help": "Remote to cache checksums for (e.g. myRemote:path).",
-          "Provider": "",
-          "Default": "",
-          "Value": null,
-          "ShortOpt": "",
-          "Hide": 0,
-          "Required": true,
-          "IsPassword": false,
-          "NoPrefix": false,
-          "Advanced": false,
-          "Exclusive": false,
-          "Sensitive": false,
-          "DefaultStr": "",
-          "ValueStr": "",
-          "Type": "string"
-        },
-        {
-          "Name": "hashes",
-          "Help": "Comma separated list of supported checksum types.",
-          "Provider": "",
-          "Default": [
-            "md5",
-            "sha1"
-          ],
-          "Value": null,
-          "ShortOpt": "",
-          "Hide": 0,
-          "Required": false,
-          "IsPassword": false,
-          "NoPrefix": false,
-          "Advanced": false,
-          "Exclusive": false,
-          "Sensitive": false,
-          "DefaultStr": "md5,sha1",
-          "ValueStr": "md5,sha1",
-          "Type": "CommaSepList"
-        },
-        {
-          "Name": "max_age",
-          "Help": "Maximum time to keep checksums in cache (0 = no cache, off = cache forever).",
-          "Provider": "",
-          "Default": 9223372036854775807,
-          "Value": null,
-          "ShortOpt": "",
-          "Hide": 0,
-          "Required": false,
-          "IsPassword": false,
-          "NoPrefix": false,
-          "Advanced": false,
-          "Exclusive": false,
-          "Sensitive": false,
-          "DefaultStr": "off",
-          "ValueStr": "off",
-          "Type": "Duration"
-        },
-        {
-          "Name": "auto_size",
-          "Help": "Auto-update checksum for files smaller than this size (disabled by default).",
-          "Provider": "",
-          "Default": 0,
-          "Value": null,
-          "ShortOpt": "",
-          "Hide": 0,
-          "Required": false,
-          "IsPassword": false,
-          "NoPrefix": false,
-          "Advanced": true,
-          "Exclusive": false,
-          "Sensitive": false,
-          "DefaultStr": "0",
-          "ValueStr": "0",
-          "Type": "SizeSuffix"
-        }
+        "Hide": 0,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": true,
+        "Exclusive": false,
+        "Sensitive": false,
+        "DefaultStr": "Files.Read Files.ReadWrite Files.Read.All Files.ReadWrite.All Sites.Read.All offline_access",
+        "ValueStr": "Files.Read Files.ReadWrite Files.Read.All Files.ReadWrite.All Sites.Read.All offline_access",
+        "Type": "SpaceSepList"
+      },
+      {
+        "Name": "tenant",
+        "FieldName": "",
+        "Help": "ID of the service principal's tenant. Also called its directory ID.\n\nSet this if using\n- Client Credential flow\n",
+        "Default": "",
+        "Value": null,
+        "Hide": 0,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": false,
+        "Exclusive": false,
+        "Sensitive": true,
+        "DefaultStr": "",
+        "ValueStr": "",
+        "Type": "string"
+      },
+      {
+        "Name": "disable_site_permission",
+        "FieldName": "",
+        "Help": "Disable the request for Sites.Read.All permission.\n\nIf set to true, you will no longer be able to search for a SharePoint site when\nconfiguring drive ID, because rclone will not request Sites.Read.All permission.\nSet it to true if your organization didn't assign Sites.Read.All permission to the\napplication, and your organization disallows users to consent app permission\nrequest on their own.",
+        "Default": false,
+        "Value": null,
+        "Hide": 3,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": true,
+        "Exclusive": false,
+        "Sensitive": false,
+        "DefaultStr": "false",
+        "ValueStr": "false",
+        "Type": "bool"
+      },
+      {
+        "Name": "expose_onenote_files",
+        "FieldName": "",
+        "Help": "Set to make OneNote files show up in directory listings.\n\nBy default, rclone will hide OneNote files in directory listings because\noperations like \"Open\" and \"Update\" won't work on them. But this\nbehaviour may also prevent you from deleting them. If you want to\ndelete OneNote files or otherwise want them to show up in directory\nlisting, set this option.",
+        "Default": false,
+        "Value": null,
+        "Hide": 0,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": true,
+        "Exclusive": false,
+        "Sensitive": false,
+        "DefaultStr": "false",
+        "ValueStr": "false",
+        "Type": "bool"
+      },
+      {
+        "Name": "server_side_across_configs",
+        "FieldName": "",
+        "Help": "Deprecated: use --server-side-across-configs instead.\n\nAllow server-side operations (e.g. copy) to work across different onedrive configs.\n\nThis will work if you are copying between two OneDrive *Personal* drives AND the files to\ncopy are already shared between them. Additionally, it should also function for a user who\nhas access permissions both between Onedrive for *business* and *SharePoint* under the *same\ntenant*, and between *SharePoint* and another *SharePoint* under the *same tenant*. In other\ncases, rclone will fall back to normal copy (which will be slightly slower).",
+        "Default": false,
+        "Value": null,
+        "Hide": 0,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": true,
+        "Exclusive": false,
+        "Sensitive": false,
+        "DefaultStr": "false",
+        "ValueStr": "false",
+        "Type": "bool"
+      },
+      {
+        "Name": "list_chunk",
+        "FieldName": "",
+        "Help": "Size of listing chunk.",
+        "Default": 1000,
+        "Value": null,
+        "Hide": 0,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": true,
+        "Exclusive": false,
+        "Sensitive": false,
+        "DefaultStr": "1000",
+        "ValueStr": "1000",
+        "Type": "int"
+      },
+      {
+        "Name": "no_versions",
+        "FieldName": "",
+        "Help": "Remove all versions on modifying operations.\n\nOnedrive for business creates versions when rclone uploads new files\noverwriting an existing one and when it sets the modification time.\n\nThese versions take up space out of the quota.\n\nThis flag checks for versions after file upload and setting\nmodification time and removes all but the last version.\n\n**NB** Onedrive personal can't currently delete versions so don't use\nthis flag there.\n",
+        "Default": false,
+        "Value": null,
+        "Hide": 0,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": true,
+        "Exclusive": false,
+        "Sensitive": false,
+        "DefaultStr": "false",
+        "ValueStr": "false",
+        "Type": "bool"
+      },
+      {
+        "Name": "hard_delete",
+        "FieldName": "",
+        "Help": "Permanently delete files on removal.\n\nNormally files will get sent to the recycle bin on deletion. Setting\nthis flag causes them to be permanently deleted. Use with care.\n\nOneDrive personal accounts do not support the permanentDelete API,\nit only applies to OneDrive for Business and SharePoint document libraries.\n",
+        "Default": false,
+        "Value": null,
+        "Hide": 0,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": true,
+        "Exclusive": false,
+        "Sensitive": false,
+        "DefaultStr": "false",
+        "ValueStr": "false",
+        "Type": "bool"
+      },
+      {
+        "Name": "link_scope",
+        "FieldName": "",
+        "Help": "Set the scope of the links created by the link command.",
+        "Default": "anonymous",
+        "Value": null,
+        "Examples": [
+          {
+            "Value": "anonymous",
+            "Help": "Anyone with the link has access, without needing to sign in.\nThis may include people outside of your organization.\nAnonymous link support may be disabled by an administrator."
+          },
+          {
+            "Value": "organization",
+            "Help": "Anyone signed into your organization (tenant) can use the link to get access.\nOnly available in OneDrive for Business and SharePoint."
+ } ], - "CommandHelp": [ - { - "Name": "drop", - "Short": "Drop cache", - "Long": "Completely drop checksum cache.\nUsage Example:\n rclone backend drop hasher:\n", - "Opts": null - }, - { - "Name": "dump", - "Short": "Dump the database", - "Long": "Dump cache records covered by the current remote", - "Opts": null - }, - { - "Name": "fulldump", - "Short": "Full dump of the database", - "Long": "Dump all cache records in the database", - "Opts": null - }, - { - "Name": "import", - "Short": "Import a SUM file", - "Long": "Amend hash cache from a SUM file and bind checksums to files by size/time.\nUsage Example:\n rclone backend import hasher:subdir md5 /path/to/sum.md5\n", - "Opts": null - }, - { - "Name": "stickyimport", - "Short": "Perform fast import of a SUM file", - "Long": "Fill hash cache from a SUM file without verifying file fingerprints.\nUsage Example:\n rclone backend stickyimport hasher:subdir md5 remote:path/to/sum.md5\n", - "Opts": null - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "anonymous", + "ValueStr": "anonymous", + "Type": "string" + }, + { + "Name": "link_type", + "FieldName": "", + "Help": "Set the type of the links created by the link command.", + "Default": "view", + "Value": null, + "Examples": [ + { + "Value": "view", + "Help": "Creates a read-only link to the item." + }, + { + "Value": "edit", + "Help": "Creates a read-write link to the item." + }, + { + "Value": "embed", + "Help": "Creates an embeddable link to the item." + } ], - "Aliases": null, - "Hide": false, - "MetadataInfo": { - "System": null, - "Help": "Any metadata supported by the underlying remote is read and written." + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "view", + "ValueStr": "view", + "Type": "string" + }, + { + "Name": "link_password", + "FieldName": "", + "Help": "Set the password for links created by the link command.\n\nAt the time of writing this only works with OneDrive personal paid accounts.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "hash_type", + "FieldName": "", + "Help": "Specify the hash in use for the backend.\n\nThis specifies the hash type in use. If set to \"auto\" it will use the\ndefault hash which is QuickXorHash.\n\nBefore rclone 1.62 an SHA1 hash was used by default for Onedrive\nPersonal. For 1.62 and later the default is to use a QuickXorHash for\nall onedrive types. 
If an SHA1 hash is desired then set this option\naccordingly.\n\nFrom July 2023 QuickXorHash will be the only available hash for\nboth OneDrive for Business and OneDrive Personal.\n\nThis can be set to \"none\" to not use any hashes.\n\nIf the hash requested does not exist on the object, it will be\nreturned as an empty string which is treated as a missing hash by\nrclone.\n", + "Default": "auto", + "Value": null, + "Examples": [ + { + "Value": "auto", + "Help": "Rclone chooses the best hash" + }, + { + "Value": "quickxor", + "Help": "QuickXor" + }, + { + "Value": "sha1", + "Help": "SHA1" + }, + { + "Value": "sha256", + "Help": "SHA256" + }, + { + "Value": "crc32", + "Help": "CRC32" + }, + { + "Value": "none", + "Help": "None - don't use any hashes" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "auto", + "ValueStr": "auto", + "Type": "string" + }, + { + "Name": "av_override", + "FieldName": "", + "Help": "Allows download of files the server thinks has a virus.\n\nThe onedrive/sharepoint server may check files uploaded with an Anti\nVirus checker. If it detects any potential viruses or malware it will\nblock download of the file.\n\nIn this case you will see a message like this\n\n server reports this file is infected with a virus - use --onedrive-av-override to download anyway: Infected (name of virus): 403 Forbidden: \n\nIf you are 100% sure you want to download this file anyway then use\nthe --onedrive-av-override flag, or av_override = true in the config\nfile.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "delta", + "FieldName": "", + "Help": "If set rclone will use delta listing to implement recursive listings.\n\nIf this flag is set the onedrive backend will advertise `ListR`\nsupport for recursive listings.\n\nSetting this flag speeds up these things greatly:\n\n rclone lsf -R onedrive:\n rclone size onedrive:\n rclone rc vfs/refresh recursive=true\n\n**However** the delta listing API **only** works at the root of the\ndrive. If you use it not at the root then it recurses from the root\nand discards all the data that is not under the directory you asked\nfor. So it will be correct but may not be very efficient.\n\nThis is why this flag is not set as the default.\n\nAs a rule of thumb if nearly all of your data is under rclone's root\ndirectory (the `root/directory` in `onedrive:root/directory`) then\nusing this flag will be be a big performance win. 
If your data is\nmostly not under the root then using this flag will be a big\nperformance loss.\n\nIt is recommended if you are mounting your onedrive at the root\n(or near the root when using crypt) and using rclone `rc vfs/refresh`.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "metadata_permissions", + "FieldName": "", + "Help": "Control whether permissions should be read or written in metadata.\n\nReading permissions metadata from files can be done quickly, but it\nisn't always desirable to set the permissions from the metadata.\n", + "Default": 0, + "Value": null, + "Examples": [ + { + "Value": "off", + "Help": "Do not read or write the value" + }, + { + "Value": "read", + "Help": "Read the value only" + }, + { + "Value": "write", + "Help": "Write the value only" + }, + { + "Value": "read,write", + "Help": "Read and Write the value." + }, + { + "Value": "failok", + "Help": "If writing fails log errors only, don't fail the transfer" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "off", + "ValueStr": "off", + "Type": "Bits" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 57386894, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,LeftSpace,LeftTilde,RightSpace,RightPeriod,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,LeftSpace,LeftTilde,RightSpace,RightPeriod,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": { + "btime": { + "Help": "Time of file birth (creation) with S accuracy (mS for OneDrive Personal).", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05Z", + "ReadOnly": false + }, + "content-type": { + "Help": "The MIME type of the file.", + "Type": "string", + "Example": "text/plain", + "ReadOnly": true + }, + "created-by-display-name": { + "Help": "Display name of the user that created the item.", + "Type": "string", + "Example": "John Doe", + "ReadOnly": true + }, + "created-by-id": { + "Help": "ID of the user that created the item.", + "Type": "string", + "Example": "48d31887-5fad-4d73-a9f5-3c356e68a038", + "ReadOnly": true + }, + "description": { + "Help": "A short description of the file. Max 1024 characters. 
Only supported for OneDrive Personal.", + "Type": "string", + "Example": "Contract for signing", + "ReadOnly": false + }, + "id": { + "Help": "The unique identifier of the item within OneDrive.", + "Type": "string", + "Example": "01BYE5RZ6QN3ZWBTUFOFD3GSPGOHDJD36K", + "ReadOnly": true + }, + "last-modified-by-display-name": { + "Help": "Display name of the user that last modified the item.", + "Type": "string", + "Example": "John Doe", + "ReadOnly": true + }, + "last-modified-by-id": { + "Help": "ID of the user that last modified the item.", + "Type": "string", + "Example": "48d31887-5fad-4d73-a9f5-3c356e68a038", + "ReadOnly": true + }, + "malware-detected": { + "Help": "Whether OneDrive has detected that the item contains malware.", + "Type": "boolean", + "Example": "true", + "ReadOnly": true + }, + "mtime": { + "Help": "Time of last modification with S accuracy (mS for OneDrive Personal).", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05Z", + "ReadOnly": false + }, + "package-type": { + "Help": "If present, indicates that this item is a package instead of a folder or file. Packages are treated like files in some contexts and folders in others.", + "Type": "string", + "Example": "oneNote", + "ReadOnly": true + }, + "permissions": { + "Help": "Permissions in a JSON dump of OneDrive format. Enable with --onedrive-metadata-permissions. Properties: id, grantedTo, grantedToIdentities, invitation, inheritedFrom, link, roles, shareId", + "Type": "JSON", + "Example": "{}", + "ReadOnly": false + }, + "shared-by-id": { + "Help": "ID of the user that shared the item (if shared).", + "Type": "string", + "Example": "48d31887-5fad-4d73-a9f5-3c356e68a038", + "ReadOnly": true + }, + "shared-owner-id": { + "Help": "ID of the owner of the shared item (if shared).", + "Type": "string", + "Example": "48d31887-5fad-4d73-a9f5-3c356e68a038", + "ReadOnly": true + }, + "shared-scope": { + "Help": "If shared, indicates the scope of how the item is shared: anonymous, organization, or users.", + "Type": "string", + "Example": "users", + "ReadOnly": true + }, + "shared-time": { + "Help": "Time when the item was shared, with S accuracy (mS for OneDrive Personal).", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05Z", + "ReadOnly": true + }, + "utime": { + "Help": "Time of upload with S accuracy (mS for OneDrive Personal).", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05Z", + "ReadOnly": true } - }, - { - "Name": "hdfs", - "Description": "Hadoop distributed file system", - "Prefix": "hdfs", - "Options": [ - { - "Name": "namenode", - "Help": "Hadoop name node and port.\n\nE.g. \"namenode:8020\" to connect to host namenode at port 8020.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "username", - "Help": "Hadoop user name.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "root", - "Help": "Connect to hdfs as root.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "service_principal_name", - "Help": "Kerberos service principal name for the namenode.\n\nEnables KERBEROS authentication. 
Specifies the Service Principal Name\n(SERVICE/FQDN) for the namenode. E.g. \\\"hdfs/namenode.hadoop.docker\\\"\nfor namenode running as service 'hdfs' with FQDN 'namenode.hadoop.docker'.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "data_transfer_protection", - "Help": "Kerberos data transfer protection: authentication|integrity|privacy.\n\nSpecifies whether or not authentication, data signature integrity\nchecks, and wire encryption are required when communicating with\nthe datanodes. Possible values are 'authentication', 'integrity'\nand 'privacy'. Used only with KERBEROS enabled.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "privacy", - "Help": "Ensure authentication, integrity and encryption enabled.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50430082, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,Colon,Del,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,Colon,Del,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + }, + "Help": "OneDrive supports System Metadata (not User Metadata, as of this writing) for\nboth files and directories. Much of the metadata is read-only, and there are some\ndifferences between OneDrive Personal and Business (see table below for\ndetails).\n\nPermissions are also supported, if `--onedrive-metadata-permissions` is set. The\naccepted values for `--onedrive-metadata-permissions` are \"`read`\", \"`write`\",\n\"`read,write`\", and \"`off`\" (the default). \"`write`\" supports adding new permissions,\nupdating the \"role\" of existing permissions, and removing permissions. 
Updating\nand removing require the Permission ID to be known, so it is recommended to use\n\"`read,write`\" instead of \"`write`\" if you wish to update/remove permissions.\n\nPermissions are read/written in JSON format using the same schema as the\n[OneDrive API](https://learn.microsoft.com/en-us/onedrive/developer/rest-api/resources/permission?view=odsp-graph-online),\nwhich differs slightly between OneDrive Personal and Business.\n\nExample for OneDrive Personal:\n```json\n[\n\t{\n\t\t\"id\": \"1234567890ABC!123\",\n\t\t\"grantedTo\": {\n\t\t\t\"user\": {\n\t\t\t\t\"id\": \"ryan@contoso.com\"\n\t\t\t},\n\t\t\t\"application\": {},\n\t\t\t\"device\": {}\n\t\t},\n\t\t\"invitation\": {\n\t\t\t\"email\": \"ryan@contoso.com\"\n\t\t},\n\t\t\"link\": {\n\t\t\t\"webUrl\": \"https://1drv.ms/t/s!1234567890ABC\"\n\t\t},\n\t\t\"roles\": [\n\t\t\t\"read\"\n\t\t],\n\t\t\"shareId\": \"s!1234567890ABC\"\n\t}\n]\n```\n\nExample for OneDrive Business:\n```json\n[\n\t{\n\t\t\"id\": \"48d31887-5fad-4d73-a9f5-3c356e68a038\",\n\t\t\"grantedToIdentities\": [\n\t\t\t{\n\t\t\t\t\"user\": {\n\t\t\t\t\t\"displayName\": \"ryan@contoso.com\"\n\t\t\t\t},\n\t\t\t\t\"application\": {},\n\t\t\t\t\"device\": {}\n\t\t\t}\n\t\t],\n\t\t\"link\": {\n\t\t\t\"type\": \"view\",\n\t\t\t\"scope\": \"users\",\n\t\t\t\"webUrl\": \"https://contoso.sharepoint.com/:w:/t/design/a577ghg9hgh737613bmbjf839026561fmzhsr85ng9f3hjck2t5s\"\n\t\t},\n\t\t\"roles\": [\n\t\t\t\"read\"\n\t\t],\n\t\t\"shareId\": \"u!LKj1lkdlals90j1nlkascl\"\n\t},\n\t{\n\t\t\"id\": \"5D33DD65C6932946\",\n\t\t\"grantedTo\": {\n\t\t\t\"user\": {\n\t\t\t\t\"displayName\": \"John Doe\",\n\t\t\t\t\"id\": \"efee1b77-fb3b-4f65-99d6-274c11914d12\"\n\t\t\t},\n\t\t\t\"application\": {},\n\t\t\t\"device\": {}\n\t\t},\n\t\t\"roles\": [\n\t\t\t\"owner\"\n\t\t],\n\t\t\"shareId\": \"FWxc1lasfdbEAGM5fI7B67aB5ZMPDMmQ11U\"\n\t}\n]\n```\n\nTo write permissions, pass in a \"permissions\" metadata key using this same\nformat. The [`--metadata-mapper`](https://rclone.org/docs/#metadata-mapper) tool can\nbe very helpful for this.\n\nWhen adding permissions, an email address can be provided in the `User.ID` or\n`DisplayName` properties of `grantedTo` or `grantedToIdentities`. Alternatively,\nan ObjectID can be provided in `User.ID`. At least one valid recipient must be\nprovided in order to add a permission for a user. Creating a Public Link is also\nsupported, if `Link.Scope` is set to `\"anonymous\"`.\n\nExample request to add a \"read\" permission with `--metadata-mapper`:\n\n```json\n{\n \"Metadata\": {\n \"permissions\": \"[{\\\"grantedToIdentities\\\":[{\\\"user\\\":{\\\"id\\\":\\\"ryan@contoso.com\\\"}}],\\\"roles\\\":[\\\"read\\\"]}]\"\n }\n}\n```\n\nNote that adding a permission can fail if a conflicting permission already\nexists for the file/folder.\n\nTo update an existing permission, include both the Permission ID and the new\n`roles` to be assigned. `roles` is the only property that can be changed.\n\nTo remove permissions, pass in a blob containing only the permissions you wish\nto keep (which can be empty, to remove all.) Note that the `owner` role will be\nignored, as it cannot be removed.\n\nNote that both reading and writing permissions requires extra API calls, so if\nyou don't need to read or write permissions it is recommended to omit\n`--onedrive-metadata-permissions`.\n\nMetadata and permissions are supported for Folders (directories) as well as\nFiles. 
Note that setting the `mtime` or `btime` on a Folder requires one extra\nAPI call on OneDrive Business only.\n\nOneDrive does not currently support User Metadata. When writing metadata, only\nwriteable system properties will be written -- any read-only or unrecognized keys\npassed in will be ignored.\n\nTIP: to see the metadata and permissions for any file or folder, run:\n\n```\nrclone lsjson remote:path --stat -M --onedrive-metadata-permissions read\n```" + } + }, + { + "Name": "opendrive", + "Description": "OpenDrive", + "Prefix": "opendrive", + "Options": [ + { + "Name": "username", + "FieldName": "", + "Help": "Username.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password", + "FieldName": "", + "Help": "Password.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 62007182, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,LeftSpace,LeftCrLfHtVt,RightSpace,RightCrLfHtVt,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,LeftSpace,LeftCrLfHtVt,RightSpace,RightCrLfHtVt,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Files will be uploaded in chunks this size.\n\nNote that these chunks are buffered in memory so increasing them will\nincrease memory use.", + "Default": 10485760, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10Mi", + "ValueStr": "10Mi", + "Type": "SizeSuffix" + }, + { + "Name": "access", + "FieldName": "", + "Help": "Files and folders will be uploaded with this access permission (default private)", + "Default": "private", + "Value": null, + "Examples": [ + { + "Value": "private", + "Help": "The file or folder access can be granted in a way that will allow select users to view, read or write what is absolutely essential for them." + }, + { + "Value": "public", + "Help": "The file or folder can be downloaded by anyone from a web browser. 
The link can be shared in any way," + }, + { + "Value": "hidden", + "Help": "The file or folder can be accessed has the same restrictions as Public if the user knows the URL of the file or folder link in order to access the contents" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "hidrive", - "Description": "HiDrive", - "Prefix": "hidrive", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "scope_access", - "Help": "Access permissions that rclone should use when requesting access from HiDrive.", - "Provider": "", - "Default": "rw", - "Value": null, - "Examples": [ - { - "Value": "rw", - "Help": "Read and write access to resources.", - "Provider": "" - }, - { - "Value": "ro", - "Help": "Read-only access to resources.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "rw", - "ValueStr": "rw", - "Type": "string" - }, - { - "Name": "scope_role", - "Help": "User-level that rclone should use when requesting access from HiDrive.", - "Provider": "", - "Default": "user", - "Value": null, - "Examples": [ - { - "Value": "user", - "Help": "User-level access to management permissions.\nThis will be sufficient in most cases.", - "Provider": "" - }, - { - "Value": "admin", - "Help": "Extensive access to management permissions.", - "Provider": "" - }, - { - "Value": "owner", - "Help": "Full access to management permissions.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "user", - 
"ValueStr": "user", - "Type": "string" - }, - { - "Name": "root_prefix", - "Help": "The root/parent folder for all paths.\n\nFill in to use the specified folder as the parent for all paths given to the remote.\nThis way rclone can use any folder as its starting point.", - "Provider": "", - "Default": "/", - "Value": null, - "Examples": [ - { - "Value": "/", - "Help": "The topmost directory accessible by rclone.\nThis will be equivalent with \"root\" if rclone uses a regular HiDrive user account.", - "Provider": "" - }, - { - "Value": "root", - "Help": "The topmost directory of the HiDrive user account", - "Provider": "" - }, - { - "Value": "", - "Help": "This specifies that there is no root-prefix for your paths.\nWhen using this you will always need to specify paths to this remote with a valid parent e.g. \"remote:/path/to/dir\" or \"remote:root/path/to/dir\".", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "/", - "ValueStr": "/", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for the service.\n\nThis is the URL that API-calls will be made to.", - "Provider": "", - "Default": "https://api.hidrive.strato.com/2.1", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "https://api.hidrive.strato.com/2.1", - "ValueStr": "https://api.hidrive.strato.com/2.1", - "Type": "string" - }, - { - "Name": "disable_fetching_member_count", - "Help": "Do not fetch number of objects in directories unless it is absolutely necessary.\n\nRequests may be faster if the number of objects in subdirectories is not fetched.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "chunk_size", - "Help": "Chunksize for chunked uploads.\n\nAny files larger than the configured cutoff (or files of unknown size) will be uploaded in chunks of this size.\n\nThe upper limit for this is 2147483647 bytes (about 2.000Gi).\nThat is the maximum amount of bytes a single upload-operation will support.\nSetting this above the upper limit or to a negative value will cause uploads to fail.\n\nSetting this to larger values may increase the upload speed at the cost of using more memory.\nIt can be set to smaller values smaller to save on memory.", - "Provider": "", - "Default": 50331648, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "48Mi", - "ValueStr": "48Mi", - "Type": "SizeSuffix" - }, - { - "Name": "upload_cutoff", - "Help": "Cutoff/Threshold for chunked uploads.\n\nAny files larger than this will be uploaded in chunks of the configured chunksize.\n\nThe upper limit for this is 2147483647 bytes (about 2.000Gi).\nThat is the maximum amount of bytes a single upload-operation will support.\nSetting this above the upper limit will cause uploads to fail.", - "Provider": "", - "Default": 100663296, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - 
"Exclusive": false, - "Sensitive": false, - "DefaultStr": "96Mi", - "ValueStr": "96Mi", - "Type": "SizeSuffix" - }, - { - "Name": "upload_concurrency", - "Help": "Concurrency for chunked uploads.\n\nThis is the upper limit for how many transfers for the same file are running concurrently.\nSetting this above to a value smaller than 1 will cause uploads to deadlock.\n\nIf you are uploading small numbers of large files over high-speed links\nand these uploads do not fully utilize your bandwidth, then increasing\nthis may help to speed up the transfers.", - "Provider": "", - "Default": 4, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "4", - "ValueStr": "4", - "Type": "int" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 33554434, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,Dot", - "ValueStr": "Slash,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "private", + "ValueStr": "private", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "oracleobjectstorage", + "Description": "Oracle Cloud Infrastructure Object Storage", + "Prefix": "oos", + "Options": [ + { + "Name": "provider", + "FieldName": "", + "Help": "Choose your Auth Provider", + "Default": "env_auth", + "Value": null, + "Examples": [ + { + "Value": "env_auth", + "Help": "automatically pickup the credentials from runtime(env), first one to provide auth wins" + }, + { + "Value": "user_principal_auth", + "Help": "use an OCI user and an API key for authentication.\nyou’ll need to put in a config file your tenancy OCID, user OCID, region, the path, fingerprint to an API key.\nhttps://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm" + }, + { + "Value": "instance_principal_auth", + "Help": "use instance principals to authorize an instance to make API calls. \neach instance has its own identity, and authenticates using the certificates that are read from instance metadata. 
\nhttps://docs.oracle.com/en-us/iaas/Content/Identity/Tasks/callingservicesfrominstances.htm" + }, + { + "Value": "workload_identity_auth", + "Help": "use workload identity to grant OCI Container Engine for Kubernetes workloads policy-driven access to OCI resources using OCI Identity and Access Management (IAM).\nhttps://docs.oracle.com/en-us/iaas/Content/ContEng/Tasks/contenggrantingworkloadaccesstoresources.htm" + }, + { + "Value": "resource_principal_auth", + "Help": "use resource principals to make API calls" + }, + { + "Value": "no_auth", + "Help": "no credentials needed, this is typically for reading public buckets" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "http", - "Description": "HTTP", - "Prefix": "http", - "Options": [ - { - "Name": "url", - "Help": "URL of HTTP host to connect to.\n\nE.g. \"https://example.com\", or \"https://user:pass@example.com\" to use a username and password.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "headers", - "Help": "Set HTTP headers for all transactions.\n\nUse this to set additional HTTP headers for all transactions.\n\nThe input format is comma separated list of key,value pairs. Standard\n[CSV encoding](https://godoc.org/encoding/csv) may be used.\n\nFor example, to set a Cookie use 'Cookie,name=value', or '\"Cookie\",\"name=value\"'.\n\nYou can set multiple headers, e.g. '\"Cookie\",\"name=value\",\"Authorization\",\"xxx\"'.", - "Provider": "", - "Default": [], - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "CommaSepList" - }, - { - "Name": "no_slash", - "Help": "Set this if the site doesn't end directories with /.\n\nUse this if your target website does not use / on the end of\ndirectories.\n\nA / on the end of a path is how rclone normally tells the difference\nbetween files and directories. If this flag is set, then rclone will\ntreat all files with Content-Type: text/html as directories and read\nURLs from them rather than downloading them.\n\nNote that this may cause rclone to confuse genuine HTML files with\ndirectories.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_head", - "Help": "Don't use HEAD requests.\n\nHEAD requests are mainly used to find file sizes in dir listing.\nIf your site is being very slow to load then you can try this option.\nNormally rclone does a HEAD request for each potential file in a\ndirectory listing to:\n\n- find its size\n- check it really exists\n- check to see if it is a directory\n\nIf you set this option, rclone will not do the HEAD request. 
This will mean\nthat directory listings are much quicker, but rclone won't have the times or\nsizes of any files, and some files that don't exist may be in the listing.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - } + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "env_auth", + "ValueStr": "env_auth", + "Type": "string" + }, + { + "Name": "namespace", + "FieldName": "", + "Help": "Object storage namespace", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "compartment", + "FieldName": "", + "Help": "Specify compartment OCID, if you need to list buckets.\n\nList objects works without compartment OCID.", + "Provider": "!no_auth", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Object storage Region", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Object storage API.\n\nLeave blank to use the default endpoint for the region.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "config_file", + "FieldName": "", + "Help": "Path to OCI config file", + "Provider": "user_principal_auth", + "Default": "~/.oci/config", + "Value": null, + "Examples": [ + { + "Value": "~/.oci/config", + "Help": "oci configuration file location" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "internetarchive", - "Description": "Internet Archive", - "Prefix": "internetarchive", - "Options": [ - { - "Name": "access_key_id", - "Help": "IAS3 Access Key.\n\nLeave blank for anonymous access.\nYou can find one here: https://archive.org/account/s3.php", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "secret_access_key", - "Help": "IAS3 Secret Key (password).\n\nLeave blank for anonymous access.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "IAS3 Endpoint.\n\nLeave blank for default value.", - "Provider": "", - "Default": 
"https://s3.us.archive.org", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "https://s3.us.archive.org", - "ValueStr": "https://s3.us.archive.org", - "Type": "string" - }, - { - "Name": "front_endpoint", - "Help": "Host of InternetArchive Frontend.\n\nLeave blank for default value.", - "Provider": "", - "Default": "https://archive.org", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "https://archive.org", - "ValueStr": "https://archive.org", - "Type": "string" - }, - { - "Name": "disable_checksum", - "Help": "Don't ask the server to test against MD5 checksum calculated by rclone.\nNormally rclone will calculate the MD5 checksum of the input before\nuploading it so it can ask the server to check the object against checksum.\nThis is great for data integrity checking but can cause long delays for\nlarge files to start uploading.", - "Provider": "", - "Default": true, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "wait_archive", - "Help": "Timeout for waiting the server's processing tasks (specifically archive and book_op) to finish.\nOnly enable if you need to be guaranteed to be reflected after write operations.\n0 to disable waiting. No errors to be thrown in case of timeout.", - "Provider": "", - "Default": 0, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "0s", - "ValueStr": "0s", - "Type": "Duration" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50446342, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,LtGt,CrLf,Del,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,LtGt,CrLf,Del,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "~/.oci/config", + "ValueStr": "~/.oci/config", + "Type": "string" + }, + { + "Name": "config_profile", + "FieldName": "", + "Help": "Profile name inside the oci config file", + "Provider": "user_principal_auth", + "Default": "Default", + "Value": null, + "Examples": [ + { + "Value": "Default", + "Help": "Use the default profile" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": { - "System": { - "crc32": { - "Help": "CRC32 calculated by Internet Archive", - "Type": "string", - "Example": "01234567", - "ReadOnly": true - }, - "format": { - "Help": "Name of format identified by Internet Archive", - "Type": "string", - "Example": "Comma-Separated Values", - "ReadOnly": true - }, - "md5": { - "Help": "MD5 hash calculated by Internet Archive", - "Type": "string", - "Example": "01234567012345670123456701234567", - "ReadOnly": true - }, - "mtime": { - "Help": 
"Time of last modification, managed by Rclone", - "Type": "RFC 3339", - "Example": "2006-01-02T15:04:05.999999999Z", - "ReadOnly": true - }, - "name": { - "Help": "Full file path, without the bucket part", - "Type": "filename", - "Example": "backend/internetarchive/internetarchive.go", - "ReadOnly": true - }, - "old_version": { - "Help": "Whether the file was replaced and moved by keep-old-version flag", - "Type": "boolean", - "Example": "true", - "ReadOnly": true - }, - "rclone-ia-mtime": { - "Help": "Time of last modification, managed by Internet Archive", - "Type": "RFC 3339", - "Example": "2006-01-02T15:04:05.999999999Z", - "ReadOnly": false - }, - "rclone-mtime": { - "Help": "Time of last modification, managed by Rclone", - "Type": "RFC 3339", - "Example": "2006-01-02T15:04:05.999999999Z", - "ReadOnly": false - }, - "rclone-update-track": { - "Help": "Random value used by Rclone for tracking changes inside Internet Archive", - "Type": "string", - "Example": "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - "ReadOnly": false - }, - "sha1": { - "Help": "SHA1 hash calculated by Internet Archive", - "Type": "string", - "Example": "0123456701234567012345670123456701234567", - "ReadOnly": true - }, - "size": { - "Help": "File size in bytes", - "Type": "decimal number", - "Example": "123456", - "ReadOnly": true - }, - "source": { - "Help": "The source of the file", - "Type": "string", - "Example": "original", - "ReadOnly": true - }, - "summation": { - "Help": "Check https://forum.rclone.org/t/31922 for how it is used", - "Type": "string", - "Example": "md5", - "ReadOnly": true - }, - "viruscheck": { - "Help": "The last time viruscheck process was run for the file (?)", - "Type": "unixtime", - "Example": "1654191352", - "ReadOnly": true - } - }, - "Help": "Metadata fields provided by Internet Archive.\nIf there are multiple values for a key, only the first one is returned.\nThis is a limitation of Rclone, that supports one value per one key.\n\nOwner is able to add custom keys. 
Metadata feature grabs all the keys including them.\n" - } - }, - { - "Name": "jottacloud", - "Description": "Jottacloud", - "Prefix": "jottacloud", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "md5_memory_limit", - "Help": "Files bigger than this will be cached on disk to calculate the MD5 if required.", - "Provider": "", - "Default": 10485760, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10Mi", - "ValueStr": "10Mi", - "Type": "SizeSuffix" - }, - { - "Name": "trashed_only", - "Help": "Only show files that are in the trash.\n\nThis will show trashed files in their original directory structure.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "hard_delete", - "Help": "Delete files permanently rather than putting them into the trash.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "upload_resume_limit", - "Help": "Files bigger than this can be resumed if the upload fail's.", - "Provider": "", - "Default": 10485760, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": 
false, - "DefaultStr": "10Mi", - "ValueStr": "10Mi", - "Type": "SizeSuffix" - }, - { - "Name": "no_versions", - "Help": "Avoid server side versioning by deleting files and recreating files instead of overwriting them.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50431886, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,Del,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,Del,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Default", + "ValueStr": "Default", + "Type": "string" + }, + { + "Name": "storage_tier", + "FieldName": "", + "Help": "The storage class to use when storing new objects in storage. https://docs.oracle.com/en-us/iaas/Content/Object/Concepts/understandingstoragetiers.htm", + "Default": "Standard", + "Value": null, + "Examples": [ + { + "Value": "Standard", + "Help": "Standard storage tier, this is the default tier" + }, + { + "Value": "InfrequentAccess", + "Help": "InfrequentAccess storage tier" + }, + { + "Value": "Archive", + "Help": "Archive storage tier" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Standard", + "ValueStr": "Standard", + "Type": "string" + }, + { + "Name": "upload_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to chunked upload.\n\nAny files larger than this will be uploaded in chunks of chunk_size.\nThe minimum is 0 and the maximum is 5 GiB.", + "Default": 209715200, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "200Mi", + "ValueStr": "200Mi", + "Type": "SizeSuffix" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Chunk size to use for uploading.\n\nWhen uploading files larger than upload_cutoff or files with unknown\nsize (e.g. from \"rclone rcat\" or uploaded with \"rclone mount\" they will be uploaded \nas multipart uploads using this chunk size.\n\nNote that \"upload_concurrency\" chunks of this size are buffered\nin memory per transfer.\n\nIf you are transferring large files over high-speed links and you have\nenough memory, then increasing this will speed up the transfers.\n\nRclone will automatically increase the chunk size when uploading a\nlarge file of known size to stay below the 10,000 chunks limit.\n\nFiles of unknown size are uploaded with the configured\nchunk_size. Since the default chunk size is 5 MiB and there can be at\nmost 10,000 chunks, this means that by default the maximum size of\na file you can stream upload is 48 GiB. 
If you wish to stream upload\nlarger files then you will need to increase chunk_size.\n\nIncreasing the chunk size decreases the accuracy of the progress\nstatistics displayed with \"-P\" flag.\n",
+ "Default": 5242880,
+ "Value": null,
+ "Hide": 0,
+ "Required": false,
+ "IsPassword": false,
+ "NoPrefix": false,
+ "Advanced": true,
+ "Exclusive": false,
+ "Sensitive": false,
+ "DefaultStr": "5Mi",
+ "ValueStr": "5Mi",
+ "Type": "SizeSuffix"
+ },
+ {
+ "Name": "max_upload_parts",
+ "FieldName": "",
+ "Help": "Maximum number of parts in a multipart upload.\n\nThis option defines the maximum number of multipart chunks to use\nwhen doing a multipart upload.\n\nOCI has max parts limit of 10,000 chunks.\n\nRclone will automatically increase the chunk size when uploading a\nlarge file of a known size to stay below this number of chunks limit.\n",
+ "Default": 10000,
+ "Value": null,
+ "Hide": 0,
+ "Required": false,
+ "IsPassword": false,
+ "NoPrefix": false,
+ "Advanced": true,
+ "Exclusive": false,
+ "Sensitive": false,
+ "DefaultStr": "10000",
+ "ValueStr": "10000",
+ "Type": "int"
+ },
+ {
+ "Name": "upload_concurrency",
+ "FieldName": "",
+ "Help": "Concurrency for multipart uploads.\n\nThis is the number of chunks of the same file that are uploaded\nconcurrently.\n\nIf you are uploading small numbers of large files over high-speed links\nand these uploads do not fully utilize your bandwidth, then increasing\nthis may help to speed up the transfers.",
+ "Default": 10,
+ "Value": null,
+ "Hide": 0,
+ "Required": false,
+ "IsPassword": false,
+ "NoPrefix": false,
+ "Advanced": true,
+ "Exclusive": false,
+ "Sensitive": false,
+ "DefaultStr": "10",
+ "ValueStr": "10",
+ "Type": "int"
+ },
+ {
+ "Name": "copy_cutoff",
+ "FieldName": "",
+ "Help": "Cutoff for switching to multipart copy.\n\nAny files larger than this that need to be server-side copied will be\ncopied in chunks of this size.\n\nThe minimum is 0 and the maximum is 5 GiB.",
+ "Default": 4999610368,
+ "Value": null,
+ "Hide": 0,
+ "Required": false,
+ "IsPassword": false,
+ "NoPrefix": false,
+ "Advanced": true,
+ "Exclusive": false,
+ "Sensitive": false,
+ "DefaultStr": "4.656Gi",
+ "ValueStr": "4.656Gi",
+ "Type": "SizeSuffix"
+ },
+ {
+ "Name": "copy_timeout",
+ "FieldName": "",
+ "Help": "Timeout for copy.\n\nCopy is an asynchronous operation, specify timeout to wait for copy to succeed\n",
+ "Default": 60000000000,
+ "Value": null,
+ "Hide": 0,
+ "Required": false,
+ "IsPassword": false,
+ "NoPrefix": false,
+ "Advanced": true,
+ "Exclusive": false,
+ "Sensitive": false,
+ "DefaultStr": "1m0s",
+ "ValueStr": "1m0s",
+ "Type": "Duration"
+ },
+ {
+ "Name": "disable_checksum",
+ "FieldName": "",
+ "Help": "Don't store MD5 checksum with object metadata.\n\nNormally rclone will calculate the MD5 checksum of the input before\nuploading it so it can add it to metadata on the object. This is great\nfor data integrity checking but can cause long delays for large files\nto start uploading.",
+ "Default": false,
+ "Value": null,
+ "Hide": 0,
+ "Required": false,
+ "IsPassword": false,
+ "NoPrefix": false,
+ "Advanced": true,
+ "Exclusive": false,
+ "Sensitive": false,
+ "DefaultStr": "false",
+ "ValueStr": "false",
+ "Type": "bool"
+ },
+ {
+ "Name": "encoding",
+ "FieldName": "",
+ "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.",
+ "Default": 50331650,
+ "Value": null,
+ "Hide": 0,
+ "Required": false,
+ "IsPassword": false,
+ "NoPrefix": false,
+ "Advanced": true,
+ "Exclusive": false,
+ "Sensitive": false,
+ "DefaultStr": "Slash,InvalidUtf8,Dot",
+ "ValueStr": "Slash,InvalidUtf8,Dot",
+ "Type": "Encoding"
+ },
+ {
+ "Name": "leave_parts_on_error",
+ "FieldName": "",
+ "Help": "If true avoid calling abort upload on a failure, leaving all successfully uploaded parts for manual recovery.\n\nIt should be set to true for resuming uploads across different sessions.\n\nWARNING: Storing parts of an incomplete multipart upload counts towards space usage on object storage and will add\nadditional costs if not cleaned up.\n",
+ "Default": false,
+ "Value": null,
+ "Hide": 0,
+ "Required": false,
+ "IsPassword": false,
+ "NoPrefix": false,
+ "Advanced": true,
+ "Exclusive": false,
+ "Sensitive": false,
+ "DefaultStr": "false",
+ "ValueStr": "false",
+ "Type": "bool"
+ },
+ {
+ "Name": "attempt_resume_upload",
+ "FieldName": "",
+ "Help": "If true attempt to resume previously started multipart upload for the object.\nThis will be helpful to speed up multipart transfers by resuming uploads from past session.\n\nWARNING: If chunk size differs in resumed session from past incomplete session, then the resumed multipart upload is \naborted and a new multipart upload is started with the new chunk size.\n\nThe flag leave_parts_on_error must be true to resume and optimize to skip parts that were already uploaded successfully.\n",
+ "Default": false,
+ "Value": null,
+ "Hide": 0,
+ "Required": false,
+ "IsPassword": false,
+ "NoPrefix": false,
+ "Advanced": true,
+ "Exclusive": false,
+ "Sensitive": false,
+ "DefaultStr": "false",
+ "ValueStr": "false",
+ "Type": "bool"
+ },
+ {
+ "Name": "no_check_bucket",
+ "FieldName": "",
+ "Help": "If set, don't attempt to check the bucket exists or create it.\n\nThis can be useful when trying to minimise the number of transactions\nrclone does if you know the bucket exists already.\n\nIt can also be needed if the user you are using does not have bucket\ncreation permissions.\n",
+ "Default": false,
+ "Value": null,
+ "Hide": 0,
+ "Required": false,
+ "IsPassword": false,
+ "NoPrefix": false,
+ "Advanced": true,
+ "Exclusive": false,
+ "Sensitive": false,
+ "DefaultStr": "false",
+ "ValueStr": "false",
+ "Type": "bool"
+ },
+ {
+ "Name": "sse_customer_key_file",
+ "FieldName": "",
+ "Help": "To use SSE-C, a file containing the base64-encoded string of the AES-256 encryption key associated\nwith the object.
Please note only one of sse_customer_key_file|sse_customer_key|sse_kms_key_id is needed.'", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "None" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sse_customer_key", + "FieldName": "", + "Help": "To use SSE-C, the optional header that specifies the base64-encoded 256-bit encryption key to use to\nencrypt or decrypt the data. Please note only one of sse_customer_key_file|sse_customer_key|sse_kms_key_id is\nneeded. For more information, see Using Your Own Keys for Server-Side Encryption \n(https://docs.cloud.oracle.com/Content/Object/Tasks/usingyourencryptionkeys.htm)", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "None" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "koofr", - "Description": "Koofr, Digi Storage and other Koofr-compatible storage providers", - "Prefix": "koofr", - "Options": [ - { - "Name": "provider", - "Help": "Choose your storage provider.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "koofr", - "Help": "Koofr, https://app.koofr.net/", - "Provider": "" - }, - { - "Value": "digistorage", - "Help": "Digi Storage, https://storage.rcs-rds.ro/", - "Provider": "" - }, - { - "Value": "other", - "Help": "Any other Koofr API compatible storage service", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "The Koofr API endpoint to use.", - "Provider": "other", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "mountid", - "Help": "Mount ID of the mount to use.\n\nIf omitted, the primary mount is used.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "setmtime", - "Help": "Does the backend support setting modification time.\n\nSet this to false if you use a mount ID that points to a Dropbox or Amazon Drive backend.", - "Provider": "", - "Default": true, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "user", - "Help": "Your user name.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "password", - "Help": "Your password for rclone (generate one at https://app.koofr.net/app/admin/preferences/password).", - "Provider": "koofr", - "Default": "", - "Value": null, - "ShortOpt": 
"", - "Hide": 0, - "Required": true, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "password", - "Help": "Your password for rclone (generate one at https://storage.rcs-rds.ro/app/admin/preferences/password).", - "Provider": "digistorage", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "password", - "Help": "Your password for rclone (generate one at your service's settings page).", - "Provider": "other", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50438146, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sse_customer_key_sha256", + "FieldName": "", + "Help": "If using SSE-C, The optional header that specifies the base64-encoded SHA256 hash of the encryption\nkey. This value is used to check the integrity of the encryption key. 
see Using Your Own Keys for \nServer-Side Encryption (https://docs.cloud.oracle.com/Content/Object/Tasks/usingyourencryptionkeys.htm).", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "None" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "local", - "Description": "Local Disk", - "Prefix": "local", - "Options": [ - { - "Name": "nounc", - "Help": "Disable UNC (long path names) conversion on Windows.", - "Provider": "", - "Default": false, - "Value": null, - "Examples": [ - { - "Value": "true", - "Help": "Disables long file names.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "copy_links", - "Help": "Follow symlinks and copy the pointed to item.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "L", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": true, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "links", - "Help": "Translate symlinks to/from regular files with a '.rclonelink' extension.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "l", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": true, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "skip_links", - "Help": "Don't warn about skipped symlinks.\n\nThis flag disables warning messages on skipped symlinks or junction\npoints, as you explicitly acknowledge that they should be skipped.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": true, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "zero_size_links", - "Help": "Assume the Stat size of links is zero (and read them instead) (deprecated).\n\nRclone used to use the Stat size of links as the link size, but this fails in quite a few places:\n\n- Windows\n- On some virtual filesystems (such ash LucidLink)\n- Android\n\nSo rclone now always reads the link.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "unicode_normalization", - "Help": "Apply unicode NFC normalization to paths and filenames.\n\nThis flag can be used to normalize file names into unicode NFC form\nthat are read from the local filesystem.\n\nRclone does not normally touch the encoding of file names it reads from\nthe file system.\n\nThis can be useful when using macOS as it normally provides decomposed (NFD)\nunicode which in some language (eg Korean) doesn't display properly on\nsome OSes.\n\nNote that rclone compares filenames with unicode normalization in the sync\nroutine so this flag shouldn't normally be used.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - 
"Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_check_updated", - "Help": "Don't check to see if the files change during upload.\n\nNormally rclone checks the size and modification time of files as they\nare being uploaded and aborts with a message which starts \"can't copy -\nsource file is being updated\" if the file changes during upload.\n\nHowever on some file systems this modification time check may fail (e.g.\n[Glusterfs #2206](https://github.com/rclone/rclone/issues/2206)) so this\ncheck can be disabled with this flag.\n\nIf this flag is set, rclone will use its best efforts to transfer a\nfile which is being updated. If the file is only having things\nappended to it (e.g. a log) then rclone will transfer the log file with\nthe size it had the first time rclone saw it.\n\nIf the file is being modified throughout (not just appended to) then\nthe transfer may fail with a hash check failure.\n\nIn detail, once the file has had stat() called on it for the first\ntime we:\n\n- Only transfer the size that stat gave\n- Only checksum the size that stat gave\n- Don't update the stat info for the file\n\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "one_file_system", - "Help": "Don't cross filesystem boundaries (unix/macOS only).", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "x", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": true, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "case_sensitive", - "Help": "Force the filesystem to report itself as case sensitive.\n\nNormally the local backend declares itself as case insensitive on\nWindows/macOS and case sensitive for everything else. Use this flag\nto override the default choice.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "case_insensitive", - "Help": "Force the filesystem to report itself as case insensitive.\n\nNormally the local backend declares itself as case insensitive on\nWindows/macOS and case sensitive for everything else. 
Use this flag\nto override the default choice.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_preallocate", - "Help": "Disable preallocation of disk space for transferred files.\n\nPreallocation of disk space helps prevent filesystem fragmentation.\nHowever, some virtual filesystem layers (such as Google Drive File\nStream) may incorrectly set the actual file size equal to the\npreallocated space, causing checksum and file size checks to fail.\nUse this flag to disable preallocation.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_sparse", - "Help": "Disable sparse files for multi-thread downloads.\n\nOn Windows platforms rclone will make sparse files when doing\nmulti-thread downloads. This avoids long pauses on large files where\nthe OS zeros the file. However sparse files may be undesirable as they\ncause disk fragmentation and can be slow to work with.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_set_modtime", - "Help": "Disable setting modtime.\n\nNormally rclone updates modification time of files after they are done\nuploading. This can cause permissions issues on Linux platforms when \nthe user rclone is running as does not own the file uploaded, such as\nwhen copying to a CIFS mount owned by another user. 
If this option is \nenabled, rclone will no longer update the modtime after copying a file.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 33554434, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,Dot", - "ValueStr": "Slash,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sse_kms_key_id", + "FieldName": "", + "Help": "if using your own master key in vault, this header specifies the\nOCID (https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of a master encryption key used to call\nthe Key Management service to generate a data encryption key or to encrypt or decrypt a data encryption key.\nPlease note only one of sse_customer_key_file|sse_customer_key|sse_kms_key_id is needed.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "None" + } ], - "CommandHelp": [ - { - "Name": "noop", - "Short": "A null operation for testing backend commands", - "Long": "This is a test command which has some options\nyou can try to change the output.", - "Opts": { - "echo": "echo the input arguments", - "error": "return an error based on option value" - } - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sse_customer_algorithm", + "FieldName": "", + "Help": "If using SSE-C, the optional header that specifies \"AES256\" as the encryption algorithm.\nObject Storage supports \"AES256\" as the encryption algorithm. 
For more information, see\nUsing Your Own Keys for Server-Side Encryption (https://docs.cloud.oracle.com/Content/Object/Tasks/usingyourencryptionkeys.htm).", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "None" + }, + { + "Value": "AES256", + "Help": "AES256" + } ], - "Aliases": null, - "Hide": false, - "MetadataInfo": { - "System": { - "atime": { - "Help": "Time of last access", - "Type": "RFC 3339", - "Example": "2006-01-02T15:04:05.999999999Z07:00", - "ReadOnly": false - }, - "btime": { - "Help": "Time of file birth (creation)", - "Type": "RFC 3339", - "Example": "2006-01-02T15:04:05.999999999Z07:00", - "ReadOnly": false - }, - "gid": { - "Help": "Group ID of owner", - "Type": "decimal number", - "Example": "500", - "ReadOnly": false - }, - "mode": { - "Help": "File type and mode", - "Type": "octal, unix style", - "Example": "0100664", - "ReadOnly": false - }, - "mtime": { - "Help": "Time of last modification", - "Type": "RFC 3339", - "Example": "2006-01-02T15:04:05.999999999Z07:00", - "ReadOnly": false - }, - "rdev": { - "Help": "Device ID (if special file)", - "Type": "hexadecimal", - "Example": "1abc", - "ReadOnly": false - }, - "uid": { - "Help": "User ID of owner", - "Type": "decimal number", - "Example": "500", - "ReadOnly": false - } - }, - "Help": "Depending on which OS is in use the local backend may return only some\nof the system metadata. Setting system metadata is supported on all\nOSes but setting user metadata is only supported on linux, freebsd,\nnetbsd, macOS and Solaris. It is **not** supported on Windows yet\n([see pkg/attrs#47](https://github.com/pkg/xattr/issues/47)).\n\nUser metadata is stored as extended attributes (which may not be\nsupported by all file systems) under the \"user.*\" prefix.\n" + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "rename", + "Short": "change the name of an object", + "Long": "This command can be used to rename a object.\n\nUsage Examples:\n\n rclone backend rename oos:bucket relative-object-path-under-bucket object-new-name\n", + "Opts": null + }, + { + "Name": "list-multipart-uploads", + "Short": "List the unfinished multipart uploads", + "Long": "This command lists the unfinished multipart uploads in JSON format.\n\n rclone backend list-multipart-uploads oos:bucket/path/to/object\n\nIt returns a dictionary of buckets with values as lists of unfinished\nmultipart uploads.\n\nYou can call it with no bucket in which case it lists all bucket, with\na bucket or with a bucket and path.\n\n {\n \"test-bucket\": [\n {\n \"namespace\": \"test-namespace\",\n \"bucket\": \"test-bucket\",\n \"object\": \"600m.bin\",\n \"uploadId\": \"51dd8114-52a4-b2f2-c42f-5291f05eb3c8\",\n \"timeCreated\": \"2022-07-29T06:21:16.595Z\",\n \"storageTier\": \"Standard\"\n }\n ]\n", + "Opts": null + }, + { + "Name": "cleanup", + "Short": "Remove unfinished multipart uploads.", + "Long": "This command removes unfinished multipart uploads of age greater than\nmax-age which defaults to 24 hours.\n\nNote that you can use 
--interactive/-i or --dry-run with this command to see what\nit would do.\n\n rclone backend cleanup oos:bucket/path/to/object\n rclone backend cleanup -o max-age=7w oos:bucket/path/to/object\n\nDurations are parsed as per the rest of rclone, 2h, 7d, 7w etc.\n", + "Opts": { + "max-age": "Max age of upload to delete" } - }, - { - "Name": "mailru", - "Description": "Mail.ru Cloud", - "Prefix": "mailru", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "user", - "Help": "User name (usually email).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "pass", - "Help": "Password.\n\nThis must be an app password - rclone will not work with your normal\npassword. See the Configuration section in the docs for how to make an\napp password.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "speedup_enable", - "Help": "Skip full upload if there is another file with same data hash.\n\nThis feature is called \"speedup\" or \"put by hash\". 
It is especially efficient\nin case of generally available files like popular books, video or audio clips,\nbecause files are searched by hash in all accounts of all mailru users.\nIt is meaningless and ineffective if source file is unique or encrypted.\nPlease note that rclone may need local memory and disk space to calculate\ncontent hash in advance and decide whether full upload is required.\nAlso, if rclone does not know file size in advance (e.g. in case of\nstreaming or partial uploads), it will not even try this optimization.", - "Provider": "", - "Default": true, - "Value": null, - "Examples": [ - { - "Value": "true", - "Help": "Enable", - "Provider": "" - }, - { - "Value": "false", - "Help": "Disable", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "speedup_file_patterns", - "Help": "Comma separated list of file name patterns eligible for speedup (put by hash).\n\nPatterns are case insensitive and can contain '*' or '?' meta characters.", - "Provider": "", - "Default": "*.mkv,*.avi,*.mp4,*.mp3,*.zip,*.gz,*.rar,*.pdf", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Empty list completely disables speedup (put by hash).", - "Provider": "" - }, - { - "Value": "*", - "Help": "All files will be attempted for speedup.", - "Provider": "" - }, - { - "Value": "*.mkv,*.avi,*.mp4,*.mp3", - "Help": "Only common audio/video files will be tried for put by hash.", - "Provider": "" - }, - { - "Value": "*.zip,*.gz,*.rar,*.pdf", - "Help": "Only common archives or PDF books will be tried for speedup.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "*.mkv,*.avi,*.mp4,*.mp3,*.zip,*.gz,*.rar,*.pdf", - "ValueStr": "*.mkv,*.avi,*.mp4,*.mp3,*.zip,*.gz,*.rar,*.pdf", - "Type": "string" - }, - { - "Name": "speedup_max_disk", - "Help": "This option allows you to disable speedup (put by hash) for large files.\n\nReason is that preliminary hashing can exhaust your RAM or disk space.", - "Provider": "", - "Default": 3221225472, - "Value": null, - "Examples": [ - { - "Value": "0", - "Help": "Completely disable speedup (put by hash).", - "Provider": "" - }, - { - "Value": "1G", - "Help": "Files larger than 1Gb will be uploaded directly.", - "Provider": "" - }, - { - "Value": "3G", - "Help": "Choose this option if you have less than 3Gb free on local disk.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "3Gi", - "ValueStr": "3Gi", - "Type": "SizeSuffix" - }, - { - "Name": "speedup_max_memory", - "Help": "Files larger than the size given below will always be hashed on disk.", - "Provider": "", - "Default": 33554432, - "Value": null, - "Examples": [ - { - "Value": "0", - "Help": "Preliminary hashing will always be done in a temporary disk location.", - "Provider": "" - }, - { - "Value": "32M", - "Help": "Do not dedicate more than 32Mb RAM for preliminary hashing.", - "Provider": "" - }, - { - "Value": "256M", - "Help": "You have at most 256Mb RAM free for hash calculations.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - 
"NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "32Mi", - "ValueStr": "32Mi", - "Type": "SizeSuffix" - }, - { - "Name": "check_hash", - "Help": "What should copy do if file checksum is mismatched or invalid.", - "Provider": "", - "Default": true, - "Value": null, - "Examples": [ - { - "Value": "true", - "Help": "Fail with error.", - "Provider": "" - }, - { - "Value": "false", - "Help": "Ignore and continue.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "user_agent", - "Help": "HTTP user agent used internally by client.\n\nDefaults to \"rclone/VERSION\" or \"--user-agent\" provided on command line.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "quirks", - "Help": "Comma separated list of internal maintenance flags.\n\nThis option must not be used by an ordinary user. It is intended only to\nfacilitate remote troubleshooting of backend issues. Strict meaning of\nflags is not documented and not guaranteed to persist between releases.\nQuirks will be removed when the backend grows stable.\nSupported quirks: atomicmkdir binlist unknowndirs", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50440078, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + }, + { + "Name": "restore", + "Short": "Restore objects from Archive to Standard storage", + "Long": "This command can be used to restore one or more objects from Archive to Standard storage.\n\n\tUsage Examples:\n\n rclone backend restore oos:bucket/path/to/directory -o hours=HOURS\n rclone backend restore oos:bucket -o hours=HOURS\n\nThis flag also obeys the filters. Test first with --interactive/-i or --dry-run flags\n\n\trclone --interactive backend restore --include \"*.txt\" oos:bucket/path -o hours=72\n\nAll the objects shown will be marked for restore, then\n\n\trclone backend restore --include \"*.txt\" oos:bucket/path -o hours=72\n\n\tIt returns a list of status dictionaries with Object Name and Status\n\tkeys. The Status will be \"RESTORED\"\" if it was successful or an error message\n\tif not.\n\n\t[\n\t\t{\n\t\t\t\"Object\": \"test.txt\"\n\t\t\t\"Status\": \"RESTORED\",\n\t\t},\n\t\t{\n\t\t\t\"Object\": \"test/file4.txt\"\n\t\t\t\"Status\": \"RESTORED\",\n\t\t}\n\t]\n", + "Opts": { + "hours": "The number of hours for which this object will be restored. 
Default is 24 hrs." + } + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "pcloud", + "Description": "Pcloud", + "Prefix": "pcloud", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50438146, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "root_folder_id", + "FieldName": "", + "Help": "Fill in for rclone to use a non root folder as its starting point.", + "Default": "d0", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "d0", + "ValueStr": "d0", + "Type": "string" + }, + { + "Name": "hostname", + "FieldName": "", + "Help": "Hostname to connect to.\n\nThis is normally set when rclone initially does the oauth connection,\nhowever you will need to set it by hand if you are using remote config\nwith rclone authorize.\n", + "Default": "api.pcloud.com", + "Value": null, + "Examples": [ + { + "Value": 
"api.pcloud.com", + "Help": "Original/US region" + }, + { + "Value": "eapi.pcloud.com", + "Help": "EU region" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "mega", - "Description": "Mega", - "Prefix": "mega", - "Options": [ - { - "Name": "user", - "Help": "User name.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "pass", - "Help": "Password.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "debug", - "Help": "Output more debug from Mega.\n\nIf this flag is set (along with -vv) it will print further debugging\ninformation from the mega backend.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "hard_delete", - "Help": "Delete files permanently rather than putting them into the trash.\n\nNormally the mega backend will put all deletions into the trash rather\nthan permanently deleting them. If you specify this then rclone will\npermanently delete objects instead.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "use_https", - "Help": "Use HTTPS for transfers.\n\nMEGA uses plain text HTTP connections by default.\nSome ISPs throttle HTTP connections, this causes transfers to become very slow.\nEnabling this will force MEGA to use HTTPS for all transfers.\nHTTPS is normally not necessary since all data is already encrypted anyway.\nEnabling it will increase CPU usage and add network overhead.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50331650, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,InvalidUtf8,Dot", - "ValueStr": "Slash,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "api.pcloud.com", + "ValueStr": "api.pcloud.com", + "Type": "string" + }, + { + "Name": "username", + "FieldName": "", + "Help": "Your pcloud username.\n\t\t\t\nThis is only required when you want to use the cleanup command. 
Due to a bug\nin the pcloud API the required API does not support OAuth authentication so\nwe have to rely on user password authentication for it.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password", + "FieldName": "", + "Help": "Your pcloud password.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "pikpak", + "Description": "PikPak", + "Prefix": "pikpak", + "Options": [ + { + "Name": "user", + "FieldName": "", + "Help": "Pikpak username.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "pass", + "FieldName": "", + "Help": "Pikpak password.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "device_id", + "FieldName": "", + "Help": "Device ID used for authorization.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "user_agent", + "FieldName": "", + "Help": "HTTP user agent for pikpak.\n\nDefaults to \"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:129.0) Gecko/20100101 Firefox/129.0\" or \"--pikpak-user-agent\" provided on command line.", + "Default": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:129.0) Gecko/20100101 Firefox/129.0", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:129.0) Gecko/20100101 Firefox/129.0", + "ValueStr": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:129.0) Gecko/20100101 Firefox/129.0", + "Type": "string" + }, + { + "Name": "root_folder_id", + "FieldName": "", + "Help": "ID of the root folder.\nLeave blank normally.\n\nFill in for rclone to use a non root folder as its starting point.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "use_trash", + "FieldName": "", + "Help": "Send files to the trash instead of deleting permanently.\n\nDefaults to true, namely sending files to the trash.\nUse `--pikpak-use-trash=false` to delete files permanently instead.", + "Default": true, + 
"Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "trashed_only", + "FieldName": "", + "Help": "Only show files that are in the trash.\n\nThis will show trashed files in their original directory structure.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_media_link", + "FieldName": "", + "Help": "Use original file links instead of media links.\n\nThis avoids issues caused by invalid media links, but may reduce download speeds.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "hash_memory_limit", + "FieldName": "", + "Help": "Files bigger than this will be cached on disk to calculate hash if required.", + "Default": 10485760, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10Mi", + "ValueStr": "10Mi", + "Type": "SizeSuffix" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Chunk size for multipart uploads.\n\t\nLarge files will be uploaded in chunks of this size.\n\nNote that this is stored in memory and there may be up to\n\"--transfers\" * \"--pikpak-upload-concurrency\" chunks stored at once\nin memory.\n\nIf you are transferring large files over high-speed links and you have\nenough memory, then increasing this will speed up the transfers.\n\nRclone will automatically increase the chunk size when uploading a\nlarge file of known size to stay below the 10,000 chunks limit.\n\nIncreasing the chunk size decreases the accuracy of the progress\nstatistics displayed with \"-P\" flag.", + "Default": 5242880, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "5Mi", + "ValueStr": "5Mi", + "Type": "SizeSuffix" + }, + { + "Name": "upload_concurrency", + "FieldName": "", + "Help": "Concurrency for multipart uploads.\n\nThis is the number of chunks of the same file that are uploaded\nconcurrently for multipart uploads.\n\nNote that chunks are stored in memory and there may be up to\n\"--transfers\" * \"--pikpak-upload-concurrency\" chunks stored at once\nin memory.\n\nIf you are uploading small numbers of large files over high-speed links\nand these uploads do not fully utilize your bandwidth, then increasing\nthis may help to speed up the transfers.", + "Default": 5, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "5", + "ValueStr": "5", + "Type": "int" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 56829838, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": 
false, + "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,LeftSpace,RightSpace,RightPeriod,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,LeftSpace,RightSpace,RightPeriod,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "addurl", + "Short": "Add offline download task for url", + "Long": "This command adds offline download task for url.\n\nUsage:\n\n rclone backend addurl pikpak:dirpath url\n\nDownloads will be stored in 'dirpath'. If 'dirpath' is invalid, \ndownload will fallback to default 'My Pack' folder.\n", + "Opts": null + }, + { + "Name": "decompress", + "Short": "Request decompress of a file/files in a folder", + "Long": "This command requests decompress of file/files in a folder.\n\nUsage:\n\n rclone backend decompress pikpak:dirpath {filename} -o password=password\n rclone backend decompress pikpak:dirpath {filename} -o delete-src-file\n\nAn optional argument 'filename' can be specified for a file located in \n'pikpak:dirpath'. You may want to pass '-o password=password' for a \npassword-protected files. Also, pass '-o delete-src-file' to delete \nsource files after decompression finished.\n\nResult:\n\n {\n \"Decompressed\": 17,\n \"SourceDeleted\": 0,\n \"Errors\": 0\n }\n", + "Opts": null + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "pixeldrain", + "Description": "Pixeldrain Filesystem", + "Prefix": "pixeldrain", + "Options": [ + { + "Name": "api_key", + "FieldName": "", + "Help": "API key for your pixeldrain account.\nFound on https://pixeldrain.com/user/api_keys.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "root_folder_id", + "FieldName": "", + "Help": "Root of the filesystem to use.\n\nSet to 'me' to use your personal filesystem. Set to a shared directory ID to use a shared directory.", + "Default": "me", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "me", + "ValueStr": "me", + "Type": "string" + }, + { + "Name": "api_url", + "FieldName": "", + "Help": "The API endpoint to connect to. In the vast majority of cases it's fine to leave\nthis at default. 
It is only intended to be changed for testing purposes.", + "Default": "https://pixeldrain.com/api", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "https://pixeldrain.com/api", + "ValueStr": "https://pixeldrain.com/api", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": { + "btime": { + "Help": "Time of file birth (creation)", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z07:00", + "ReadOnly": false + }, + "mode": { + "Help": "File mode", + "Type": "octal, unix style", + "Example": "755", + "ReadOnly": false + }, + "mtime": { + "Help": "Time of last modification", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z07:00", + "ReadOnly": false + } + }, + "Help": "Pixeldrain supports file modes and creation times." + } + }, + { + "Name": "premiumizeme", + "Description": "premiumize.me", + "Prefix": "premiumizeme", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + 
}, + { + "Name": "api_key", + "FieldName": "", + "Help": "API Key.\n\nThis is not normally used - use oauth instead.\n", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50438154, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,DoubleQuote,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,DoubleQuote,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "protondrive", + "Description": "Proton Drive", + "Prefix": "protondrive", + "Options": [ + { + "Name": "username", + "FieldName": "", + "Help": "The username of your proton account", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password", + "FieldName": "", + "Help": "The password of your proton account.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "mailbox_password", + "FieldName": "", + "Help": "The mailbox password of your two-password proton account.\n\nFor more information regarding the mailbox password, please check the \nfollowing official knowledge base article: \nhttps://proton.me/support/the-difference-between-the-mailbox-password-and-login-password\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "2fa", + "FieldName": "", + "Help": "The 2FA code\n\nThe value can also be provided with --protondrive-2fa=000000\n\nThe 2FA code of your proton drive account if the account is set up with \ntwo-factor authentication", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_uid", + "FieldName": "", + "Help": "Client uid key (internal use only)", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_access_token", + "FieldName": "", + "Help": "Client access token key (internal use only)", + 
"Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_refresh_token", + "FieldName": "", + "Help": "Client refresh token key (internal use only)", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_salted_key_pass", + "FieldName": "", + "Help": "Client salted key pass key (internal use only)", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 52559874, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LeftSpace,RightSpace,InvalidUtf8,Dot", + "ValueStr": "Slash,LeftSpace,RightSpace,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "original_file_size", + "FieldName": "", + "Help": "Return the file size before encryption\n\t\t\t\nThe size of the encrypted file will be different from (bigger than) the \noriginal file size. Unless there is a reason to return the file size \nafter encryption is performed, otherwise, set this option to true, as \nfeatures like Open() which will need to be supplied with original content \nsize, will fail to operate properly", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "app_version", + "FieldName": "", + "Help": "The app version string \n\nThe app version string indicates the client that is currently performing \nthe API request. This information is required and will be sent with every \nAPI request.", + "Default": "macos-drive@1.0.0-alpha.1+rclone", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "macos-drive@1.0.0-alpha.1+rclone", + "ValueStr": "macos-drive@1.0.0-alpha.1+rclone", + "Type": "string" + }, + { + "Name": "replace_existing_draft", + "FieldName": "", + "Help": "Create a new revision when filename conflict is detected\n\nWhen a file upload is cancelled or failed before completion, a draft will be \ncreated and the subsequent upload of the same file to the same location will be \nreported as a conflict.\n\nThe value can also be set by --protondrive-replace-existing-draft=true\n\nIf the option is set to true, the draft will be replaced and then the upload \noperation will restart. If there are other clients also uploading at the same \nfile location at the same time, the behavior is currently unknown. 
Need to set \nto true for integration tests.\nIf the option is set to false, an error \"a draft exist - usually this means a \nfile is being uploaded at another client, or, there was a failed upload attempt\" \nwill be returned, and no upload will happen.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "enable_caching", + "FieldName": "", + "Help": "Caches the files and folders metadata to reduce API calls\n\nNotice: If you are mounting ProtonDrive as a VFS, please disable this feature, \nas the current implementation doesn't update or clear the cache when there are \nexternal changes. \n\nThe files and folders on ProtonDrive are represented as links with keyrings, \nwhich can be cached to improve performance and be friendly to the API server.\n\nThe cache is currently built for the case when the rclone is the only instance \nperforming operations to the mount point. The event system, which is the proton\nAPI system that provides visibility of what has changed on the drive, is yet \nto be implemented, so updates from other clients won’t be reflected in the \ncache. Thus, if there are concurrent clients accessing the same mount point, \nthen we might have a problem with caching the stale data.", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "putio", + "Description": "Put.io", + "Prefix": "putio", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { 
+ "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50438146, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "qingstor", + "Description": "QingCloud Object Storage", + "Prefix": "qingstor", + "Options": [ + { + "Name": "env_auth", + "FieldName": "", + "Help": "Get QingStor credentials from runtime.\n\nOnly applies if access_key_id and secret_access_key is blank.", + "Default": false, + "Value": null, + "Examples": [ + { + "Value": "false", + "Help": "Enter QingStor credentials in the next step." + }, + { + "Value": "true", + "Help": "Get QingStor credentials from the environment (env vars or IAM)." 
+ } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "memory", - "Description": "In memory object storage system.", - "Prefix": "memory", - "Options": [], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "netstorage", - "Description": "Akamai NetStorage", - "Prefix": "netstorage", - "Options": [ - { - "Name": "protocol", - "Help": "Select between HTTP or HTTPS protocol.\n\nMost users should choose HTTPS, which is the default.\nHTTP is provided primarily for debugging purposes.", - "Provider": "", - "Default": "https", - "Value": null, - "Examples": [ - { - "Value": "http", - "Help": "HTTP protocol", - "Provider": "" - }, - { - "Value": "https", - "Help": "HTTPS protocol", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "https", - "ValueStr": "https", - "Type": "string" - }, - { - "Name": "host", - "Help": "Domain+path of NetStorage host to connect to.\n\nFormat should be `\u003cdomain\u003e/\u003cinternal folders\u003e`", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "account", - "Help": "Set the NetStorage account name", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "secret", - "Help": "Set the NetStorage account secret/G2O key for authentication.\n\nPlease choose the 'y' option to set your own password then enter your secret.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "access_key_id", + "FieldName": "", + "Help": "QingStor Access Key ID.\n\nLeave blank for anonymous access or runtime credentials.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "secret_access_key", + "FieldName": "", + "Help": "QingStor Secret Access Key (password).\n\nLeave blank for anonymous access or runtime credentials.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Enter an endpoint URL to connection QingStor API.\n\nLeave blank will use the default value \"https://qingstor.com:443\".", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": 
false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "zone", + "FieldName": "", + "Help": "Zone to connect to.\n\nDefault is \"pek3a\".", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "pek3a", + "Help": "The Beijing (China) Three Zone.\nNeeds location constraint pek3a." + }, + { + "Value": "sh1a", + "Help": "The Shanghai (China) First Zone.\nNeeds location constraint sh1a." + }, + { + "Value": "gd2a", + "Help": "The Guangdong (China) Second Zone.\nNeeds location constraint gd2a." + } ], - "CommandHelp": [ - { - "Name": "du", - "Short": "Return disk usage information for a specified directory", - "Long": "The usage information returned, includes the targeted directory as well as all\nfiles stored in any sub-directories that may exist.", - "Opts": null - }, - { - "Name": "symlink", - "Short": "You can create a symbolic link in ObjectStore with the symlink action.", - "Long": "The desired path location (including applicable sub-directories) ending in\nthe object that will be the target of the symlink (for example, /links/mylink).\nInclude the file extension for the object, if applicable.\n`rclone backend symlink \u003csrc\u003e \u003cpath\u003e`", - "Opts": null - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "connection_retries", + "FieldName": "", + "Help": "Number of connection retries.", + "Default": 3, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "3", + "ValueStr": "3", + "Type": "int" + }, + { + "Name": "upload_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to chunked upload.\n\nAny files larger than this will be uploaded in chunks of chunk_size.\nThe minimum is 0 and the maximum is 5 GiB.", + "Default": 209715200, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "200Mi", + "ValueStr": "200Mi", + "Type": "SizeSuffix" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Chunk size to use for uploading.\n\nWhen uploading files larger than upload_cutoff they will be uploaded\nas multipart uploads using this chunk size.\n\nNote that \"--qingstor-upload-concurrency\" chunks of this size are buffered\nin memory per transfer.\n\nIf you are transferring large files over high-speed links and you have\nenough memory, then increasing this will speed up the transfers.", + "Default": 4194304, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "4Mi", + "ValueStr": "4Mi", + "Type": "SizeSuffix" + }, + { + "Name": "upload_concurrency", + "FieldName": "", + "Help": "Concurrency for multipart uploads.\n\nThis is the number of chunks of the same file that are uploaded\nconcurrently.\n\nNB if you set this to > 1 then the checksums of multipart uploads\nbecome corrupted (the uploads themselves are not corrupted though).\n\nIf you are uploading small numbers of large files over high-speed links\nand these uploads do not fully utilize your bandwidth, then increasing\nthis may help to speed up the transfers.", + "Default": 1, + 
"Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1", + "ValueStr": "1", + "Type": "int" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 16842754, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,Ctl,InvalidUtf8", + "ValueStr": "Slash,Ctl,InvalidUtf8", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "quatrix", + "Description": "Quatrix by Maytech", + "Prefix": "quatrix", + "Options": [ + { + "Name": "api_key", + "FieldName": "", + "Help": "API key for accessing Quatrix account", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "host", + "FieldName": "", + "Help": "Host name of Quatrix account", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50438146, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "effective_upload_time", + "FieldName": "", + "Help": "Wanted upload time for one chunk", + "Default": "4s", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "4s", + "ValueStr": "4s", + "Type": "string" + }, + { + "Name": "minimal_chunk_size", + "FieldName": "", + "Help": "The minimal size for one chunk", + "Default": 10000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "9.537Mi", + "ValueStr": "9.537Mi", + "Type": "SizeSuffix" + }, + { + "Name": "maximal_summary_chunk_size", + "FieldName": "", + "Help": "The maximal summary for all chunks. 
It should not be less than 'transfers'*'minimal_chunk_size'", + "Default": 100000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "95.367Mi", + "ValueStr": "95.367Mi", + "Type": "SizeSuffix" + }, + { + "Name": "hard_delete", + "FieldName": "", + "Help": "Delete files permanently rather than putting them into the trash", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "skip_project_folders", + "FieldName": "", + "Help": "Skip project folders in operations", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "s3", + "Description": "Amazon S3 Compliant Storage Providers including AWS, Alibaba, ArvanCloud, Ceph, ChinaMobile, Cloudflare, DigitalOcean, Dreamhost, Exaba, FlashBlade, GCS, HuaweiOBS, IBMCOS, IDrive, IONOS, LyveCloud, Leviia, Liara, Linode, Magalu, Mega, Minio, Netease, Outscale, Petabox, RackCorp, Rclone, Scaleway, SeaweedFS, Selectel, StackPath, Storj, Synology, TencentCOS, Wasabi, Qiniu and others", + "Prefix": "s3", + "Options": [ + { + "Name": "provider", + "FieldName": "", + "Help": "Choose your S3 provider.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "AWS", + "Help": "Amazon Web Services (AWS) S3" + }, + { + "Value": "Alibaba", + "Help": "Alibaba Cloud Object Storage System (OSS) formerly Aliyun" + }, + { + "Value": "ArvanCloud", + "Help": "Arvan Cloud Object Storage (AOS)" + }, + { + "Value": "Ceph", + "Help": "Ceph Object Storage" + }, + { + "Value": "ChinaMobile", + "Help": "China Mobile Ecloud Elastic Object Storage (EOS)" + }, + { + "Value": "Cloudflare", + "Help": "Cloudflare R2 Storage" + }, + { + "Value": "DigitalOcean", + "Help": "DigitalOcean Spaces" + }, + { + "Value": "Dreamhost", + "Help": "Dreamhost DreamObjects" + }, + { + "Value": "Exaba", + "Help": "Exaba Object Storage" + }, + { + "Value": "FlashBlade", + "Help": "Pure Storage FlashBlade Object Storage" + }, + { + "Value": "GCS", + "Help": "Google Cloud Storage" + }, + { + "Value": "HuaweiOBS", + "Help": "Huawei Object Storage Service" + }, + { + "Value": "IBMCOS", + "Help": "IBM COS S3" + }, + { + "Value": "IDrive", + "Help": "IDrive e2" + }, + { + "Value": "IONOS", + "Help": "IONOS Cloud" + }, + { + "Value": "LyveCloud", + "Help": "Seagate Lyve Cloud" + }, + { + "Value": "Leviia", + "Help": "Leviia Object Storage" + }, + { + "Value": "Liara", + "Help": "Liara Object Storage" + }, + { + "Value": "Linode", + "Help": "Linode Object Storage" + }, + { + "Value": "Magalu", + "Help": "Magalu Object Storage" + }, + { + "Value": "Mega", + "Help": "MEGA S4 Object Storage" + }, + { + "Value": "Minio", + "Help": "Minio Object Storage" + }, 
+ { + "Value": "Netease", + "Help": "Netease Object Storage (NOS)" + }, + { + "Value": "Outscale", + "Help": "OUTSCALE Object Storage (OOS)" + }, + { + "Value": "Petabox", + "Help": "Petabox Object Storage" + }, + { + "Value": "RackCorp", + "Help": "RackCorp Object Storage" + }, + { + "Value": "Rclone", + "Help": "Rclone S3 Server" + }, + { + "Value": "Scaleway", + "Help": "Scaleway Object Storage" + }, + { + "Value": "SeaweedFS", + "Help": "SeaweedFS S3" + }, + { + "Value": "Selectel", + "Help": "Selectel Object Storage" + }, + { + "Value": "StackPath", + "Help": "StackPath Object Storage" + }, + { + "Value": "Storj", + "Help": "Storj (S3 Compatible Gateway)" + }, + { + "Value": "Synology", + "Help": "Synology C2 Object Storage" + }, + { + "Value": "TencentCOS", + "Help": "Tencent Cloud Object Storage (COS)" + }, + { + "Value": "Wasabi", + "Help": "Wasabi Object Storage" + }, + { + "Value": "Qiniu", + "Help": "Qiniu Object Storage (Kodo)" + }, + { + "Value": "Other", + "Help": "Any other S3 compatible provider" + } ], - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "onedrive", - "Description": "Microsoft OneDrive", - "Prefix": "onedrive", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Choose national cloud region for OneDrive.", - "Provider": "", - "Default": "global", - "Value": null, - "Examples": [ - { - "Value": "global", - "Help": "Microsoft Cloud Global", - "Provider": "" - }, - { - "Value": "us", - "Help": "Microsoft Cloud for US Government", - "Provider": "" - }, - { - "Value": "de", - "Help": "Microsoft Cloud Germany", - "Provider": "" - }, - { - "Value": "cn", - "Help": "Azure and Office 365 operated by Vnet Group in China", - "Provider": "" - } - ], - "ShortOpt": 
"", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "global", - "ValueStr": "global", - "Type": "string" - }, - { - "Name": "chunk_size", - "Help": "Chunk size to upload files with - must be multiple of 320k (327,680 bytes).\n\nAbove this size files will be chunked - must be multiple of 320k (327,680 bytes) and\nshould not exceed 250M (262,144,000 bytes) else you may encounter \\\"Microsoft.SharePoint.Client.InvalidClientQueryException: The request message is too big.\\\"\nNote that the chunks will be buffered into memory.", - "Provider": "", - "Default": 10485760, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10Mi", - "ValueStr": "10Mi", - "Type": "SizeSuffix" - }, - { - "Name": "drive_id", - "Help": "The ID of the drive to use.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "drive_type", - "Help": "The type of the drive (personal | business | documentLibrary).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "root_folder_id", - "Help": "ID of the root folder.\n\nThis isn't normally needed, but in special circumstances you might\nknow the folder ID that you wish to access but not be able to get\nthere through a path traversal.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "access_scopes", - "Help": "Set scopes to be requested by rclone.\n\nChoose or manually enter a custom space separated list with all scopes, that rclone should request.\n", - "Provider": "", - "Default": [ - "Files.Read", - "Files.ReadWrite", - "Files.Read.All", - "Files.ReadWrite.All", - "Sites.Read.All", - "offline_access" - ], - "Value": null, - "Examples": [ - { - "Value": "Files.Read Files.ReadWrite Files.Read.All Files.ReadWrite.All Sites.Read.All offline_access", - "Help": "Read and write access to all resources", - "Provider": "" - }, - { - "Value": "Files.Read Files.Read.All Sites.Read.All offline_access", - "Help": "Read only access to all resources", - "Provider": "" - }, - { - "Value": "Files.Read Files.ReadWrite Files.Read.All Files.ReadWrite.All offline_access", - "Help": "Read and write access to all resources, without the ability to browse SharePoint sites. 
\nSame as if disable_site_permission was set to true", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Files.Read Files.ReadWrite Files.Read.All Files.ReadWrite.All Sites.Read.All offline_access", - "ValueStr": "Files.Read Files.ReadWrite Files.Read.All Files.ReadWrite.All Sites.Read.All offline_access", - "Type": "SpaceSepList" - }, - { - "Name": "disable_site_permission", - "Help": "Disable the request for Sites.Read.All permission.\n\nIf set to true, you will no longer be able to search for a SharePoint site when\nconfiguring drive ID, because rclone will not request Sites.Read.All permission.\nSet it to true if your organization didn't assign Sites.Read.All permission to the\napplication, and your organization disallows users to consent app permission\nrequest on their own.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "expose_onenote_files", - "Help": "Set to make OneNote files show up in directory listings.\n\nBy default, rclone will hide OneNote files in directory listings because\noperations like \"Open\" and \"Update\" won't work on them. But this\nbehaviour may also prevent you from deleting them. If you want to\ndelete OneNote files or otherwise want them to show up in directory\nlisting, set this option.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "server_side_across_configs", - "Help": "Deprecated: use --server-side-across-configs instead.\n\nAllow server-side operations (e.g. copy) to work across different onedrive configs.\n\nThis will only work if you are copying between two OneDrive *Personal* drives AND\nthe files to copy are already shared between them. 
In other cases, rclone will\nfall back to normal copy (which will be slightly slower).", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "list_chunk", - "Help": "Size of listing chunk.", - "Provider": "", - "Default": 1000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1000", - "ValueStr": "1000", - "Type": "int" - }, - { - "Name": "no_versions", - "Help": "Remove all versions on modifying operations.\n\nOnedrive for business creates versions when rclone uploads new files\noverwriting an existing one and when it sets the modification time.\n\nThese versions take up space out of the quota.\n\nThis flag checks for versions after file upload and setting\nmodification time and removes all but the last version.\n\n**NB** Onedrive personal can't currently delete versions so don't use\nthis flag there.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "link_scope", - "Help": "Set the scope of the links created by the link command.", - "Provider": "", - "Default": "anonymous", - "Value": null, - "Examples": [ - { - "Value": "anonymous", - "Help": "Anyone with the link has access, without needing to sign in.\nThis may include people outside of your organization.\nAnonymous link support may be disabled by an administrator.", - "Provider": "" - }, - { - "Value": "organization", - "Help": "Anyone signed into your organization (tenant) can use the link to get access.\nOnly available in OneDrive for Business and SharePoint.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "anonymous", - "ValueStr": "anonymous", - "Type": "string" - }, - { - "Name": "link_type", - "Help": "Set the type of the links created by the link command.", - "Provider": "", - "Default": "view", - "Value": null, - "Examples": [ - { - "Value": "view", - "Help": "Creates a read-only link to the item.", - "Provider": "" - }, - { - "Value": "edit", - "Help": "Creates a read-write link to the item.", - "Provider": "" - }, - { - "Value": "embed", - "Help": "Creates an embeddable link to the item.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "view", - "ValueStr": "view", - "Type": "string" - }, - { - "Name": "link_password", - "Help": "Set the password for links created by the link command.\n\nAt the time of writing this only works with OneDrive personal paid accounts.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "hash_type", - "Help": "Specify the 
hash in use for the backend.\n\nThis specifies the hash type in use. If set to \"auto\" it will use the\ndefault hash which is QuickXorHash.\n\nBefore rclone 1.62 an SHA1 hash was used by default for Onedrive\nPersonal. For 1.62 and later the default is to use a QuickXorHash for\nall onedrive types. If an SHA1 hash is desired then set this option\naccordingly.\n\nFrom July 2023 QuickXorHash will be the only available hash for\nboth OneDrive for Business and OneDriver Personal.\n\nThis can be set to \"none\" to not use any hashes.\n\nIf the hash requested does not exist on the object, it will be\nreturned as an empty string which is treated as a missing hash by\nrclone.\n", - "Provider": "", - "Default": "auto", - "Value": null, - "Examples": [ - { - "Value": "auto", - "Help": "Rclone chooses the best hash", - "Provider": "" - }, - { - "Value": "quickxor", - "Help": "QuickXor", - "Provider": "" - }, - { - "Value": "sha1", - "Help": "SHA1", - "Provider": "" - }, - { - "Value": "sha256", - "Help": "SHA256", - "Provider": "" - }, - { - "Value": "crc32", - "Help": "CRC32", - "Provider": "" - }, - { - "Value": "none", - "Help": "None - don't use any hashes", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "auto", - "ValueStr": "auto", - "Type": "string" - }, - { - "Name": "av_override", - "Help": "Allows download of files the server thinks has a virus.\n\nThe onedrive/sharepoint server may check files uploaded with an Anti\nVirus checker. If it detects any potential viruses or malware it will\nblock download of the file.\n\nIn this case you will see a message like this\n\n server reports this file is infected with a virus - use --onedrive-av-override to download anyway: Infected (name of virus): 403 Forbidden: \n\nIf you are 100% sure you want to download this file anyway then use\nthe --onedrive-av-override flag, or av_override = true in the config\nfile.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 57386894, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,LeftSpace,LeftTilde,RightSpace,RightPeriod,InvalidUtf8,Dot", - "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,LeftSpace,LeftTilde,RightSpace,RightPeriod,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "env_auth", + "FieldName": "", + "Help": "Get AWS credentials from runtime (environment variables or EC2/ECS meta data if no env vars).\n\nOnly applies if access_key_id and secret_access_key is blank.", + "Default": false, + "Value": null, + "Examples": [ + { + "Value": "false", + "Help": "Enter AWS credentials in 
the next step." + }, + { + "Value": "true", + "Help": "Get AWS credentials from the environment (env vars or IAM)." + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "opendrive", - "Description": "OpenDrive", - "Prefix": "opendrive", - "Options": [ - { - "Name": "username", - "Help": "Username.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "password", - "Help": "Password.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 62007182, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,LeftSpace,LeftCrLfHtVt,RightSpace,RightCrLfHtVt,InvalidUtf8,Dot", - "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,LeftSpace,LeftCrLfHtVt,RightSpace,RightCrLfHtVt,InvalidUtf8,Dot", - "Type": "MultiEncoder" - }, - { - "Name": "chunk_size", - "Help": "Files will be uploaded in chunks this size.\n\nNote that these chunks are buffered in memory so increasing them will\nincrease memory use.", - "Provider": "", - "Default": 10485760, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10Mi", - "ValueStr": "10Mi", - "Type": "SizeSuffix" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "access_key_id", + "FieldName": "", + "Help": "AWS Access Key ID.\n\nLeave blank for anonymous access or runtime credentials.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "secret_access_key", + "FieldName": "", + "Help": "AWS Secret Access Key (password).\n\nLeave blank for anonymous access or runtime credentials.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region to connect to.", + "Provider": "AWS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "us-east-1", + "Help": "The default endpoint - a good choice if you are unsure.\nUS Region, Northern Virginia, or Pacific Northwest.\nLeave location constraint empty." + }, + { + "Value": "us-east-2", + "Help": "US East (Ohio) Region.\nNeeds location constraint us-east-2." 
+ },
+ {
+ "Value": "us-west-1",
+ "Help": "US West (Northern California) Region.\nNeeds location constraint us-west-1."
+ },
+ {
+ "Value": "us-west-2",
+ "Help": "US West (Oregon) Region.\nNeeds location constraint us-west-2."
+ },
+ {
+ "Value": "ca-central-1",
+ "Help": "Canada (Central) Region.\nNeeds location constraint ca-central-1."
+ },
+ {
+ "Value": "eu-west-1",
+ "Help": "EU (Ireland) Region.\nNeeds location constraint EU or eu-west-1."
+ },
+ {
+ "Value": "eu-west-2",
+ "Help": "EU (London) Region.\nNeeds location constraint eu-west-2."
+ },
+ {
+ "Value": "eu-west-3",
+ "Help": "EU (Paris) Region.\nNeeds location constraint eu-west-3."
+ },
+ {
+ "Value": "eu-north-1",
+ "Help": "EU (Stockholm) Region.\nNeeds location constraint eu-north-1."
+ },
+ {
+ "Value": "eu-south-1",
+ "Help": "EU (Milan) Region.\nNeeds location constraint eu-south-1."
+ },
+ {
+ "Value": "eu-central-1",
+ "Help": "EU (Frankfurt) Region.\nNeeds location constraint eu-central-1."
+ },
+ {
+ "Value": "ap-southeast-1",
+ "Help": "Asia Pacific (Singapore) Region.\nNeeds location constraint ap-southeast-1."
+ },
+ {
+ "Value": "ap-southeast-2",
+ "Help": "Asia Pacific (Sydney) Region.\nNeeds location constraint ap-southeast-2."
+ },
+ {
+ "Value": "ap-northeast-1",
+ "Help": "Asia Pacific (Tokyo) Region.\nNeeds location constraint ap-northeast-1."
+ },
+ {
+ "Value": "ap-northeast-2",
+ "Help": "Asia Pacific (Seoul).\nNeeds location constraint ap-northeast-2."
+ },
+ {
+ "Value": "ap-northeast-3",
+ "Help": "Asia Pacific (Osaka-Local).\nNeeds location constraint ap-northeast-3."
+ },
+ {
+ "Value": "ap-south-1",
+ "Help": "Asia Pacific (Mumbai).\nNeeds location constraint ap-south-1."
+ },
+ {
+ "Value": "ap-east-1",
+ "Help": "Asia Pacific (Hong Kong) Region.\nNeeds location constraint ap-east-1."
+ },
+ {
+ "Value": "sa-east-1",
+ "Help": "South America (Sao Paulo) Region.\nNeeds location constraint sa-east-1."
+ },
+ {
+ "Value": "il-central-1",
+ "Help": "Israel (Tel Aviv) Region.\nNeeds location constraint il-central-1."
+ },
+ {
+ "Value": "me-south-1",
+ "Help": "Middle East (Bahrain) Region.\nNeeds location constraint me-south-1."
+ },
+ {
+ "Value": "af-south-1",
+ "Help": "Africa (Cape Town) Region.\nNeeds location constraint af-south-1."
+ },
+ {
+ "Value": "cn-north-1",
+ "Help": "China (Beijing) Region.\nNeeds location constraint cn-north-1."
+ },
+ {
+ "Value": "cn-northwest-1",
+ "Help": "China (Ningxia) Region.\nNeeds location constraint cn-northwest-1."
+ },
+ {
+ "Value": "us-gov-east-1",
+ "Help": "AWS GovCloud (US-East) Region.\nNeeds location constraint us-gov-east-1."
+ },
+ {
+ "Value": "us-gov-west-1",
+ "Help": "AWS GovCloud (US) Region.\nNeeds location constraint us-gov-west-1."
+ } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "oracleobjectstorage", - "Description": "Oracle Cloud Infrastructure Object Storage", - "Prefix": "oos", - "Options": [ - { - "Name": "provider", - "Help": "Choose your Auth Provider", - "Provider": "", - "Default": "env_auth", - "Value": null, - "Examples": [ - { - "Value": "env_auth", - "Help": "automatically pickup the credentials from runtime(env), first one to provide auth wins", - "Provider": "" - }, - { - "Value": "user_principal_auth", - "Help": "use an OCI user and an API key for authentication.\nyou’ll need to put in a config file your tenancy OCID, user OCID, region, the path, fingerprint to an API key.\nhttps://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm", - "Provider": "" - }, - { - "Value": "instance_principal_auth", - "Help": "use instance principals to authorize an instance to make API calls. \neach instance has its own identity, and authenticates using the certificates that are read from instance metadata. \nhttps://docs.oracle.com/en-us/iaas/Content/Identity/Tasks/callingservicesfrominstances.htm", - "Provider": "" - }, - { - "Value": "resource_principal_auth", - "Help": "use resource principals to make API calls", - "Provider": "" - }, - { - "Value": "no_auth", - "Help": "no credentials needed, this is typically for reading public buckets", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "env_auth", - "ValueStr": "env_auth", - "Type": "string" - }, - { - "Name": "namespace", - "Help": "Object storage namespace", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "compartment", - "Help": "Object storage compartment OCID", - "Provider": "!no_auth", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Object storage Region", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for Object storage API.\n\nLeave blank to use the default endpoint for the region.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "config_file", - "Help": "Path to OCI config file", - "Provider": "user_principal_auth", - "Default": "~/.oci/config", - "Value": null, - "Examples": [ - { - "Value": "~/.oci/config", - "Help": "oci configuration file location", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": 
"~/.oci/config", - "ValueStr": "~/.oci/config", - "Type": "string" - }, - { - "Name": "config_profile", - "Help": "Profile name inside the oci config file", - "Provider": "user_principal_auth", - "Default": "Default", - "Value": null, - "Examples": [ - { - "Value": "Default", - "Help": "Use the default profile", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Default", - "ValueStr": "Default", - "Type": "string" - }, - { - "Name": "storage_tier", - "Help": "The storage class to use when storing new objects in storage. https://docs.oracle.com/en-us/iaas/Content/Object/Concepts/understandingstoragetiers.htm", - "Provider": "", - "Default": "Standard", - "Value": null, - "Examples": [ - { - "Value": "Standard", - "Help": "Standard storage tier, this is the default tier", - "Provider": "" - }, - { - "Value": "InfrequentAccess", - "Help": "InfrequentAccess storage tier", - "Provider": "" - }, - { - "Value": "Archive", - "Help": "Archive storage tier", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Standard", - "ValueStr": "Standard", - "Type": "string" - }, - { - "Name": "upload_cutoff", - "Help": "Cutoff for switching to chunked upload.\n\nAny files larger than this will be uploaded in chunks of chunk_size.\nThe minimum is 0 and the maximum is 5 GiB.", - "Provider": "", - "Default": 209715200, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "200Mi", - "ValueStr": "200Mi", - "Type": "SizeSuffix" - }, - { - "Name": "chunk_size", - "Help": "Chunk size to use for uploading.\n\nWhen uploading files larger than upload_cutoff or files with unknown\nsize (e.g. from \"rclone rcat\" or uploaded with \"rclone mount\" or google\nphotos or google docs) they will be uploaded as multipart uploads\nusing this chunk size.\n\nNote that \"upload_concurrency\" chunks of this size are buffered\nin memory per transfer.\n\nIf you are transferring large files over high-speed links and you have\nenough memory, then increasing this will speed up the transfers.\n\nRclone will automatically increase the chunk size when uploading a\nlarge file of known size to stay below the 10,000 chunks limit.\n\nFiles of unknown size are uploaded with the configured\nchunk_size. Since the default chunk size is 5 MiB and there can be at\nmost 10,000 chunks, this means that by default the maximum size of\na file you can stream upload is 48 GiB. 
If you wish to stream upload\nlarger files then you will need to increase chunk_size.\n\nIncreasing the chunk size decreases the accuracy of the progress\nstatistics displayed with \"-P\" flag.\n", - "Provider": "", - "Default": 5242880, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "5Mi", - "ValueStr": "5Mi", - "Type": "SizeSuffix" - }, - { - "Name": "upload_concurrency", - "Help": "Concurrency for multipart uploads.\n\nThis is the number of chunks of the same file that are uploaded\nconcurrently.\n\nIf you are uploading small numbers of large files over high-speed links\nand these uploads do not fully utilize your bandwidth, then increasing\nthis may help to speed up the transfers.", - "Provider": "", - "Default": 10, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10", - "ValueStr": "10", - "Type": "int" - }, - { - "Name": "copy_cutoff", - "Help": "Cutoff for switching to multipart copy.\n\nAny files larger than this that need to be server-side copied will be\ncopied in chunks of this size.\n\nThe minimum is 0 and the maximum is 5 GiB.", - "Provider": "", - "Default": 4999610368, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "4.656Gi", - "ValueStr": "4.656Gi", - "Type": "SizeSuffix" - }, - { - "Name": "copy_timeout", - "Help": "Timeout for copy.\n\nCopy is an asynchronous operation, specify timeout to wait for copy to succeed\n", - "Provider": "", - "Default": 60000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1m0s", - "ValueStr": "1m0s", - "Type": "Duration" - }, - { - "Name": "disable_checksum", - "Help": "Don't store MD5 checksum with object metadata.\n\nNormally rclone will calculate the MD5 checksum of the input before\nuploading it so it can add it to metadata on the object. 
This is great\nfor data integrity checking but can cause long delays for large files\nto start uploading.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50331650, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,InvalidUtf8,Dot", - "ValueStr": "Slash,InvalidUtf8,Dot", - "Type": "MultiEncoder" - }, - { - "Name": "leave_parts_on_error", - "Help": "If true avoid calling abort upload on a failure, leaving all successfully uploaded parts on S3 for manual recovery.\n\nIt should be set to true for resuming uploads across different sessions.\n\nWARNING: Storing parts of an incomplete multipart upload counts towards space usage on object storage and will add\nadditional costs if not cleaned up.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_check_bucket", - "Help": "If set, don't attempt to check the bucket exists or create it.\n\nThis can be useful when trying to minimise the number of transactions\nrclone does if you know the bucket exists already.\n\nIt can also be needed if the user you are using does not have bucket\ncreation permissions.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "sse_customer_key_file", - "Help": "To use SSE-C, a file containing the base64-encoded string of the AES-256 encryption key associated\nwith the object. Please note only one of sse_customer_key_file|sse_customer_key|sse_kms_key_id is needed.'", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "None", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "sse_customer_key", - "Help": "To use SSE-C, the optional header that specifies the base64-encoded 256-bit encryption key to use to\nencrypt or decrypt the data. Please note only one of sse_customer_key_file|sse_customer_key|sse_kms_key_id is\nneeded. 
For more information, see Using Your Own Keys for Server-Side Encryption \n(https://docs.cloud.oracle.com/Content/Object/Tasks/usingyourencryptionkeys.htm)", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "None", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "sse_customer_key_sha256", - "Help": "If using SSE-C, The optional header that specifies the base64-encoded SHA256 hash of the encryption\nkey. This value is used to check the integrity of the encryption key. see Using Your Own Keys for \nServer-Side Encryption (https://docs.cloud.oracle.com/Content/Object/Tasks/usingyourencryptionkeys.htm).", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "None", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "sse_kms_key_id", - "Help": "if using your own master key in vault, this header specifies the\nOCID (https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of a master encryption key used to call\nthe Key Management service to generate a data encryption key or to encrypt or decrypt a data encryption key.\nPlease note only one of sse_customer_key_file|sse_customer_key|sse_kms_key_id is needed.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "None", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "sse_customer_algorithm", - "Help": "If using SSE-C, the optional header that specifies \"AES256\" as the encryption algorithm.\nObject Storage supports \"AES256\" as the encryption algorithm. 
For more information, see\nUsing Your Own Keys for Server-Side Encryption (https://docs.cloud.oracle.com/Content/Object/Tasks/usingyourencryptionkeys.htm).", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "None", - "Provider": "" - }, - { - "Value": "AES256", - "Help": "AES256", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "region - the location where your bucket will be created and your data stored.\n", + "Provider": "RackCorp", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "global", + "Help": "Global CDN (All locations) Region" + }, + { + "Value": "au", + "Help": "Australia (All states)" + }, + { + "Value": "au-nsw", + "Help": "NSW (Australia) Region" + }, + { + "Value": "au-qld", + "Help": "QLD (Australia) Region" + }, + { + "Value": "au-vic", + "Help": "VIC (Australia) Region" + }, + { + "Value": "au-wa", + "Help": "Perth (Australia) Region" + }, + { + "Value": "ph", + "Help": "Manila (Philippines) Region" + }, + { + "Value": "th", + "Help": "Bangkok (Thailand) Region" + }, + { + "Value": "hk", + "Help": "HK (Hong Kong) Region" + }, + { + "Value": "mn", + "Help": "Ulaanbaatar (Mongolia) Region" + }, + { + "Value": "kg", + "Help": "Bishkek (Kyrgyzstan) Region" + }, + { + "Value": "id", + "Help": "Jakarta (Indonesia) Region" + }, + { + "Value": "jp", + "Help": "Tokyo (Japan) Region" + }, + { + "Value": "sg", + "Help": "SG (Singapore) Region" + }, + { + "Value": "de", + "Help": "Frankfurt (Germany) Region" + }, + { + "Value": "us", + "Help": "USA (AnyCast) Region" + }, + { + "Value": "us-east-1", + "Help": "New York (USA) Region" + }, + { + "Value": "us-west-1", + "Help": "Freemont (USA) Region" + }, + { + "Value": "nz", + "Help": "Auckland (New Zealand) Region" + } ], - "CommandHelp": [ - { - "Name": "rename", - "Short": "change the name of an object", - "Long": "This command can be used to rename a object.\n\nUsage Examples:\n\n rclone backend rename oos:bucket relative-object-path-under-bucket object-new-name\n", - "Opts": null - }, - { - "Name": "list-multipart-uploads", - "Short": "List the unfinished multipart uploads", - "Long": "This command lists the unfinished multipart uploads in JSON format.\n\n rclone backend list-multipart-uploads oos:bucket/path/to/object\n\nIt returns a dictionary of buckets with values as lists of unfinished\nmultipart uploads.\n\nYou can call it with no bucket in which case it lists all bucket, with\na bucket or with a bucket and path.\n\n {\n \"test-bucket\": [\n {\n \"namespace\": \"test-namespace\",\n \"bucket\": \"test-bucket\",\n \"object\": \"600m.bin\",\n \"uploadId\": \"51dd8114-52a4-b2f2-c42f-5291f05eb3c8\",\n \"timeCreated\": \"2022-07-29T06:21:16.595Z\",\n \"storageTier\": \"Standard\"\n }\n ]\n", - "Opts": null - }, - { - "Name": "cleanup", - "Short": "Remove unfinished multipart uploads.", - "Long": "This command removes unfinished multipart uploads of age greater than\nmax-age which defaults to 24 hours.\n\nNote that you can use --interactive/-i or --dry-run with this command to see what\nit would do.\n\n rclone backend 
cleanup oos:bucket/path/to/object\n rclone backend cleanup -o max-age=7w oos:bucket/path/to/object\n\nDurations are parsed as per the rest of rclone, 2h, 7d, 7w etc.\n", - "Opts": { - "max-age": "Max age of upload to delete" - } - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region to connect to.", + "Provider": "Scaleway", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "nl-ams", + "Help": "Amsterdam, The Netherlands" + }, + { + "Value": "fr-par", + "Help": "Paris, France" + }, + { + "Value": "pl-waw", + "Help": "Warsaw, Poland" + } ], - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "pcloud", - "Description": "Pcloud", - "Prefix": "pcloud", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50438146, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - }, - { - "Name": "root_folder_id", - "Help": "Fill in for rclone to use a non root folder as its starting point.", - "Provider": "", - "Default": "d0", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - 
"Exclusive": false, - "Sensitive": true, - "DefaultStr": "d0", - "ValueStr": "d0", - "Type": "string" - }, - { - "Name": "hostname", - "Help": "Hostname to connect to.\n\nThis is normally set when rclone initially does the oauth connection,\nhowever you will need to set it by hand if you are using remote config\nwith rclone authorize.\n", - "Provider": "", - "Default": "api.pcloud.com", - "Value": null, - "Examples": [ - { - "Value": "api.pcloud.com", - "Help": "Original/US region", - "Provider": "" - }, - { - "Value": "eapi.pcloud.com", - "Help": "EU region", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "api.pcloud.com", - "ValueStr": "api.pcloud.com", - "Type": "string" - }, - { - "Name": "username", - "Help": "Your pcloud username.\n\t\t\t\nThis is only required when you want to use the cleanup command. Due to a bug\nin the pcloud API the required API does not support OAuth authentication so\nwe have to rely on user password authentication for it.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "password", - "Help": "Your pcloud password.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region to connect to. - the location where your bucket will be created and your data stored. 
Need bo be same with your endpoint.\n", + "Provider": "HuaweiOBS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "af-south-1", + "Help": "AF-Johannesburg" + }, + { + "Value": "ap-southeast-2", + "Help": "AP-Bangkok" + }, + { + "Value": "ap-southeast-3", + "Help": "AP-Singapore" + }, + { + "Value": "cn-east-3", + "Help": "CN East-Shanghai1" + }, + { + "Value": "cn-east-2", + "Help": "CN East-Shanghai2" + }, + { + "Value": "cn-north-1", + "Help": "CN North-Beijing1" + }, + { + "Value": "cn-north-4", + "Help": "CN North-Beijing4" + }, + { + "Value": "cn-south-1", + "Help": "CN South-Guangzhou" + }, + { + "Value": "ap-southeast-1", + "Help": "CN-Hong Kong" + }, + { + "Value": "sa-argentina-1", + "Help": "LA-Buenos Aires1" + }, + { + "Value": "sa-peru-1", + "Help": "LA-Lima1" + }, + { + "Value": "na-mexico-1", + "Help": "LA-Mexico City1" + }, + { + "Value": "sa-chile-1", + "Help": "LA-Santiago2" + }, + { + "Value": "sa-brazil-1", + "Help": "LA-Sao Paulo1" + }, + { + "Value": "ru-northwest-2", + "Help": "RU-Moscow2" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "pikpak", - "Description": "PikPak", - "Prefix": "pikpak", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "user", - "Help": "Pikpak username.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "pass", - "Help": "Pikpak password.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": true, - 
"NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "root_folder_id", - "Help": "ID of the root folder.\nLeave blank normally.\n\nFill in for rclone to use a non root folder as its starting point.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "use_trash", - "Help": "Send files to the trash instead of deleting permanently.\n\nDefaults to true, namely sending files to the trash.\nUse `--pikpak-use-trash=false` to delete files permanently instead.", - "Provider": "", - "Default": true, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "trashed_only", - "Help": "Only show files that are in the trash.\n\nThis will show trashed files in their original directory structure.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "hash_memory_limit", - "Help": "Files bigger than this will be cached on disk to calculate hash if required.", - "Provider": "", - "Default": 10485760, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10Mi", - "ValueStr": "10Mi", - "Type": "SizeSuffix" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 56829838, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,LeftSpace,RightSpace,RightPeriod,InvalidUtf8,Dot", - "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,LeftSpace,RightSpace,RightPeriod,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region to connect to.", + "Provider": "Cloudflare", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "auto", + "Help": "R2 buckets are automatically distributed across Cloudflare's data centers for low latency." + } ], - "CommandHelp": [ - { - "Name": "addurl", - "Short": "Add offline download task for url", - "Long": "This command adds offline download task for url.\n\nUsage:\n\n rclone backend addurl pikpak:dirpath url\n\nDownloads will be stored in 'dirpath'. 
If 'dirpath' is invalid, \ndownload will fallback to default 'My Pack' folder.\n", - "Opts": null - }, - { - "Name": "decompress", - "Short": "Request decompress of a file/files in a folder", - "Long": "This command requests decompress of file/files in a folder.\n\nUsage:\n\n rclone backend decompress pikpak:dirpath {filename} -o password=password\n rclone backend decompress pikpak:dirpath {filename} -o delete-src-file\n\nAn optional argument 'filename' can be specified for a file located in \n'pikpak:dirpath'. You may want to pass '-o password=password' for a \npassword-protected files. Also, pass '-o delete-src-file' to delete \nsource files after decompression finished.\n\nResult:\n\n {\n \"Decompressed\": 17,\n \"SourceDeleted\": 0,\n \"Errors\": 0\n }\n", - "Opts": null - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region to connect to.", + "Provider": "Qiniu", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "cn-east-1", + "Help": "The default endpoint - a good choice if you are unsure.\nEast China Region 1.\nNeeds location constraint cn-east-1." + }, + { + "Value": "cn-east-2", + "Help": "East China Region 2.\nNeeds location constraint cn-east-2." + }, + { + "Value": "cn-north-1", + "Help": "North China Region 1.\nNeeds location constraint cn-north-1." + }, + { + "Value": "cn-south-1", + "Help": "South China Region 1.\nNeeds location constraint cn-south-1." + }, + { + "Value": "us-north-1", + "Help": "North America Region.\nNeeds location constraint us-north-1." + }, + { + "Value": "ap-southeast-1", + "Help": "Southeast Asia Region 1.\nNeeds location constraint ap-southeast-1." + }, + { + "Value": "ap-northeast-1", + "Help": "Northeast Asia Region 1.\nNeeds location constraint ap-northeast-1." 
+ } ], - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "premiumizeme", - "Description": "premiumize.me", - "Prefix": "premiumizeme", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "api_key", - "Help": "API Key.\n\nThis is not normally used - use oauth instead.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50438154, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,DoubleQuote,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,DoubleQuote,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region where your bucket will be created and your data stored.\n", + "Provider": "IONOS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "de", + "Help": "Frankfurt, Germany" + }, + { + "Value": "eu-central-2", + "Help": "Berlin, Germany" + }, + { + "Value": "eu-south-2", + "Help": "Logrono, Spain" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - 
"MetadataInfo": null - }, - { - "Name": "protondrive", - "Description": "Proton Drive", - "Prefix": "protondrive", - "Options": [ - { - "Name": "username", - "Help": "The username of your proton drive account", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "password", - "Help": "The password of your proton drive account.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "2fa", - "Help": "The 2FA code\nThe 2FA code of your proton drive account if the account is set up with \ntwo-factor authentication", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 52559874, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,LeftSpace,RightSpace,InvalidUtf8,Dot", - "ValueStr": "Slash,LeftSpace,RightSpace,InvalidUtf8,Dot", - "Type": "MultiEncoder" - }, - { - "Name": "original_file_size", - "Help": "Return the file size before encryption\n\t\t\t\nThe size of the encrypted file will be different from (bigger than) the \noriginal file size. Unless there is a reason to return the file size \nafter encryption is performed, otherwise, set this option to true, as \nfeatures like Open() which will need to be supplied with original content \nsize, will fail to operate properly", - "Provider": "", - "Default": true, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "app_version", - "Help": "The app version string \n\nThe app version string indicates the client that is currently performing \nthe API request. This information is required and will be sent with every \nAPI request.", - "Provider": "", - "Default": "macos-drive@1.0.0-alpha.1+rclone", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "macos-drive@1.0.0-alpha.1+rclone", - "ValueStr": "macos-drive@1.0.0-alpha.1+rclone", - "Type": "string" - }, - { - "Name": "replace_existing_draft", - "Help": "Create a new revision when filename conflict is detected\n\nWhen a file upload is cancelled or failed before completion, a draft will be \ncreated and the subsequent upload of the same file to the same location will be \nreported as a conflict.\n\nIf the option is set to true, the draft will be replaced and then the upload \noperation will restart. 
If there are other clients also uploading at the same \nfile location at the same time, the behavior is currently unknown. Need to set \nto true for integration tests.\nIf the option is set to false, an error \"a draft exist - usually this means a \nfile is being uploaded at another client, or, there was a failed upload attempt\" \nwill be returned, and no upload will happen.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "enable_caching", - "Help": "Caches the files and folders metadata to reduce API calls\n\nThe files and folders on ProtonDrive are represented as links with keyrings, \nwhich can be cached to improve performance and be friendly to the API server.\n\nThe cache is currently built for the case when the rclone is the only instance \nperforming operations to the mount point. The event system, which is the proton\nAPI system that provides visibility of what has changed on the drive, is yet \nto be implemented, so updates from other clients won’t be reflected in the \ncache. Thus, if there are concurrent clients accessing the same mount point, \nthen we might have a problem with caching the stale data.", - "Provider": "", - "Default": true, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region where your bucket will be created and your data stored.\n", + "Provider": "Outscale", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "eu-west-2", + "Help": "Paris, France" + }, + { + "Value": "us-east-2", + "Help": "New Jersey, USA" + }, + { + "Value": "us-west-1", + "Help": "California, USA" + }, + { + "Value": "cloudgouv-eu-west-1", + "Help": "SecNumCloud, Paris, France" + }, + { + "Value": "ap-northeast-1", + "Help": "Tokyo, Japan" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "putio", - "Description": "Put.io", - "Prefix": "putio", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - 
"Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50438146, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region where your bucket will be created and your data stored.\n", + "Provider": "Petabox", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "us-east-1", + "Help": "US East (N. 
Virginia)" + }, + { + "Value": "eu-central-1", + "Help": "Europe (Frankfurt)" + }, + { + "Value": "ap-southeast-1", + "Help": "Asia Pacific (Singapore)" + }, + { + "Value": "me-south-1", + "Help": "Middle East (Bahrain)" + }, + { + "Value": "sa-east-1", + "Help": "South America (São Paulo)" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "qingstor", - "Description": "QingCloud Object Storage", - "Prefix": "qingstor", - "Options": [ - { - "Name": "env_auth", - "Help": "Get QingStor credentials from runtime.\n\nOnly applies if access_key_id and secret_access_key is blank.", - "Provider": "", - "Default": false, - "Value": null, - "Examples": [ - { - "Value": "false", - "Help": "Enter QingStor credentials in the next step.", - "Provider": "" - }, - { - "Value": "true", - "Help": "Get QingStor credentials from the environment (env vars or IAM).", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "access_key_id", - "Help": "QingStor Access Key ID.\n\nLeave blank for anonymous access or runtime credentials.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "secret_access_key", - "Help": "QingStor Secret Access Key (password).\n\nLeave blank for anonymous access or runtime credentials.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Enter an endpoint URL to connection QingStor API.\n\nLeave blank will use the default value \"https://qingstor.com:443\".", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "zone", - "Help": "Zone to connect to.\n\nDefault is \"pek3a\".", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "pek3a", - "Help": "The Beijing (China) Three Zone.\nNeeds location constraint pek3a.", - "Provider": "" - }, - { - "Value": "sh1a", - "Help": "The Shanghai (China) First Zone.\nNeeds location constraint sh1a.", - "Provider": "" - }, - { - "Value": "gd2a", - "Help": "The Guangdong (China) Second Zone.\nNeeds location constraint gd2a.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "connection_retries", - "Help": "Number of connection retries.", - "Provider": "", - "Default": 3, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "3", - "ValueStr": "3", - "Type": "int" - }, - { - "Name": 
"upload_cutoff", - "Help": "Cutoff for switching to chunked upload.\n\nAny files larger than this will be uploaded in chunks of chunk_size.\nThe minimum is 0 and the maximum is 5 GiB.", - "Provider": "", - "Default": 209715200, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "200Mi", - "ValueStr": "200Mi", - "Type": "SizeSuffix" - }, - { - "Name": "chunk_size", - "Help": "Chunk size to use for uploading.\n\nWhen uploading files larger than upload_cutoff they will be uploaded\nas multipart uploads using this chunk size.\n\nNote that \"--qingstor-upload-concurrency\" chunks of this size are buffered\nin memory per transfer.\n\nIf you are transferring large files over high-speed links and you have\nenough memory, then increasing this will speed up the transfers.", - "Provider": "", - "Default": 4194304, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "4Mi", - "ValueStr": "4Mi", - "Type": "SizeSuffix" - }, - { - "Name": "upload_concurrency", - "Help": "Concurrency for multipart uploads.\n\nThis is the number of chunks of the same file that are uploaded\nconcurrently.\n\nNB if you set this to \u003e 1 then the checksums of multipart uploads\nbecome corrupted (the uploads themselves are not corrupted though).\n\nIf you are uploading small numbers of large files over high-speed links\nand these uploads do not fully utilize your bandwidth, then increasing\nthis may help to speed up the transfers.", - "Provider": "", - "Default": 1, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1", - "ValueStr": "1", - "Type": "int" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 16842754, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,Ctl,InvalidUtf8", - "ValueStr": "Slash,Ctl,InvalidUtf8", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region where your data stored.\n", + "Provider": "Synology", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "eu-001", + "Help": "Europe Region 1" + }, + { + "Value": "eu-002", + "Help": "Europe Region 2" + }, + { + "Value": "us-001", + "Help": "US Region 1" + }, + { + "Value": "us-002", + "Help": "US Region 2" + }, + { + "Value": "tw-001", + "Help": "Asia (Taiwan)" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "s3", - "Description": "Amazon S3 Compliant Storage Providers including AWS, Alibaba, ArvanCloud, Ceph, China Mobile, Cloudflare, GCS, DigitalOcean, Dreamhost, Huawei OBS, IBM COS, IDrive e2, IONOS Cloud, Leviia, Liara, Lyve Cloud, Minio, Netease, Petabox, RackCorp, Scaleway, SeaweedFS, StackPath, Storj, Synology, Tencent COS, Qiniu and Wasabi", 
- "Prefix": "s3", - "Options": [ - { - "Name": "provider", - "Help": "Choose your S3 provider.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "AWS", - "Help": "Amazon Web Services (AWS) S3", - "Provider": "" - }, - { - "Value": "Alibaba", - "Help": "Alibaba Cloud Object Storage System (OSS) formerly Aliyun", - "Provider": "" - }, - { - "Value": "ArvanCloud", - "Help": "Arvan Cloud Object Storage (AOS)", - "Provider": "" - }, - { - "Value": "Ceph", - "Help": "Ceph Object Storage", - "Provider": "" - }, - { - "Value": "ChinaMobile", - "Help": "China Mobile Ecloud Elastic Object Storage (EOS)", - "Provider": "" - }, - { - "Value": "Cloudflare", - "Help": "Cloudflare R2 Storage", - "Provider": "" - }, - { - "Value": "DigitalOcean", - "Help": "DigitalOcean Spaces", - "Provider": "" - }, - { - "Value": "Dreamhost", - "Help": "Dreamhost DreamObjects", - "Provider": "" - }, - { - "Value": "GCS", - "Help": "Google Cloud Storage", - "Provider": "" - }, - { - "Value": "HuaweiOBS", - "Help": "Huawei Object Storage Service", - "Provider": "" - }, - { - "Value": "IBMCOS", - "Help": "IBM COS S3", - "Provider": "" - }, - { - "Value": "IDrive", - "Help": "IDrive e2", - "Provider": "" - }, - { - "Value": "IONOS", - "Help": "IONOS Cloud", - "Provider": "" - }, - { - "Value": "LyveCloud", - "Help": "Seagate Lyve Cloud", - "Provider": "" - }, - { - "Value": "Leviia", - "Help": "Leviia Object Storage", - "Provider": "" - }, - { - "Value": "Liara", - "Help": "Liara Object Storage", - "Provider": "" - }, - { - "Value": "Minio", - "Help": "Minio Object Storage", - "Provider": "" - }, - { - "Value": "Netease", - "Help": "Netease Object Storage (NOS)", - "Provider": "" - }, - { - "Value": "Petabox", - "Help": "Petabox Object Storage", - "Provider": "" - }, - { - "Value": "RackCorp", - "Help": "RackCorp Object Storage", - "Provider": "" - }, - { - "Value": "Scaleway", - "Help": "Scaleway Object Storage", - "Provider": "" - }, - { - "Value": "SeaweedFS", - "Help": "SeaweedFS S3", - "Provider": "" - }, - { - "Value": "StackPath", - "Help": "StackPath Object Storage", - "Provider": "" - }, - { - "Value": "Storj", - "Help": "Storj (S3 Compatible Gateway)", - "Provider": "" - }, - { - "Value": "Synology", - "Help": "Synology C2 Object Storage", - "Provider": "" - }, - { - "Value": "TencentCOS", - "Help": "Tencent Cloud Object Storage (COS)", - "Provider": "" - }, - { - "Value": "Wasabi", - "Help": "Wasabi Object Storage", - "Provider": "" - }, - { - "Value": "Qiniu", - "Help": "Qiniu Object Storage (Kodo)", - "Provider": "" - }, - { - "Value": "Other", - "Help": "Any other S3 compatible provider", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "env_auth", - "Help": "Get AWS credentials from runtime (environment variables or EC2/ECS meta data if no env vars).\n\nOnly applies if access_key_id and secret_access_key is blank.", - "Provider": "", - "Default": false, - "Value": null, - "Examples": [ - { - "Value": "false", - "Help": "Enter AWS credentials in the next step.", - "Provider": "" - }, - { - "Value": "true", - "Help": "Get AWS credentials from the environment (env vars or IAM).", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - 
"DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "access_key_id", - "Help": "AWS Access Key ID.\n\nLeave blank for anonymous access or runtime credentials.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "secret_access_key", - "Help": "AWS Secret Access Key (password).\n\nLeave blank for anonymous access or runtime credentials.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Region to connect to.", - "Provider": "AWS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "us-east-1", - "Help": "The default endpoint - a good choice if you are unsure.\nUS Region, Northern Virginia, or Pacific Northwest.\nLeave location constraint empty.", - "Provider": "" - }, - { - "Value": "us-east-2", - "Help": "US East (Ohio) Region.\nNeeds location constraint us-east-2.", - "Provider": "" - }, - { - "Value": "us-west-1", - "Help": "US West (Northern California) Region.\nNeeds location constraint us-west-1.", - "Provider": "" - }, - { - "Value": "us-west-2", - "Help": "US West (Oregon) Region.\nNeeds location constraint us-west-2.", - "Provider": "" - }, - { - "Value": "ca-central-1", - "Help": "Canada (Central) Region.\nNeeds location constraint ca-central-1.", - "Provider": "" - }, - { - "Value": "eu-west-1", - "Help": "EU (Ireland) Region.\nNeeds location constraint EU or eu-west-1.", - "Provider": "" - }, - { - "Value": "eu-west-2", - "Help": "EU (London) Region.\nNeeds location constraint eu-west-2.", - "Provider": "" - }, - { - "Value": "eu-west-3", - "Help": "EU (Paris) Region.\nNeeds location constraint eu-west-3.", - "Provider": "" - }, - { - "Value": "eu-north-1", - "Help": "EU (Stockholm) Region.\nNeeds location constraint eu-north-1.", - "Provider": "" - }, - { - "Value": "eu-south-1", - "Help": "EU (Milan) Region.\nNeeds location constraint eu-south-1.", - "Provider": "" - }, - { - "Value": "eu-central-1", - "Help": "EU (Frankfurt) Region.\nNeeds location constraint eu-central-1.", - "Provider": "" - }, - { - "Value": "ap-southeast-1", - "Help": "Asia Pacific (Singapore) Region.\nNeeds location constraint ap-southeast-1.", - "Provider": "" - }, - { - "Value": "ap-southeast-2", - "Help": "Asia Pacific (Sydney) Region.\nNeeds location constraint ap-southeast-2.", - "Provider": "" - }, - { - "Value": "ap-northeast-1", - "Help": "Asia Pacific (Tokyo) Region.\nNeeds location constraint ap-northeast-1.", - "Provider": "" - }, - { - "Value": "ap-northeast-2", - "Help": "Asia Pacific (Seoul).\nNeeds location constraint ap-northeast-2.", - "Provider": "" - }, - { - "Value": "ap-northeast-3", - "Help": "Asia Pacific (Osaka-Local).\nNeeds location constraint ap-northeast-3.", - "Provider": "" - }, - { - "Value": "ap-south-1", - "Help": "Asia Pacific (Mumbai).\nNeeds location constraint ap-south-1.", - "Provider": "" - }, - { - "Value": "ap-east-1", - "Help": "Asia Pacific (Hong Kong) Region.\nNeeds location constraint ap-east-1.", - "Provider": "" - }, - { - "Value": "sa-east-1", - "Help": "South America (Sao Paulo) Region.\nNeeds location constraint sa-east-1.", 
- "Provider": "" - }, - { - "Value": "me-south-1", - "Help": "Middle East (Bahrain) Region.\nNeeds location constraint me-south-1.", - "Provider": "" - }, - { - "Value": "af-south-1", - "Help": "Africa (Cape Town) Region.\nNeeds location constraint af-south-1.", - "Provider": "" - }, - { - "Value": "cn-north-1", - "Help": "China (Beijing) Region.\nNeeds location constraint cn-north-1.", - "Provider": "" - }, - { - "Value": "cn-northwest-1", - "Help": "China (Ningxia) Region.\nNeeds location constraint cn-northwest-1.", - "Provider": "" - }, - { - "Value": "us-gov-east-1", - "Help": "AWS GovCloud (US-East) Region.\nNeeds location constraint us-gov-east-1.", - "Provider": "" - }, - { - "Value": "us-gov-west-1", - "Help": "AWS GovCloud (US) Region.\nNeeds location constraint us-gov-west-1.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "region - the location where your bucket will be created and your data stored.\n", - "Provider": "RackCorp", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "global", - "Help": "Global CDN (All locations) Region", - "Provider": "" - }, - { - "Value": "au", - "Help": "Australia (All states)", - "Provider": "" - }, - { - "Value": "au-nsw", - "Help": "NSW (Australia) Region", - "Provider": "" - }, - { - "Value": "au-qld", - "Help": "QLD (Australia) Region", - "Provider": "" - }, - { - "Value": "au-vic", - "Help": "VIC (Australia) Region", - "Provider": "" - }, - { - "Value": "au-wa", - "Help": "Perth (Australia) Region", - "Provider": "" - }, - { - "Value": "ph", - "Help": "Manila (Philippines) Region", - "Provider": "" - }, - { - "Value": "th", - "Help": "Bangkok (Thailand) Region", - "Provider": "" - }, - { - "Value": "hk", - "Help": "HK (Hong Kong) Region", - "Provider": "" - }, - { - "Value": "mn", - "Help": "Ulaanbaatar (Mongolia) Region", - "Provider": "" - }, - { - "Value": "kg", - "Help": "Bishkek (Kyrgyzstan) Region", - "Provider": "" - }, - { - "Value": "id", - "Help": "Jakarta (Indonesia) Region", - "Provider": "" - }, - { - "Value": "jp", - "Help": "Tokyo (Japan) Region", - "Provider": "" - }, - { - "Value": "sg", - "Help": "SG (Singapore) Region", - "Provider": "" - }, - { - "Value": "de", - "Help": "Frankfurt (Germany) Region", - "Provider": "" - }, - { - "Value": "us", - "Help": "USA (AnyCast) Region", - "Provider": "" - }, - { - "Value": "us-east-1", - "Help": "New York (USA) Region", - "Provider": "" - }, - { - "Value": "us-west-1", - "Help": "Freemont (USA) Region", - "Provider": "" - }, - { - "Value": "nz", - "Help": "Auckland (New Zealand) Region", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Region to connect to.", - "Provider": "Scaleway", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "nl-ams", - "Help": "Amsterdam, The Netherlands", - "Provider": "" - }, - { - "Value": "fr-par", - "Help": "Paris, France", - "Provider": "" - }, - { - "Value": "pl-waw", - "Help": "Warsaw, Poland", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": 
false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Region to connect to. - the location where your bucket will be created and your data stored. Need bo be same with your endpoint.\n", - "Provider": "HuaweiOBS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "af-south-1", - "Help": "AF-Johannesburg", - "Provider": "" - }, - { - "Value": "ap-southeast-2", - "Help": "AP-Bangkok", - "Provider": "" - }, - { - "Value": "ap-southeast-3", - "Help": "AP-Singapore", - "Provider": "" - }, - { - "Value": "cn-east-3", - "Help": "CN East-Shanghai1", - "Provider": "" - }, - { - "Value": "cn-east-2", - "Help": "CN East-Shanghai2", - "Provider": "" - }, - { - "Value": "cn-north-1", - "Help": "CN North-Beijing1", - "Provider": "" - }, - { - "Value": "cn-north-4", - "Help": "CN North-Beijing4", - "Provider": "" - }, - { - "Value": "cn-south-1", - "Help": "CN South-Guangzhou", - "Provider": "" - }, - { - "Value": "ap-southeast-1", - "Help": "CN-Hong Kong", - "Provider": "" - }, - { - "Value": "sa-argentina-1", - "Help": "LA-Buenos Aires1", - "Provider": "" - }, - { - "Value": "sa-peru-1", - "Help": "LA-Lima1", - "Provider": "" - }, - { - "Value": "na-mexico-1", - "Help": "LA-Mexico City1", - "Provider": "" - }, - { - "Value": "sa-chile-1", - "Help": "LA-Santiago2", - "Provider": "" - }, - { - "Value": "sa-brazil-1", - "Help": "LA-Sao Paulo1", - "Provider": "" - }, - { - "Value": "ru-northwest-2", - "Help": "RU-Moscow2", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Region to connect to.", - "Provider": "Cloudflare", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "auto", - "Help": "R2 buckets are automatically distributed across Cloudflare's data centers for low latency.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Region to connect to.", - "Provider": "Qiniu", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "cn-east-1", - "Help": "The default endpoint - a good choice if you are unsure.\nEast China Region 1.\nNeeds location constraint cn-east-1.", - "Provider": "" - }, - { - "Value": "cn-east-2", - "Help": "East China Region 2.\nNeeds location constraint cn-east-2.", - "Provider": "" - }, - { - "Value": "cn-north-1", - "Help": "North China Region 1.\nNeeds location constraint cn-north-1.", - "Provider": "" - }, - { - "Value": "cn-south-1", - "Help": "South China Region 1.\nNeeds location constraint cn-south-1.", - "Provider": "" - }, - { - "Value": "us-north-1", - "Help": "North America Region.\nNeeds location constraint us-north-1.", - "Provider": "" - }, - { - "Value": "ap-southeast-1", - "Help": "Southeast Asia Region 1.\nNeeds location constraint ap-southeast-1.", - "Provider": "" - }, - { - "Value": "ap-northeast-1", - "Help": "Northeast Asia Region 1.\nNeeds location constraint ap-northeast-1.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - 
"ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Region where your bucket will be created and your data stored.\n", - "Provider": "IONOS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "de", - "Help": "Frankfurt, Germany", - "Provider": "" - }, - { - "Value": "eu-central-2", - "Help": "Berlin, Germany", - "Provider": "" - }, - { - "Value": "eu-south-2", - "Help": "Logrono, Spain", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Region where your bucket will be created and your data stored.\n", - "Provider": "Petabox", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "us-east-1", - "Help": "US East (N. Virginia)", - "Provider": "" - }, - { - "Value": "eu-central-1", - "Help": "Europe (Frankfurt)", - "Provider": "" - }, - { - "Value": "ap-southeast-1", - "Help": "Asia Pacific (Singapore)", - "Provider": "" - }, - { - "Value": "me-south-1", - "Help": "Middle East (Bahrain)", - "Provider": "" - }, - { - "Value": "sa-east-1", - "Help": "South America (São Paulo)", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Region where your data stored.\n", - "Provider": "Synology", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "eu-001", - "Help": "Europe Region 1", - "Provider": "" - }, - { - "Value": "eu-002", - "Help": "Europe Region 2", - "Provider": "" - }, - { - "Value": "us-001", - "Help": "US Region 1", - "Provider": "" - }, - { - "Value": "us-002", - "Help": "US Region 2", - "Provider": "" - }, - { - "Value": "tw-001", - "Help": "Asia (Taiwan)", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Region to connect to.\n\nLeave blank if you are using an S3 clone and you don't have a region.", - "Provider": "!AWS,Alibaba,ArvanCloud,ChinaMobile,Cloudflare,IONOS,Petabox,Liara,Qiniu,RackCorp,Scaleway,Storj,Synology,TencentCOS,HuaweiOBS,IDrive", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Use this if unsure.\nWill use v4 signatures and an empty region.", - "Provider": "" - }, - { - "Value": "other-v2-signature", - "Help": "Use this only if v4 signatures don't work.\nE.g. 
pre Jewel/v10 CEPH.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for S3 API.\n\nLeave blank if using AWS to use the default endpoint for the region.", - "Provider": "AWS", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for China Mobile Ecloud Elastic Object Storage (EOS) API.", - "Provider": "ChinaMobile", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "eos-wuxi-1.cmecloud.cn", - "Help": "The default endpoint - a good choice if you are unsure.\nEast China (Suzhou)", - "Provider": "" - }, - { - "Value": "eos-jinan-1.cmecloud.cn", - "Help": "East China (Jinan)", - "Provider": "" - }, - { - "Value": "eos-ningbo-1.cmecloud.cn", - "Help": "East China (Hangzhou)", - "Provider": "" - }, - { - "Value": "eos-shanghai-1.cmecloud.cn", - "Help": "East China (Shanghai-1)", - "Provider": "" - }, - { - "Value": "eos-zhengzhou-1.cmecloud.cn", - "Help": "Central China (Zhengzhou)", - "Provider": "" - }, - { - "Value": "eos-hunan-1.cmecloud.cn", - "Help": "Central China (Changsha-1)", - "Provider": "" - }, - { - "Value": "eos-zhuzhou-1.cmecloud.cn", - "Help": "Central China (Changsha-2)", - "Provider": "" - }, - { - "Value": "eos-guangzhou-1.cmecloud.cn", - "Help": "South China (Guangzhou-2)", - "Provider": "" - }, - { - "Value": "eos-dongguan-1.cmecloud.cn", - "Help": "South China (Guangzhou-3)", - "Provider": "" - }, - { - "Value": "eos-beijing-1.cmecloud.cn", - "Help": "North China (Beijing-1)", - "Provider": "" - }, - { - "Value": "eos-beijing-2.cmecloud.cn", - "Help": "North China (Beijing-2)", - "Provider": "" - }, - { - "Value": "eos-beijing-4.cmecloud.cn", - "Help": "North China (Beijing-3)", - "Provider": "" - }, - { - "Value": "eos-huhehaote-1.cmecloud.cn", - "Help": "North China (Huhehaote)", - "Provider": "" - }, - { - "Value": "eos-chengdu-1.cmecloud.cn", - "Help": "Southwest China (Chengdu)", - "Provider": "" - }, - { - "Value": "eos-chongqing-1.cmecloud.cn", - "Help": "Southwest China (Chongqing)", - "Provider": "" - }, - { - "Value": "eos-guiyang-1.cmecloud.cn", - "Help": "Southwest China (Guiyang)", - "Provider": "" - }, - { - "Value": "eos-xian-1.cmecloud.cn", - "Help": "Nouthwest China (Xian)", - "Provider": "" - }, - { - "Value": "eos-yunnan.cmecloud.cn", - "Help": "Yunnan China (Kunming)", - "Provider": "" - }, - { - "Value": "eos-yunnan-2.cmecloud.cn", - "Help": "Yunnan China (Kunming-2)", - "Provider": "" - }, - { - "Value": "eos-tianjin-1.cmecloud.cn", - "Help": "Tianjin China (Tianjin)", - "Provider": "" - }, - { - "Value": "eos-jilin-1.cmecloud.cn", - "Help": "Jilin China (Changchun)", - "Provider": "" - }, - { - "Value": "eos-hubei-1.cmecloud.cn", - "Help": "Hubei China (Xiangyan)", - "Provider": "" - }, - { - "Value": "eos-jiangxi-1.cmecloud.cn", - "Help": "Jiangxi China (Nanchang)", - "Provider": "" - }, - { - "Value": "eos-gansu-1.cmecloud.cn", - "Help": "Gansu China (Lanzhou)", - "Provider": "" - }, - { - "Value": "eos-shanxi-1.cmecloud.cn", - "Help": "Shanxi China (Taiyuan)", - "Provider": "" - }, - { - "Value": "eos-liaoning-1.cmecloud.cn", - "Help": 
"Liaoning China (Shenyang)", - "Provider": "" - }, - { - "Value": "eos-hebei-1.cmecloud.cn", - "Help": "Hebei China (Shijiazhuang)", - "Provider": "" - }, - { - "Value": "eos-fujian-1.cmecloud.cn", - "Help": "Fujian China (Xiamen)", - "Provider": "" - }, - { - "Value": "eos-guangxi-1.cmecloud.cn", - "Help": "Guangxi China (Nanning)", - "Provider": "" - }, - { - "Value": "eos-anhui-1.cmecloud.cn", - "Help": "Anhui China (Huainan)", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for Arvan Cloud Object Storage (AOS) API.", - "Provider": "ArvanCloud", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "s3.ir-thr-at1.arvanstorage.ir", - "Help": "The default endpoint - a good choice if you are unsure.\nTehran Iran (Simin)", - "Provider": "" - }, - { - "Value": "s3.ir-tbz-sh1.arvanstorage.ir", - "Help": "Tabriz Iran (Shahriar)", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for IBM COS S3 API.\n\nSpecify if using an IBM COS On Premise.", - "Provider": "IBMCOS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "s3.us.cloud-object-storage.appdomain.cloud", - "Help": "US Cross Region Endpoint", - "Provider": "" - }, - { - "Value": "s3.dal.us.cloud-object-storage.appdomain.cloud", - "Help": "US Cross Region Dallas Endpoint", - "Provider": "" - }, - { - "Value": "s3.wdc.us.cloud-object-storage.appdomain.cloud", - "Help": "US Cross Region Washington DC Endpoint", - "Provider": "" - }, - { - "Value": "s3.sjc.us.cloud-object-storage.appdomain.cloud", - "Help": "US Cross Region San Jose Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.us.cloud-object-storage.appdomain.cloud", - "Help": "US Cross Region Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.dal.us.cloud-object-storage.appdomain.cloud", - "Help": "US Cross Region Dallas Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.wdc.us.cloud-object-storage.appdomain.cloud", - "Help": "US Cross Region Washington DC Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.sjc.us.cloud-object-storage.appdomain.cloud", - "Help": "US Cross Region San Jose Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.us-east.cloud-object-storage.appdomain.cloud", - "Help": "US Region East Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.us-east.cloud-object-storage.appdomain.cloud", - "Help": "US Region East Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.us-south.cloud-object-storage.appdomain.cloud", - "Help": "US Region South Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.us-south.cloud-object-storage.appdomain.cloud", - "Help": "US Region South Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.eu.cloud-object-storage.appdomain.cloud", - "Help": "EU Cross Region Endpoint", - "Provider": "" - }, - { - "Value": "s3.fra.eu.cloud-object-storage.appdomain.cloud", - "Help": "EU Cross Region Frankfurt Endpoint", - "Provider": "" - }, - { - "Value": "s3.mil.eu.cloud-object-storage.appdomain.cloud", - "Help": "EU Cross Region Milan Endpoint", - 
"Provider": "" - }, - { - "Value": "s3.ams.eu.cloud-object-storage.appdomain.cloud", - "Help": "EU Cross Region Amsterdam Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.eu.cloud-object-storage.appdomain.cloud", - "Help": "EU Cross Region Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.fra.eu.cloud-object-storage.appdomain.cloud", - "Help": "EU Cross Region Frankfurt Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.mil.eu.cloud-object-storage.appdomain.cloud", - "Help": "EU Cross Region Milan Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.ams.eu.cloud-object-storage.appdomain.cloud", - "Help": "EU Cross Region Amsterdam Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.eu-gb.cloud-object-storage.appdomain.cloud", - "Help": "Great Britain Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.eu-gb.cloud-object-storage.appdomain.cloud", - "Help": "Great Britain Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.eu-de.cloud-object-storage.appdomain.cloud", - "Help": "EU Region DE Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.eu-de.cloud-object-storage.appdomain.cloud", - "Help": "EU Region DE Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.ap.cloud-object-storage.appdomain.cloud", - "Help": "APAC Cross Regional Endpoint", - "Provider": "" - }, - { - "Value": "s3.tok.ap.cloud-object-storage.appdomain.cloud", - "Help": "APAC Cross Regional Tokyo Endpoint", - "Provider": "" - }, - { - "Value": "s3.hkg.ap.cloud-object-storage.appdomain.cloud", - "Help": "APAC Cross Regional HongKong Endpoint", - "Provider": "" - }, - { - "Value": "s3.seo.ap.cloud-object-storage.appdomain.cloud", - "Help": "APAC Cross Regional Seoul Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.ap.cloud-object-storage.appdomain.cloud", - "Help": "APAC Cross Regional Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.tok.ap.cloud-object-storage.appdomain.cloud", - "Help": "APAC Cross Regional Tokyo Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.hkg.ap.cloud-object-storage.appdomain.cloud", - "Help": "APAC Cross Regional HongKong Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.seo.ap.cloud-object-storage.appdomain.cloud", - "Help": "APAC Cross Regional Seoul Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.jp-tok.cloud-object-storage.appdomain.cloud", - "Help": "APAC Region Japan Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.jp-tok.cloud-object-storage.appdomain.cloud", - "Help": "APAC Region Japan Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.au-syd.cloud-object-storage.appdomain.cloud", - "Help": "APAC Region Australia Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.au-syd.cloud-object-storage.appdomain.cloud", - "Help": "APAC Region Australia Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.ams03.cloud-object-storage.appdomain.cloud", - "Help": "Amsterdam Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.ams03.cloud-object-storage.appdomain.cloud", - "Help": "Amsterdam Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.che01.cloud-object-storage.appdomain.cloud", - "Help": "Chennai Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.che01.cloud-object-storage.appdomain.cloud", - "Help": "Chennai Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.mel01.cloud-object-storage.appdomain.cloud", - 
"Help": "Melbourne Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.mel01.cloud-object-storage.appdomain.cloud", - "Help": "Melbourne Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.osl01.cloud-object-storage.appdomain.cloud", - "Help": "Oslo Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.osl01.cloud-object-storage.appdomain.cloud", - "Help": "Oslo Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.tor01.cloud-object-storage.appdomain.cloud", - "Help": "Toronto Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.tor01.cloud-object-storage.appdomain.cloud", - "Help": "Toronto Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.seo01.cloud-object-storage.appdomain.cloud", - "Help": "Seoul Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.seo01.cloud-object-storage.appdomain.cloud", - "Help": "Seoul Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.mon01.cloud-object-storage.appdomain.cloud", - "Help": "Montreal Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.mon01.cloud-object-storage.appdomain.cloud", - "Help": "Montreal Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.mex01.cloud-object-storage.appdomain.cloud", - "Help": "Mexico Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.mex01.cloud-object-storage.appdomain.cloud", - "Help": "Mexico Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.sjc04.cloud-object-storage.appdomain.cloud", - "Help": "San Jose Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.sjc04.cloud-object-storage.appdomain.cloud", - "Help": "San Jose Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.mil01.cloud-object-storage.appdomain.cloud", - "Help": "Milan Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.mil01.cloud-object-storage.appdomain.cloud", - "Help": "Milan Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.hkg02.cloud-object-storage.appdomain.cloud", - "Help": "Hong Kong Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.hkg02.cloud-object-storage.appdomain.cloud", - "Help": "Hong Kong Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.par01.cloud-object-storage.appdomain.cloud", - "Help": "Paris Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.par01.cloud-object-storage.appdomain.cloud", - "Help": "Paris Single Site Private Endpoint", - "Provider": "" - }, - { - "Value": "s3.sng01.cloud-object-storage.appdomain.cloud", - "Help": "Singapore Single Site Endpoint", - "Provider": "" - }, - { - "Value": "s3.private.sng01.cloud-object-storage.appdomain.cloud", - "Help": "Singapore Single Site Private Endpoint", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for IONOS S3 Object Storage.\n\nSpecify the endpoint from the same region.", - "Provider": "IONOS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "s3-eu-central-1.ionoscloud.com", - "Help": "Frankfurt, Germany", - "Provider": "" - }, - { - "Value": "s3-eu-central-2.ionoscloud.com", - "Help": "Berlin, Germany", - "Provider": "" - }, - { - 
"Value": "s3-eu-south-2.ionoscloud.com", - "Help": "Logrono, Spain", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for Petabox S3 Object Storage.\n\nSpecify the endpoint from the same region.", - "Provider": "Petabox", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "s3.petabox.io", - "Help": "US East (N. Virginia)", - "Provider": "" - }, - { - "Value": "s3.us-east-1.petabox.io", - "Help": "US East (N. Virginia)", - "Provider": "" - }, - { - "Value": "s3.eu-central-1.petabox.io", - "Help": "Europe (Frankfurt)", - "Provider": "" - }, - { - "Value": "s3.ap-southeast-1.petabox.io", - "Help": "Asia Pacific (Singapore)", - "Provider": "" - }, - { - "Value": "s3.me-south-1.petabox.io", - "Help": "Middle East (Bahrain)", - "Provider": "" - }, - { - "Value": "s3.sa-east-1.petabox.io", - "Help": "South America (São Paulo)", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for Leviia Object Storage API.", - "Provider": "Leviia", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "s3.leviia.com", - "Help": "The default endpoint\nLeviia", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for Liara Object Storage API.", - "Provider": "Liara", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "storage.iran.liara.space", - "Help": "The default endpoint\nIran", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for OSS API.", - "Provider": "Alibaba", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "oss-accelerate.aliyuncs.com", - "Help": "Global Accelerate", - "Provider": "" - }, - { - "Value": "oss-accelerate-overseas.aliyuncs.com", - "Help": "Global Accelerate (outside mainland China)", - "Provider": "" - }, - { - "Value": "oss-cn-hangzhou.aliyuncs.com", - "Help": "East China 1 (Hangzhou)", - "Provider": "" - }, - { - "Value": "oss-cn-shanghai.aliyuncs.com", - "Help": "East China 2 (Shanghai)", - "Provider": "" - }, - { - "Value": "oss-cn-qingdao.aliyuncs.com", - "Help": "North China 1 (Qingdao)", - "Provider": "" - }, - { - "Value": "oss-cn-beijing.aliyuncs.com", - "Help": "North China 2 (Beijing)", - "Provider": "" - }, - { - "Value": "oss-cn-zhangjiakou.aliyuncs.com", - "Help": "North China 3 (Zhangjiakou)", - "Provider": "" - }, - { - "Value": "oss-cn-huhehaote.aliyuncs.com", - "Help": "North China 5 (Hohhot)", - "Provider": "" - }, - { - "Value": "oss-cn-wulanchabu.aliyuncs.com", - "Help": "North China 6 (Ulanqab)", - "Provider": "" - }, - { - "Value": "oss-cn-shenzhen.aliyuncs.com", - "Help": "South China 1 (Shenzhen)", - "Provider": "" - }, - { - "Value": 
"oss-cn-heyuan.aliyuncs.com", - "Help": "South China 2 (Heyuan)", - "Provider": "" - }, - { - "Value": "oss-cn-guangzhou.aliyuncs.com", - "Help": "South China 3 (Guangzhou)", - "Provider": "" - }, - { - "Value": "oss-cn-chengdu.aliyuncs.com", - "Help": "West China 1 (Chengdu)", - "Provider": "" - }, - { - "Value": "oss-cn-hongkong.aliyuncs.com", - "Help": "Hong Kong (Hong Kong)", - "Provider": "" - }, - { - "Value": "oss-us-west-1.aliyuncs.com", - "Help": "US West 1 (Silicon Valley)", - "Provider": "" - }, - { - "Value": "oss-us-east-1.aliyuncs.com", - "Help": "US East 1 (Virginia)", - "Provider": "" - }, - { - "Value": "oss-ap-southeast-1.aliyuncs.com", - "Help": "Southeast Asia Southeast 1 (Singapore)", - "Provider": "" - }, - { - "Value": "oss-ap-southeast-2.aliyuncs.com", - "Help": "Asia Pacific Southeast 2 (Sydney)", - "Provider": "" - }, - { - "Value": "oss-ap-southeast-3.aliyuncs.com", - "Help": "Southeast Asia Southeast 3 (Kuala Lumpur)", - "Provider": "" - }, - { - "Value": "oss-ap-southeast-5.aliyuncs.com", - "Help": "Asia Pacific Southeast 5 (Jakarta)", - "Provider": "" - }, - { - "Value": "oss-ap-northeast-1.aliyuncs.com", - "Help": "Asia Pacific Northeast 1 (Japan)", - "Provider": "" - }, - { - "Value": "oss-ap-south-1.aliyuncs.com", - "Help": "Asia Pacific South 1 (Mumbai)", - "Provider": "" - }, - { - "Value": "oss-eu-central-1.aliyuncs.com", - "Help": "Central Europe 1 (Frankfurt)", - "Provider": "" - }, - { - "Value": "oss-eu-west-1.aliyuncs.com", - "Help": "West Europe (London)", - "Provider": "" - }, - { - "Value": "oss-me-east-1.aliyuncs.com", - "Help": "Middle East 1 (Dubai)", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for OBS API.", - "Provider": "HuaweiOBS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "obs.af-south-1.myhuaweicloud.com", - "Help": "AF-Johannesburg", - "Provider": "" - }, - { - "Value": "obs.ap-southeast-2.myhuaweicloud.com", - "Help": "AP-Bangkok", - "Provider": "" - }, - { - "Value": "obs.ap-southeast-3.myhuaweicloud.com", - "Help": "AP-Singapore", - "Provider": "" - }, - { - "Value": "obs.cn-east-3.myhuaweicloud.com", - "Help": "CN East-Shanghai1", - "Provider": "" - }, - { - "Value": "obs.cn-east-2.myhuaweicloud.com", - "Help": "CN East-Shanghai2", - "Provider": "" - }, - { - "Value": "obs.cn-north-1.myhuaweicloud.com", - "Help": "CN North-Beijing1", - "Provider": "" - }, - { - "Value": "obs.cn-north-4.myhuaweicloud.com", - "Help": "CN North-Beijing4", - "Provider": "" - }, - { - "Value": "obs.cn-south-1.myhuaweicloud.com", - "Help": "CN South-Guangzhou", - "Provider": "" - }, - { - "Value": "obs.ap-southeast-1.myhuaweicloud.com", - "Help": "CN-Hong Kong", - "Provider": "" - }, - { - "Value": "obs.sa-argentina-1.myhuaweicloud.com", - "Help": "LA-Buenos Aires1", - "Provider": "" - }, - { - "Value": "obs.sa-peru-1.myhuaweicloud.com", - "Help": "LA-Lima1", - "Provider": "" - }, - { - "Value": "obs.na-mexico-1.myhuaweicloud.com", - "Help": "LA-Mexico City1", - "Provider": "" - }, - { - "Value": "obs.sa-chile-1.myhuaweicloud.com", - "Help": "LA-Santiago2", - "Provider": "" - }, - { - "Value": "obs.sa-brazil-1.myhuaweicloud.com", - "Help": "LA-Sao Paulo1", - "Provider": "" - }, - { - "Value": "obs.ru-northwest-2.myhuaweicloud.com", - "Help": "RU-Moscow2", - "Provider": "" - } - ], - 
"ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for Scaleway Object Storage.", - "Provider": "Scaleway", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "s3.nl-ams.scw.cloud", - "Help": "Amsterdam Endpoint", - "Provider": "" - }, - { - "Value": "s3.fr-par.scw.cloud", - "Help": "Paris Endpoint", - "Provider": "" - }, - { - "Value": "s3.pl-waw.scw.cloud", - "Help": "Warsaw Endpoint", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for StackPath Object Storage.", - "Provider": "StackPath", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "s3.us-east-2.stackpathstorage.com", - "Help": "US East Endpoint", - "Provider": "" - }, - { - "Value": "s3.us-west-1.stackpathstorage.com", - "Help": "US West Endpoint", - "Provider": "" - }, - { - "Value": "s3.eu-central-1.stackpathstorage.com", - "Help": "EU Endpoint", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for Google Cloud Storage.", - "Provider": "GCS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "https://storage.googleapis.com", - "Help": "Google Cloud Storage endpoint", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for Storj Gateway.", - "Provider": "Storj", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "gateway.storjshare.io", - "Help": "Global Hosted Gateway", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for Synology C2 Object Storage API.", - "Provider": "Synology", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "eu-001.s3.synologyc2.net", - "Help": "EU Endpoint 1", - "Provider": "" - }, - { - "Value": "eu-002.s3.synologyc2.net", - "Help": "EU Endpoint 2", - "Provider": "" - }, - { - "Value": "us-001.s3.synologyc2.net", - "Help": "US Endpoint 1", - "Provider": "" - }, - { - "Value": "us-002.s3.synologyc2.net", - "Help": "US Endpoint 2", - "Provider": "" - }, - { - "Value": "tw-001.s3.synologyc2.net", - "Help": "TW Endpoint 1", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for Tencent COS API.", - "Provider": "TencentCOS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "cos.ap-beijing.myqcloud.com", - 
"Help": "Beijing Region", - "Provider": "" - }, - { - "Value": "cos.ap-nanjing.myqcloud.com", - "Help": "Nanjing Region", - "Provider": "" - }, - { - "Value": "cos.ap-shanghai.myqcloud.com", - "Help": "Shanghai Region", - "Provider": "" - }, - { - "Value": "cos.ap-guangzhou.myqcloud.com", - "Help": "Guangzhou Region", - "Provider": "" - }, - { - "Value": "cos.ap-nanjing.myqcloud.com", - "Help": "Nanjing Region", - "Provider": "" - }, - { - "Value": "cos.ap-chengdu.myqcloud.com", - "Help": "Chengdu Region", - "Provider": "" - }, - { - "Value": "cos.ap-chongqing.myqcloud.com", - "Help": "Chongqing Region", - "Provider": "" - }, - { - "Value": "cos.ap-hongkong.myqcloud.com", - "Help": "Hong Kong (China) Region", - "Provider": "" - }, - { - "Value": "cos.ap-singapore.myqcloud.com", - "Help": "Singapore Region", - "Provider": "" - }, - { - "Value": "cos.ap-mumbai.myqcloud.com", - "Help": "Mumbai Region", - "Provider": "" - }, - { - "Value": "cos.ap-seoul.myqcloud.com", - "Help": "Seoul Region", - "Provider": "" - }, - { - "Value": "cos.ap-bangkok.myqcloud.com", - "Help": "Bangkok Region", - "Provider": "" - }, - { - "Value": "cos.ap-tokyo.myqcloud.com", - "Help": "Tokyo Region", - "Provider": "" - }, - { - "Value": "cos.na-siliconvalley.myqcloud.com", - "Help": "Silicon Valley Region", - "Provider": "" - }, - { - "Value": "cos.na-ashburn.myqcloud.com", - "Help": "Virginia Region", - "Provider": "" - }, - { - "Value": "cos.na-toronto.myqcloud.com", - "Help": "Toronto Region", - "Provider": "" - }, - { - "Value": "cos.eu-frankfurt.myqcloud.com", - "Help": "Frankfurt Region", - "Provider": "" - }, - { - "Value": "cos.eu-moscow.myqcloud.com", - "Help": "Moscow Region", - "Provider": "" - }, - { - "Value": "cos.accelerate.myqcloud.com", - "Help": "Use Tencent COS Accelerate Endpoint", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for RackCorp Object Storage.", - "Provider": "RackCorp", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "s3.rackcorp.com", - "Help": "Global (AnyCast) Endpoint", - "Provider": "" - }, - { - "Value": "au.s3.rackcorp.com", - "Help": "Australia (Anycast) Endpoint", - "Provider": "" - }, - { - "Value": "au-nsw.s3.rackcorp.com", - "Help": "Sydney (Australia) Endpoint", - "Provider": "" - }, - { - "Value": "au-qld.s3.rackcorp.com", - "Help": "Brisbane (Australia) Endpoint", - "Provider": "" - }, - { - "Value": "au-vic.s3.rackcorp.com", - "Help": "Melbourne (Australia) Endpoint", - "Provider": "" - }, - { - "Value": "au-wa.s3.rackcorp.com", - "Help": "Perth (Australia) Endpoint", - "Provider": "" - }, - { - "Value": "ph.s3.rackcorp.com", - "Help": "Manila (Philippines) Endpoint", - "Provider": "" - }, - { - "Value": "th.s3.rackcorp.com", - "Help": "Bangkok (Thailand) Endpoint", - "Provider": "" - }, - { - "Value": "hk.s3.rackcorp.com", - "Help": "HK (Hong Kong) Endpoint", - "Provider": "" - }, - { - "Value": "mn.s3.rackcorp.com", - "Help": "Ulaanbaatar (Mongolia) Endpoint", - "Provider": "" - }, - { - "Value": "kg.s3.rackcorp.com", - "Help": "Bishkek (Kyrgyzstan) Endpoint", - "Provider": "" - }, - { - "Value": "id.s3.rackcorp.com", - "Help": "Jakarta (Indonesia) Endpoint", - "Provider": "" - }, - { - "Value": "jp.s3.rackcorp.com", - "Help": "Tokyo (Japan) Endpoint", - "Provider": "" - }, - { - "Value": 
"sg.s3.rackcorp.com", - "Help": "SG (Singapore) Endpoint", - "Provider": "" - }, - { - "Value": "de.s3.rackcorp.com", - "Help": "Frankfurt (Germany) Endpoint", - "Provider": "" - }, - { - "Value": "us.s3.rackcorp.com", - "Help": "USA (AnyCast) Endpoint", - "Provider": "" - }, - { - "Value": "us-east-1.s3.rackcorp.com", - "Help": "New York (USA) Endpoint", - "Provider": "" - }, - { - "Value": "us-west-1.s3.rackcorp.com", - "Help": "Freemont (USA) Endpoint", - "Provider": "" - }, - { - "Value": "nz.s3.rackcorp.com", - "Help": "Auckland (New Zealand) Endpoint", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for Qiniu Object Storage.", - "Provider": "Qiniu", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "s3-cn-east-1.qiniucs.com", - "Help": "East China Endpoint 1", - "Provider": "" - }, - { - "Value": "s3-cn-east-2.qiniucs.com", - "Help": "East China Endpoint 2", - "Provider": "" - }, - { - "Value": "s3-cn-north-1.qiniucs.com", - "Help": "North China Endpoint 1", - "Provider": "" - }, - { - "Value": "s3-cn-south-1.qiniucs.com", - "Help": "South China Endpoint 1", - "Provider": "" - }, - { - "Value": "s3-us-north-1.qiniucs.com", - "Help": "North America Endpoint 1", - "Provider": "" - }, - { - "Value": "s3-ap-southeast-1.qiniucs.com", - "Help": "Southeast Asia Endpoint 1", - "Provider": "" - }, - { - "Value": "s3-ap-northeast-1.qiniucs.com", - "Help": "Northeast Asia Endpoint 1", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "endpoint", - "Help": "Endpoint for S3 API.\n\nRequired when using an S3 clone.", - "Provider": "!AWS,ArvanCloud,IBMCOS,IDrive,IONOS,TencentCOS,HuaweiOBS,Alibaba,ChinaMobile,GCS,Liara,Scaleway,StackPath,Storj,Synology,RackCorp,Qiniu,Petabox", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "objects-us-east-1.dream.io", - "Help": "Dream Objects endpoint", - "Provider": "Dreamhost" - }, - { - "Value": "syd1.digitaloceanspaces.com", - "Help": "DigitalOcean Spaces Sydney 1", - "Provider": "DigitalOcean" - }, - { - "Value": "sfo3.digitaloceanspaces.com", - "Help": "DigitalOcean Spaces San Francisco 3", - "Provider": "DigitalOcean" - }, - { - "Value": "fra1.digitaloceanspaces.com", - "Help": "DigitalOcean Spaces Frankfurt 1", - "Provider": "DigitalOcean" - }, - { - "Value": "nyc3.digitaloceanspaces.com", - "Help": "DigitalOcean Spaces New York 3", - "Provider": "DigitalOcean" - }, - { - "Value": "ams3.digitaloceanspaces.com", - "Help": "DigitalOcean Spaces Amsterdam 3", - "Provider": "DigitalOcean" - }, - { - "Value": "sgp1.digitaloceanspaces.com", - "Help": "DigitalOcean Spaces Singapore 1", - "Provider": "DigitalOcean" - }, - { - "Value": "localhost:8333", - "Help": "SeaweedFS S3 localhost", - "Provider": "SeaweedFS" - }, - { - "Value": "s3.us-east-1.lyvecloud.seagate.com", - "Help": "Seagate Lyve Cloud US East 1 (Virginia)", - "Provider": "LyveCloud" - }, - { - "Value": "s3.us-west-1.lyvecloud.seagate.com", - "Help": "Seagate Lyve Cloud US West 1 (California)", - "Provider": "LyveCloud" - }, - { - "Value": "s3.ap-southeast-1.lyvecloud.seagate.com", - "Help": "Seagate Lyve Cloud AP 
Southeast 1 (Singapore)", - "Provider": "LyveCloud" - }, - { - "Value": "s3.wasabisys.com", - "Help": "Wasabi US East 1 (N. Virginia)", - "Provider": "Wasabi" - }, - { - "Value": "s3.us-east-2.wasabisys.com", - "Help": "Wasabi US East 2 (N. Virginia)", - "Provider": "Wasabi" - }, - { - "Value": "s3.us-central-1.wasabisys.com", - "Help": "Wasabi US Central 1 (Texas)", - "Provider": "Wasabi" - }, - { - "Value": "s3.us-west-1.wasabisys.com", - "Help": "Wasabi US West 1 (Oregon)", - "Provider": "Wasabi" - }, - { - "Value": "s3.ca-central-1.wasabisys.com", - "Help": "Wasabi CA Central 1 (Toronto)", - "Provider": "Wasabi" - }, - { - "Value": "s3.eu-central-1.wasabisys.com", - "Help": "Wasabi EU Central 1 (Amsterdam)", - "Provider": "Wasabi" - }, - { - "Value": "s3.eu-central-2.wasabisys.com", - "Help": "Wasabi EU Central 2 (Frankfurt)", - "Provider": "Wasabi" - }, - { - "Value": "s3.eu-west-1.wasabisys.com", - "Help": "Wasabi EU West 1 (London)", - "Provider": "Wasabi" - }, - { - "Value": "s3.eu-west-2.wasabisys.com", - "Help": "Wasabi EU West 2 (Paris)", - "Provider": "Wasabi" - }, - { - "Value": "s3.ap-northeast-1.wasabisys.com", - "Help": "Wasabi AP Northeast 1 (Tokyo) endpoint", - "Provider": "Wasabi" - }, - { - "Value": "s3.ap-northeast-2.wasabisys.com", - "Help": "Wasabi AP Northeast 2 (Osaka) endpoint", - "Provider": "Wasabi" - }, - { - "Value": "s3.ap-southeast-1.wasabisys.com", - "Help": "Wasabi AP Southeast 1 (Singapore)", - "Provider": "Wasabi" - }, - { - "Value": "s3.ap-southeast-2.wasabisys.com", - "Help": "Wasabi AP Southeast 2 (Sydney)", - "Provider": "Wasabi" - }, - { - "Value": "storage.iran.liara.space", - "Help": "Liara Iran endpoint", - "Provider": "Liara" - }, - { - "Value": "s3.ir-thr-at1.arvanstorage.ir", - "Help": "ArvanCloud Tehran Iran (Simin) endpoint", - "Provider": "ArvanCloud" - }, - { - "Value": "s3.ir-tbz-sh1.arvanstorage.ir", - "Help": "ArvanCloud Tabriz Iran (Shahriar) endpoint", - "Provider": "ArvanCloud" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "location_constraint", - "Help": "Location constraint - must be set to match the Region.\n\nUsed when creating buckets only.", - "Provider": "AWS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Empty for US Region, Northern Virginia, or Pacific Northwest", - "Provider": "" - }, - { - "Value": "us-east-2", - "Help": "US East (Ohio) Region", - "Provider": "" - }, - { - "Value": "us-west-1", - "Help": "US West (Northern California) Region", - "Provider": "" - }, - { - "Value": "us-west-2", - "Help": "US West (Oregon) Region", - "Provider": "" - }, - { - "Value": "ca-central-1", - "Help": "Canada (Central) Region", - "Provider": "" - }, - { - "Value": "eu-west-1", - "Help": "EU (Ireland) Region", - "Provider": "" - }, - { - "Value": "eu-west-2", - "Help": "EU (London) Region", - "Provider": "" - }, - { - "Value": "eu-west-3", - "Help": "EU (Paris) Region", - "Provider": "" - }, - { - "Value": "eu-north-1", - "Help": "EU (Stockholm) Region", - "Provider": "" - }, - { - "Value": "eu-south-1", - "Help": "EU (Milan) Region", - "Provider": "" - }, - { - "Value": "EU", - "Help": "EU Region", - "Provider": "" - }, - { - "Value": "ap-southeast-1", - "Help": "Asia Pacific (Singapore) Region", - "Provider": "" - }, - { - "Value": "ap-southeast-2", - "Help": "Asia Pacific (Sydney) Region", - 
"Provider": "" - }, - { - "Value": "ap-northeast-1", - "Help": "Asia Pacific (Tokyo) Region", - "Provider": "" - }, - { - "Value": "ap-northeast-2", - "Help": "Asia Pacific (Seoul) Region", - "Provider": "" - }, - { - "Value": "ap-northeast-3", - "Help": "Asia Pacific (Osaka-Local) Region", - "Provider": "" - }, - { - "Value": "ap-south-1", - "Help": "Asia Pacific (Mumbai) Region", - "Provider": "" - }, - { - "Value": "ap-east-1", - "Help": "Asia Pacific (Hong Kong) Region", - "Provider": "" - }, - { - "Value": "sa-east-1", - "Help": "South America (Sao Paulo) Region", - "Provider": "" - }, - { - "Value": "me-south-1", - "Help": "Middle East (Bahrain) Region", - "Provider": "" - }, - { - "Value": "af-south-1", - "Help": "Africa (Cape Town) Region", - "Provider": "" - }, - { - "Value": "cn-north-1", - "Help": "China (Beijing) Region", - "Provider": "" - }, - { - "Value": "cn-northwest-1", - "Help": "China (Ningxia) Region", - "Provider": "" - }, - { - "Value": "us-gov-east-1", - "Help": "AWS GovCloud (US-East) Region", - "Provider": "" - }, - { - "Value": "us-gov-west-1", - "Help": "AWS GovCloud (US) Region", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "location_constraint", - "Help": "Location constraint - must match endpoint.\n\nUsed when creating buckets only.", - "Provider": "ChinaMobile", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "wuxi1", - "Help": "East China (Suzhou)", - "Provider": "" - }, - { - "Value": "jinan1", - "Help": "East China (Jinan)", - "Provider": "" - }, - { - "Value": "ningbo1", - "Help": "East China (Hangzhou)", - "Provider": "" - }, - { - "Value": "shanghai1", - "Help": "East China (Shanghai-1)", - "Provider": "" - }, - { - "Value": "zhengzhou1", - "Help": "Central China (Zhengzhou)", - "Provider": "" - }, - { - "Value": "hunan1", - "Help": "Central China (Changsha-1)", - "Provider": "" - }, - { - "Value": "zhuzhou1", - "Help": "Central China (Changsha-2)", - "Provider": "" - }, - { - "Value": "guangzhou1", - "Help": "South China (Guangzhou-2)", - "Provider": "" - }, - { - "Value": "dongguan1", - "Help": "South China (Guangzhou-3)", - "Provider": "" - }, - { - "Value": "beijing1", - "Help": "North China (Beijing-1)", - "Provider": "" - }, - { - "Value": "beijing2", - "Help": "North China (Beijing-2)", - "Provider": "" - }, - { - "Value": "beijing4", - "Help": "North China (Beijing-3)", - "Provider": "" - }, - { - "Value": "huhehaote1", - "Help": "North China (Huhehaote)", - "Provider": "" - }, - { - "Value": "chengdu1", - "Help": "Southwest China (Chengdu)", - "Provider": "" - }, - { - "Value": "chongqing1", - "Help": "Southwest China (Chongqing)", - "Provider": "" - }, - { - "Value": "guiyang1", - "Help": "Southwest China (Guiyang)", - "Provider": "" - }, - { - "Value": "xian1", - "Help": "Nouthwest China (Xian)", - "Provider": "" - }, - { - "Value": "yunnan", - "Help": "Yunnan China (Kunming)", - "Provider": "" - }, - { - "Value": "yunnan2", - "Help": "Yunnan China (Kunming-2)", - "Provider": "" - }, - { - "Value": "tianjin1", - "Help": "Tianjin China (Tianjin)", - "Provider": "" - }, - { - "Value": "jilin1", - "Help": "Jilin China (Changchun)", - "Provider": "" - }, - { - "Value": "hubei1", - "Help": "Hubei China (Xiangyan)", - "Provider": "" - }, - { - "Value": "jiangxi1", - "Help": "Jiangxi China (Nanchang)", - "Provider": 
"" - }, - { - "Value": "gansu1", - "Help": "Gansu China (Lanzhou)", - "Provider": "" - }, - { - "Value": "shanxi1", - "Help": "Shanxi China (Taiyuan)", - "Provider": "" - }, - { - "Value": "liaoning1", - "Help": "Liaoning China (Shenyang)", - "Provider": "" - }, - { - "Value": "hebei1", - "Help": "Hebei China (Shijiazhuang)", - "Provider": "" - }, - { - "Value": "fujian1", - "Help": "Fujian China (Xiamen)", - "Provider": "" - }, - { - "Value": "guangxi1", - "Help": "Guangxi China (Nanning)", - "Provider": "" - }, - { - "Value": "anhui1", - "Help": "Anhui China (Huainan)", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "location_constraint", - "Help": "Location constraint - must match endpoint.\n\nUsed when creating buckets only.", - "Provider": "ArvanCloud", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "ir-thr-at1", - "Help": "Tehran Iran (Simin)", - "Provider": "" - }, - { - "Value": "ir-tbz-sh1", - "Help": "Tabriz Iran (Shahriar)", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "location_constraint", - "Help": "Location constraint - must match endpoint when using IBM Cloud Public.\n\nFor on-prem COS, do not make a selection from this list, hit enter.", - "Provider": "IBMCOS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "us-standard", - "Help": "US Cross Region Standard", - "Provider": "" - }, - { - "Value": "us-vault", - "Help": "US Cross Region Vault", - "Provider": "" - }, - { - "Value": "us-cold", - "Help": "US Cross Region Cold", - "Provider": "" - }, - { - "Value": "us-flex", - "Help": "US Cross Region Flex", - "Provider": "" - }, - { - "Value": "us-east-standard", - "Help": "US East Region Standard", - "Provider": "" - }, - { - "Value": "us-east-vault", - "Help": "US East Region Vault", - "Provider": "" - }, - { - "Value": "us-east-cold", - "Help": "US East Region Cold", - "Provider": "" - }, - { - "Value": "us-east-flex", - "Help": "US East Region Flex", - "Provider": "" - }, - { - "Value": "us-south-standard", - "Help": "US South Region Standard", - "Provider": "" - }, - { - "Value": "us-south-vault", - "Help": "US South Region Vault", - "Provider": "" - }, - { - "Value": "us-south-cold", - "Help": "US South Region Cold", - "Provider": "" - }, - { - "Value": "us-south-flex", - "Help": "US South Region Flex", - "Provider": "" - }, - { - "Value": "eu-standard", - "Help": "EU Cross Region Standard", - "Provider": "" - }, - { - "Value": "eu-vault", - "Help": "EU Cross Region Vault", - "Provider": "" - }, - { - "Value": "eu-cold", - "Help": "EU Cross Region Cold", - "Provider": "" - }, - { - "Value": "eu-flex", - "Help": "EU Cross Region Flex", - "Provider": "" - }, - { - "Value": "eu-gb-standard", - "Help": "Great Britain Standard", - "Provider": "" - }, - { - "Value": "eu-gb-vault", - "Help": "Great Britain Vault", - "Provider": "" - }, - { - "Value": "eu-gb-cold", - "Help": "Great Britain Cold", - "Provider": "" - }, - { - "Value": "eu-gb-flex", - "Help": "Great Britain Flex", - "Provider": "" - }, - { - "Value": "ap-standard", - "Help": "APAC Standard", - "Provider": "" - }, - { - "Value": "ap-vault", - "Help": "APAC 
Vault", - "Provider": "" - }, - { - "Value": "ap-cold", - "Help": "APAC Cold", - "Provider": "" - }, - { - "Value": "ap-flex", - "Help": "APAC Flex", - "Provider": "" - }, - { - "Value": "mel01-standard", - "Help": "Melbourne Standard", - "Provider": "" - }, - { - "Value": "mel01-vault", - "Help": "Melbourne Vault", - "Provider": "" - }, - { - "Value": "mel01-cold", - "Help": "Melbourne Cold", - "Provider": "" - }, - { - "Value": "mel01-flex", - "Help": "Melbourne Flex", - "Provider": "" - }, - { - "Value": "tor01-standard", - "Help": "Toronto Standard", - "Provider": "" - }, - { - "Value": "tor01-vault", - "Help": "Toronto Vault", - "Provider": "" - }, - { - "Value": "tor01-cold", - "Help": "Toronto Cold", - "Provider": "" - }, - { - "Value": "tor01-flex", - "Help": "Toronto Flex", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "location_constraint", - "Help": "Location constraint - the location where your bucket will be located and your data stored.\n", - "Provider": "RackCorp", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "global", - "Help": "Global CDN Region", - "Provider": "" - }, - { - "Value": "au", - "Help": "Australia (All locations)", - "Provider": "" - }, - { - "Value": "au-nsw", - "Help": "NSW (Australia) Region", - "Provider": "" - }, - { - "Value": "au-qld", - "Help": "QLD (Australia) Region", - "Provider": "" - }, - { - "Value": "au-vic", - "Help": "VIC (Australia) Region", - "Provider": "" - }, - { - "Value": "au-wa", - "Help": "Perth (Australia) Region", - "Provider": "" - }, - { - "Value": "ph", - "Help": "Manila (Philippines) Region", - "Provider": "" - }, - { - "Value": "th", - "Help": "Bangkok (Thailand) Region", - "Provider": "" - }, - { - "Value": "hk", - "Help": "HK (Hong Kong) Region", - "Provider": "" - }, - { - "Value": "mn", - "Help": "Ulaanbaatar (Mongolia) Region", - "Provider": "" - }, - { - "Value": "kg", - "Help": "Bishkek (Kyrgyzstan) Region", - "Provider": "" - }, - { - "Value": "id", - "Help": "Jakarta (Indonesia) Region", - "Provider": "" - }, - { - "Value": "jp", - "Help": "Tokyo (Japan) Region", - "Provider": "" - }, - { - "Value": "sg", - "Help": "SG (Singapore) Region", - "Provider": "" - }, - { - "Value": "de", - "Help": "Frankfurt (Germany) Region", - "Provider": "" - }, - { - "Value": "us", - "Help": "USA (AnyCast) Region", - "Provider": "" - }, - { - "Value": "us-east-1", - "Help": "New York (USA) Region", - "Provider": "" - }, - { - "Value": "us-west-1", - "Help": "Freemont (USA) Region", - "Provider": "" - }, - { - "Value": "nz", - "Help": "Auckland (New Zealand) Region", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "location_constraint", - "Help": "Location constraint - must be set to match the Region.\n\nUsed when creating buckets only.", - "Provider": "Qiniu", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "cn-east-1", - "Help": "East China Region 1", - "Provider": "" - }, - { - "Value": "cn-east-2", - "Help": "East China Region 2", - "Provider": "" - }, - { - "Value": "cn-north-1", - "Help": "North China Region 1", - "Provider": "" - }, - { - "Value": "cn-south-1", - "Help": 
"South China Region 1", - "Provider": "" - }, - { - "Value": "us-north-1", - "Help": "North America Region 1", - "Provider": "" - }, - { - "Value": "ap-southeast-1", - "Help": "Southeast Asia Region 1", - "Provider": "" - }, - { - "Value": "ap-northeast-1", - "Help": "Northeast Asia Region 1", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "location_constraint", - "Help": "Location constraint - must be set to match the Region.\n\nLeave blank if not sure. Used when creating buckets only.", - "Provider": "!AWS,Alibaba,ArvanCloud,HuaweiOBS,ChinaMobile,Cloudflare,IBMCOS,IDrive,IONOS,Leviia,Liara,Qiniu,RackCorp,Scaleway,StackPath,Storj,TencentCOS,Petabox", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "acl", - "Help": "Canned ACL used when creating buckets and storing or copying objects.\n\nThis ACL is used for creating objects and if bucket_acl isn't set, for creating buckets too.\n\nFor more info visit https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl\n\nNote that this ACL is applied when server-side copying objects as S3\ndoesn't copy the ACL from the source but rather writes a fresh one.\n\nIf the acl is an empty string then no X-Amz-Acl: header is added and\nthe default (private) will be used.\n", - "Provider": "!Storj,Synology,Cloudflare", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "default", - "Help": "Owner gets Full_CONTROL.\nNo one else has access rights (default).", - "Provider": "TencentCOS" - }, - { - "Value": "private", - "Help": "Owner gets FULL_CONTROL.\nNo one else has access rights (default).", - "Provider": "!IBMCOS,TencentCOS" - }, - { - "Value": "public-read", - "Help": "Owner gets FULL_CONTROL.\nThe AllUsers group gets READ access.", - "Provider": "!IBMCOS" - }, - { - "Value": "public-read-write", - "Help": "Owner gets FULL_CONTROL.\nThe AllUsers group gets READ and WRITE access.\nGranting this on a bucket is generally not recommended.", - "Provider": "!IBMCOS" - }, - { - "Value": "authenticated-read", - "Help": "Owner gets FULL_CONTROL.\nThe AuthenticatedUsers group gets READ access.", - "Provider": "!IBMCOS" - }, - { - "Value": "bucket-owner-read", - "Help": "Object owner gets FULL_CONTROL.\nBucket owner gets READ access.\nIf you specify this canned ACL when creating a bucket, Amazon S3 ignores it.", - "Provider": "!IBMCOS,ChinaMobile" - }, - { - "Value": "bucket-owner-full-control", - "Help": "Both the object owner and the bucket owner get FULL_CONTROL over the object.\nIf you specify this canned ACL when creating a bucket, Amazon S3 ignores it.", - "Provider": "!IBMCOS,ChinaMobile" - }, - { - "Value": "private", - "Help": "Owner gets FULL_CONTROL.\nNo one else has access rights (default).\nThis acl is available on IBM Cloud (Infra), IBM Cloud (Storage), On-Premise COS.", - "Provider": "IBMCOS" - }, - { - "Value": "public-read", - "Help": "Owner gets FULL_CONTROL.\nThe AllUsers group gets READ access.\nThis acl is available on IBM Cloud (Infra), IBM Cloud (Storage), On-Premise IBM COS.", - "Provider": "IBMCOS" - }, - { - "Value": "public-read-write", - "Help": "Owner gets 
FULL_CONTROL.\nThe AllUsers group gets READ and WRITE access.\nThis acl is available on IBM Cloud (Infra), On-Premise IBM COS.", - "Provider": "IBMCOS" - }, - { - "Value": "authenticated-read", - "Help": "Owner gets FULL_CONTROL.\nThe AuthenticatedUsers group gets READ access.\nNot supported on Buckets.\nThis acl is available on IBM Cloud (Infra) and On-Premise IBM COS.", - "Provider": "IBMCOS" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "bucket_acl", - "Help": "Canned ACL used when creating buckets.\n\nFor more info visit https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl\n\nNote that this ACL is applied when only when creating buckets. If it\nisn't set then \"acl\" is used instead.\n\nIf the \"acl\" and \"bucket_acl\" are empty strings then no X-Amz-Acl:\nheader is added and the default (private) will be used.\n", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "private", - "Help": "Owner gets FULL_CONTROL.\nNo one else has access rights (default).", - "Provider": "" - }, - { - "Value": "public-read", - "Help": "Owner gets FULL_CONTROL.\nThe AllUsers group gets READ access.", - "Provider": "" - }, - { - "Value": "public-read-write", - "Help": "Owner gets FULL_CONTROL.\nThe AllUsers group gets READ and WRITE access.\nGranting this on a bucket is generally not recommended.", - "Provider": "" - }, - { - "Value": "authenticated-read", - "Help": "Owner gets FULL_CONTROL.\nThe AuthenticatedUsers group gets READ access.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "requester_pays", - "Help": "Enables requester pays option when interacting with S3 bucket.", - "Provider": "AWS", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "server_side_encryption", - "Help": "The server-side encryption algorithm used when storing this object in S3.", - "Provider": "AWS,Ceph,ChinaMobile,Minio", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "None", - "Provider": "" - }, - { - "Value": "AES256", - "Help": "AES256", - "Provider": "" - }, - { - "Value": "aws:kms", - "Help": "aws:kms", - "Provider": "!ChinaMobile" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "sse_customer_algorithm", - "Help": "If using SSE-C, the server-side encryption algorithm used when storing this object in S3.", - "Provider": "AWS,Ceph,ChinaMobile,Minio", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "None", - "Provider": "" - }, - { - "Value": "AES256", - "Help": "AES256", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - 
"ValueStr": "", - "Type": "string" - }, - { - "Name": "sse_kms_key_id", - "Help": "If using KMS ID you must provide the ARN of Key.", - "Provider": "AWS,Ceph,Minio", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "None", - "Provider": "" - }, - { - "Value": "arn:aws:kms:us-east-1:*", - "Help": "arn:aws:kms:*", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "sse_customer_key", - "Help": "To use SSE-C you may provide the secret encryption key used to encrypt/decrypt your data.\n\nAlternatively you can provide --sse-customer-key-base64.", - "Provider": "AWS,Ceph,ChinaMobile,Minio", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "None", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "sse_customer_key_base64", - "Help": "If using SSE-C you must provide the secret encryption key encoded in base64 format to encrypt/decrypt your data.\n\nAlternatively you can provide --sse-customer-key.", - "Provider": "AWS,Ceph,ChinaMobile,Minio", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "None", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "sse_customer_key_md5", - "Help": "If using SSE-C you may provide the secret encryption key MD5 checksum (optional).\n\nIf you leave it blank, this is calculated automatically from the sse_customer_key provided.\n", - "Provider": "AWS,Ceph,ChinaMobile,Minio", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "None", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "storage_class", - "Help": "The storage class to use when storing new objects in S3.", - "Provider": "AWS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Default", - "Provider": "" - }, - { - "Value": "STANDARD", - "Help": "Standard storage class", - "Provider": "" - }, - { - "Value": "REDUCED_REDUNDANCY", - "Help": "Reduced redundancy storage class", - "Provider": "" - }, - { - "Value": "STANDARD_IA", - "Help": "Standard Infrequent Access storage class", - "Provider": "" - }, - { - "Value": "ONEZONE_IA", - "Help": "One Zone Infrequent Access storage class", - "Provider": "" - }, - { - "Value": "GLACIER", - "Help": "Glacier storage class", - "Provider": "" - }, - { - "Value": "DEEP_ARCHIVE", - "Help": "Glacier Deep Archive storage class", - "Provider": "" - }, - { - "Value": "INTELLIGENT_TIERING", - "Help": "Intelligent-Tiering storage class", - "Provider": "" - }, - { - "Value": "GLACIER_IR", - "Help": "Glacier Instant Retrieval storage class", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - 
"Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "storage_class", - "Help": "The storage class to use when storing new objects in OSS.", - "Provider": "Alibaba", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Default", - "Provider": "" - }, - { - "Value": "STANDARD", - "Help": "Standard storage class", - "Provider": "" - }, - { - "Value": "GLACIER", - "Help": "Archive storage mode", - "Provider": "" - }, - { - "Value": "STANDARD_IA", - "Help": "Infrequent access storage mode", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "storage_class", - "Help": "The storage class to use when storing new objects in ChinaMobile.", - "Provider": "ChinaMobile", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Default", - "Provider": "" - }, - { - "Value": "STANDARD", - "Help": "Standard storage class", - "Provider": "" - }, - { - "Value": "GLACIER", - "Help": "Archive storage mode", - "Provider": "" - }, - { - "Value": "STANDARD_IA", - "Help": "Infrequent access storage mode", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "storage_class", - "Help": "The storage class to use when storing new objects in Liara", - "Provider": "Liara", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "STANDARD", - "Help": "Standard storage class", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "storage_class", - "Help": "The storage class to use when storing new objects in ArvanCloud.", - "Provider": "ArvanCloud", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "STANDARD", - "Help": "Standard storage class", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "storage_class", - "Help": "The storage class to use when storing new objects in Tencent COS.", - "Provider": "TencentCOS", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Default", - "Provider": "" - }, - { - "Value": "STANDARD", - "Help": "Standard storage class", - "Provider": "" - }, - { - "Value": "ARCHIVE", - "Help": "Archive storage mode", - "Provider": "" - }, - { - "Value": "STANDARD_IA", - "Help": "Infrequent access storage mode", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "storage_class", - "Help": "The storage class to use when storing new objects in S3.", - "Provider": "Scaleway", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Default.", - "Provider": "" - }, - { - "Value": "STANDARD", - 
"Help": "The Standard class for any upload.\nSuitable for on-demand content like streaming or CDN.\nAvailable in all regions.", - "Provider": "" - }, - { - "Value": "GLACIER", - "Help": "Archived storage.\nPrices are lower, but it needs to be restored first to be accessed.\nAvailable in FR-PAR and NL-AMS regions.", - "Provider": "" - }, - { - "Value": "ONEZONE_IA", - "Help": "One Zone - Infrequent Access.\nA good choice for storing secondary backup copies or easily re-creatable data.\nAvailable in the FR-PAR region only.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "storage_class", - "Help": "The storage class to use when storing new objects in Qiniu.", - "Provider": "Qiniu", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "STANDARD", - "Help": "Standard storage class", - "Provider": "" - }, - { - "Value": "LINE", - "Help": "Infrequent access storage mode", - "Provider": "" - }, - { - "Value": "GLACIER", - "Help": "Archive storage mode", - "Provider": "" - }, - { - "Value": "DEEP_ARCHIVE", - "Help": "Deep archive storage mode", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "upload_cutoff", - "Help": "Cutoff for switching to chunked upload.\n\nAny files larger than this will be uploaded in chunks of chunk_size.\nThe minimum is 0 and the maximum is 5 GiB.", - "Provider": "", - "Default": 209715200, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "200Mi", - "ValueStr": "200Mi", - "Type": "SizeSuffix" - }, - { - "Name": "chunk_size", - "Help": "Chunk size to use for uploading.\n\nWhen uploading files larger than upload_cutoff or files with unknown\nsize (e.g. from \"rclone rcat\" or uploaded with \"rclone mount\" or google\nphotos or google docs) they will be uploaded as multipart uploads\nusing this chunk size.\n\nNote that \"--s3-upload-concurrency\" chunks of this size are buffered\nin memory per transfer.\n\nIf you are transferring large files over high-speed links and you have\nenough memory, then increasing this will speed up the transfers.\n\nRclone will automatically increase the chunk size when uploading a\nlarge file of known size to stay below the 10,000 chunks limit.\n\nFiles of unknown size are uploaded with the configured\nchunk_size. Since the default chunk size is 5 MiB and there can be at\nmost 10,000 chunks, this means that by default the maximum size of\na file you can stream upload is 48 GiB. If you wish to stream upload\nlarger files then you will need to increase chunk_size.\n\nIncreasing the chunk size decreases the accuracy of the progress\nstatistics displayed with \"-P\" flag. 
Rclone treats chunk as sent when\nit's buffered by the AWS SDK, when in fact it may still be uploading.\nA bigger chunk size means a bigger AWS SDK buffer and progress\nreporting more deviating from the truth.\n", - "Provider": "", - "Default": 5242880, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "5Mi", - "ValueStr": "5Mi", - "Type": "SizeSuffix" - }, - { - "Name": "max_upload_parts", - "Help": "Maximum number of parts in a multipart upload.\n\nThis option defines the maximum number of multipart chunks to use\nwhen doing a multipart upload.\n\nThis can be useful if a service does not support the AWS S3\nspecification of 10,000 chunks.\n\nRclone will automatically increase the chunk size when uploading a\nlarge file of a known size to stay below this number of chunks limit.\n", - "Provider": "", - "Default": 10000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10000", - "ValueStr": "10000", - "Type": "int" - }, - { - "Name": "copy_cutoff", - "Help": "Cutoff for switching to multipart copy.\n\nAny files larger than this that need to be server-side copied will be\ncopied in chunks of this size.\n\nThe minimum is 0 and the maximum is 5 GiB.", - "Provider": "", - "Default": 4999610368, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "4.656Gi", - "ValueStr": "4.656Gi", - "Type": "SizeSuffix" - }, - { - "Name": "disable_checksum", - "Help": "Don't store MD5 checksum with object metadata.\n\nNormally rclone will calculate the MD5 checksum of the input before\nuploading it so it can add it to metadata on the object. This is great\nfor data integrity checking but can cause long delays for large files\nto start uploading.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "shared_credentials_file", - "Help": "Path to the shared credentials file.\n\nIf env_auth = true then rclone can use a shared credentials file.\n\nIf this variable is empty rclone will look for the\n\"AWS_SHARED_CREDENTIALS_FILE\" env variable. If the env value is empty\nit will default to the current user's home directory.\n\n Linux/OSX: \"$HOME/.aws/credentials\"\n Windows: \"%USERPROFILE%\\.aws\\credentials\"\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "profile", - "Help": "Profile to use in the shared credentials file.\n\nIf env_auth = true then rclone can use a shared credentials file. 
This\nvariable controls which profile is used in that file.\n\nIf empty it will default to the environment variable \"AWS_PROFILE\" or\n\"default\" if that environment variable is also not set.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "session_token", - "Help": "An AWS session token.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "upload_concurrency", - "Help": "Concurrency for multipart uploads.\n\nThis is the number of chunks of the same file that are uploaded\nconcurrently.\n\nIf you are uploading small numbers of large files over high-speed links\nand these uploads do not fully utilize your bandwidth, then increasing\nthis may help to speed up the transfers.", - "Provider": "", - "Default": 4, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "4", - "ValueStr": "4", - "Type": "int" - }, - { - "Name": "force_path_style", - "Help": "If true use path style access if false use virtual hosted style.\n\nIf this is true (the default) then rclone will use path style access,\nif false then rclone will use virtual path style. See [the AWS S3\ndocs](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingBucket.html#access-bucket-intro)\nfor more info.\n\nSome providers (e.g. AWS, Aliyun OSS, Netease COS, or Tencent COS) require this set to\nfalse - rclone will do this automatically based on the provider\nsetting.", - "Provider": "", - "Default": true, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "v2_auth", - "Help": "If true use v2 authentication.\n\nIf this is false (the default) then rclone will use v4 authentication.\nIf it is set then rclone will use v2 authentication.\n\nUse this only if v4 signatures don't work, e.g. 
pre Jewel/v10 CEPH.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "use_accelerate_endpoint", - "Help": "If true use the AWS S3 accelerated endpoint.\n\nSee: [AWS S3 Transfer acceleration](https://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration-examples.html)", - "Provider": "AWS", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "leave_parts_on_error", - "Help": "If true avoid calling abort upload on a failure, leaving all successfully uploaded parts on S3 for manual recovery.\n\nIt should be set to true for resuming uploads across different sessions.\n\nWARNING: Storing parts of an incomplete multipart upload counts towards space usage on S3 and will add additional costs if not cleaned up.\n", - "Provider": "AWS", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "list_chunk", - "Help": "Size of listing chunk (response list for each ListObject S3 request).\n\nThis option is also known as \"MaxKeys\", \"max-items\", or \"page-size\" from the AWS S3 specification.\nMost services truncate the response list to 1000 objects even if requested more than that.\nIn AWS S3 this is a global maximum and cannot be changed, see [AWS S3](https://docs.aws.amazon.com/cli/latest/reference/s3/ls.html).\nIn Ceph, this can be increased with the \"rgw list buckets max chunk\" option.\n", - "Provider": "", - "Default": 1000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1000", - "ValueStr": "1000", - "Type": "int" - }, - { - "Name": "list_version", - "Help": "Version of ListObjects to use: 1,2 or 0 for auto.\n\nWhen S3 originally launched it only provided the ListObjects call to\nenumerate objects in a bucket.\n\nHowever in May 2016 the ListObjectsV2 call was introduced. This is\nmuch higher performance and should be used if at all possible.\n\nIf set to the default, 0, rclone will guess according to the provider\nset which list objects method to call. If it guesses wrong, then it\nmay be set manually here.\n", - "Provider": "", - "Default": 0, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "0", - "ValueStr": "0", - "Type": "int" - }, - { - "Name": "list_url_encode", - "Help": "Whether to url encode listings: true/false/unset\n\nSome providers support URL encoding listings and where this is\navailable this is more reliable when using control characters in file\nnames. 
If this is set to unset (the default) then rclone will choose\naccording to the provider setting what to apply, but you can override\nrclone's choice here.\n", - "Provider": "", - "Default": { - "Value": false, - "Valid": false - }, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "unset", - "ValueStr": "unset", - "Type": "Tristate" - }, - { - "Name": "no_check_bucket", - "Help": "If set, don't attempt to check the bucket exists or create it.\n\nThis can be useful when trying to minimise the number of transactions\nrclone does if you know the bucket exists already.\n\nIt can also be needed if the user you are using does not have bucket\ncreation permissions. Before v1.52.0 this would have passed silently\ndue to a bug.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_head", - "Help": "If set, don't HEAD uploaded objects to check integrity.\n\nThis can be useful when trying to minimise the number of transactions\nrclone does.\n\nSetting it means that if rclone receives a 200 OK message after\nuploading an object with PUT then it will assume that it got uploaded\nproperly.\n\nIn particular it will assume:\n\n- the metadata, including modtime, storage class and content type was as uploaded\n- the size was as uploaded\n\nIt reads the following items from the response for a single part PUT:\n\n- the MD5SUM\n- The uploaded date\n\nFor multipart uploads these items aren't read.\n\nIf an source object of unknown length is uploaded then rclone **will** do a\nHEAD request.\n\nSetting this flag increases the chance for undetected upload failures,\nin particular an incorrect size, so it isn't recommended for normal\noperation. 
In practice the chance of an undetected upload failure is\nvery small even with this flag.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_head_object", - "Help": "If set, do not do HEAD before GET when getting objects.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50331650, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,InvalidUtf8,Dot", - "ValueStr": "Slash,InvalidUtf8,Dot", - "Type": "MultiEncoder" - }, - { - "Name": "memory_pool_flush_time", - "Help": "How often internal memory buffer pools will be flushed.\n\nUploads which requires additional buffers (f.e multipart) will use memory pool for allocations.\nThis option controls how often unused buffers will be removed from the pool.", - "Provider": "", - "Default": 60000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1m0s", - "ValueStr": "1m0s", - "Type": "Duration" - }, - { - "Name": "memory_pool_use_mmap", - "Help": "Whether to use mmap buffers in internal memory pool.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "disable_http2", - "Help": "Disable usage of http2 for S3 backends.\n\nThere is currently an unsolved issue with the s3 (specifically minio) backend\nand HTTP/2. HTTP/2 is enabled by default for the s3 backend but can be\ndisabled here. 
When the issue is solved this flag will be removed.\n\nSee: https://github.com/rclone/rclone/issues/4673, https://github.com/rclone/rclone/issues/3631\n\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "download_url", - "Help": "Custom endpoint for downloads.\nThis is usually set to a CloudFront CDN URL as AWS S3 offers\ncheaper egress for data downloaded through the CloudFront network.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "directory_markers", - "Help": "Upload an empty object with a trailing slash when a new directory is created\n\nEmpty folders are unsupported for bucket based remotes, this option creates an empty\nobject ending with \"/\", to persist the folder.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "use_multipart_etag", - "Help": "Whether to use ETag in multipart uploads for verification\n\nThis should be true, false or left unset to use the default for the provider.\n", - "Provider": "", - "Default": { - "Value": false, - "Valid": false - }, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "unset", - "ValueStr": "unset", - "Type": "Tristate" - }, - { - "Name": "use_presigned_request", - "Help": "Whether to use a presigned request or PutObject for single part uploads\n\nIf this is false rclone will use PutObject from the AWS SDK to upload\nan object.\n\nVersions of rclone \u003c 1.59 use presigned requests to upload a single\npart object and setting this flag to true will re-enable that\nfunctionality. 
This shouldn't be necessary except in exceptional\ncircumstances or for testing.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "versions", - "Help": "Include old versions in directory listings.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "version_at", - "Help": "Show file versions as they were at the specified time.\n\nThe parameter should be a date, \"2006-01-02\", datetime \"2006-01-02\n15:04:05\" or a duration for that long ago, eg \"100d\" or \"1h\".\n\nNote that when using this no file write operations are permitted,\nso you can't upload files or delete them.\n\nSee [the time option docs](/docs/#time-option) for valid formats.\n", - "Provider": "", - "Default": "0001-01-01T00:00:00Z", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "off", - "ValueStr": "off", - "Type": "Time" - }, - { - "Name": "decompress", - "Help": "If set this will decompress gzip encoded objects.\n\nIt is possible to upload objects to S3 with \"Content-Encoding: gzip\"\nset. Normally rclone will download these files as compressed objects.\n\nIf this flag is set then rclone will decompress these files with\n\"Content-Encoding: gzip\" as they are received. This means that rclone\ncan't check the size and hash but the file contents will be decompressed.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "might_gzip", - "Help": "Set this if the backend might gzip objects.\n\nNormally providers will not alter objects when they are downloaded. 
If\nan object was not uploaded with `Content-Encoding: gzip` then it won't\nbe set on download.\n\nHowever some providers may gzip objects even if they weren't uploaded\nwith `Content-Encoding: gzip` (eg Cloudflare).\n\nA symptom of this would be receiving errors like\n\n ERROR corrupted on transfer: sizes differ NNN vs MMM\n\nIf you set this flag and rclone downloads an object with\nContent-Encoding: gzip set and chunked transfer encoding, then rclone\nwill decompress the object on the fly.\n\nIf this is set to unset (the default) then rclone will choose\naccording to the provider setting what to apply, but you can override\nrclone's choice here.\n", - "Provider": "", - "Default": { - "Value": false, - "Valid": false - }, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "unset", - "ValueStr": "unset", - "Type": "Tristate" - }, - { - "Name": "use_accept_encoding_gzip", - "Help": "Whether to send `Accept-Encoding: gzip` header.\n\nBy default, rclone will append `Accept-Encoding: gzip` to the request to download\ncompressed objects whenever possible.\n\nHowever some providers such as Google Cloud Storage may alter the HTTP headers, breaking\nthe signature of the request.\n\nA symptom of this would be receiving errors like\n\n\tSignatureDoesNotMatch: The request signature we calculated does not match the signature you provided.\n\nIn this case, you might want to try disabling this option.\n", - "Provider": "", - "Default": { - "Value": false, - "Valid": false - }, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "unset", - "ValueStr": "unset", - "Type": "Tristate" - }, - { - "Name": "no_system_metadata", - "Help": "Suppress setting and reading of system metadata", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "sts_endpoint", - "Help": "Endpoint for STS.\n\nLeave blank if using AWS to use the default endpoint for the region.", - "Provider": "AWS", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region where your data stored.\n", + "Provider": "Selectel", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "ru-1", + "Help": "St. 
Petersburg" + } ], - "CommandHelp": [ - { - "Name": "restore", - "Short": "Restore objects from GLACIER to normal storage", - "Long": "This command can be used to restore one or more objects from GLACIER\nto normal storage.\n\nUsage Examples:\n\n rclone backend restore s3:bucket/path/to/object [-o priority=PRIORITY] [-o lifetime=DAYS]\n rclone backend restore s3:bucket/path/to/directory [-o priority=PRIORITY] [-o lifetime=DAYS]\n rclone backend restore s3:bucket [-o priority=PRIORITY] [-o lifetime=DAYS]\n\nThis flag also obeys the filters. Test first with --interactive/-i or --dry-run flags\n\n rclone --interactive backend restore --include \"*.txt\" s3:bucket/path -o priority=Standard\n\nAll the objects shown will be marked for restore, then\n\n rclone backend restore --include \"*.txt\" s3:bucket/path -o priority=Standard\n\nIt returns a list of status dictionaries with Remote and Status\nkeys. The Status will be OK if it was successful or an error message\nif not.\n\n [\n {\n \"Status\": \"OK\",\n \"Path\": \"test.txt\"\n },\n {\n \"Status\": \"OK\",\n \"Path\": \"test/file4.txt\"\n }\n ]\n\n", - "Opts": { - "description": "The optional description for the job.", - "lifetime": "Lifetime of the active copy in days", - "priority": "Priority of restore: Standard|Expedited|Bulk" - } - }, - { - "Name": "list-multipart-uploads", - "Short": "List the unfinished multipart uploads", - "Long": "This command lists the unfinished multipart uploads in JSON format.\n\n rclone backend list-multipart s3:bucket/path/to/object\n\nIt returns a dictionary of buckets with values as lists of unfinished\nmultipart uploads.\n\nYou can call it with no bucket in which case it lists all bucket, with\na bucket or with a bucket and path.\n\n {\n \"rclone\": [\n {\n \"Initiated\": \"2020-06-26T14:20:36Z\",\n \"Initiator\": {\n \"DisplayName\": \"XXX\",\n \"ID\": \"arn:aws:iam::XXX:user/XXX\"\n },\n \"Key\": \"KEY\",\n \"Owner\": {\n \"DisplayName\": null,\n \"ID\": \"XXX\"\n },\n \"StorageClass\": \"STANDARD\",\n \"UploadId\": \"XXX\"\n }\n ],\n \"rclone-1000files\": [],\n \"rclone-dst\": []\n }\n\n", - "Opts": null - }, - { - "Name": "cleanup", - "Short": "Remove unfinished multipart uploads.", - "Long": "This command removes unfinished multipart uploads of age greater than\nmax-age which defaults to 24 hours.\n\nNote that you can use --interactive/-i or --dry-run with this command to see what\nit would do.\n\n rclone backend cleanup s3:bucket/path/to/object\n rclone backend cleanup -o max-age=7w s3:bucket/path/to/object\n\nDurations are parsed as per the rest of rclone, 2h, 7d, 7w etc.\n", - "Opts": { - "max-age": "Max age of upload to delete" - } - }, - { - "Name": "cleanup-hidden", - "Short": "Remove old versions of files.", - "Long": "This command removes any old hidden versions of files\non a versions enabled bucket.\n\nNote that you can use --interactive/-i or --dry-run with this command to see what\nit would do.\n\n rclone backend cleanup-hidden s3:bucket/path/to/dir\n", - "Opts": null - }, - { - "Name": "versioning", - "Short": "Set/get versioning support for a bucket.", - "Long": "This command sets versioning support if a parameter is\npassed and then returns the current versioning status for the bucket\nsupplied.\n\n rclone backend versioning s3:bucket # read status only\n rclone backend versioning s3:bucket Enabled\n rclone backend versioning s3:bucket Suspended\n\nIt may return \"Enabled\", \"Suspended\" or \"Unversioned\". 
Note that once versioning\nhas been enabled the status can't be set back to \"Unversioned\".\n", - "Opts": null - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region to connect to.\n\nLeave blank if you are using an S3 clone and you don't have a region.", + "Provider": "!AWS,Alibaba,ArvanCloud,ChinaMobile,Cloudflare,FlashBlade,IONOS,Petabox,Liara,Linode,Magalu,Qiniu,RackCorp,Scaleway,Selectel,Storj,Synology,TencentCOS,HuaweiOBS,IDrive,Mega", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Use this if unsure.\nWill use v4 signatures and an empty region." + }, + { + "Value": "other-v2-signature", + "Help": "Use this only if v4 signatures don't work.\nE.g. pre Jewel/v10 CEPH." + } ], - "Aliases": null, - "Hide": false, - "MetadataInfo": { - "System": { - "btime": { - "Help": "Time of file birth (creation) read from Last-Modified header", - "Type": "RFC 3339", - "Example": "2006-01-02T15:04:05.999999999Z07:00", - "ReadOnly": true - }, - "cache-control": { - "Help": "Cache-Control header", - "Type": "string", - "Example": "no-cache", - "ReadOnly": false - }, - "content-disposition": { - "Help": "Content-Disposition header", - "Type": "string", - "Example": "inline", - "ReadOnly": false - }, - "content-encoding": { - "Help": "Content-Encoding header", - "Type": "string", - "Example": "gzip", - "ReadOnly": false - }, - "content-language": { - "Help": "Content-Language header", - "Type": "string", - "Example": "en-US", - "ReadOnly": false - }, - "content-type": { - "Help": "Content-Type header", - "Type": "string", - "Example": "text/plain", - "ReadOnly": false - }, - "mtime": { - "Help": "Time of last modification, read from rclone metadata", - "Type": "RFC 3339", - "Example": "2006-01-02T15:04:05.999999999Z07:00", - "ReadOnly": false - }, - "tier": { - "Help": "Tier of the object", - "Type": "string", - "Example": "GLACIER", - "ReadOnly": true - } - }, - "Help": "User metadata is stored as x-amz-meta- keys. S3 metadata keys are case insensitive and are always returned in lower case." 
- } - }, - { - "Name": "seafile", - "Description": "seafile", - "Prefix": "seafile", - "Options": [ - { - "Name": "url", - "Help": "URL of seafile host to connect to.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "https://cloud.seafile.com/", - "Help": "Connect to cloud.seafile.com.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "user", - "Help": "User name (usually email address).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "pass", - "Help": "Password.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "2fa", - "Help": "Two-factor authentication ('true' if the account has 2FA enabled).", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "library", - "Help": "Name of the library.\n\nLeave blank to access all non-encrypted libraries.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "library_key", - "Help": "Library password (for encrypted libraries only).\n\nLeave blank if you pass it through the command line.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "create_library", - "Help": "Should rclone create a library if it doesn't exist.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "auth_token", - "Help": "Authentication token.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 16850954, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": 
"Slash,DoubleQuote,BackSlash,Ctl,InvalidUtf8", - "ValueStr": "Slash,DoubleQuote,BackSlash,Ctl,InvalidUtf8", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for S3 API.\n\nLeave blank if using AWS to use the default endpoint for the region.", + "Provider": "AWS", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for China Mobile Ecloud Elastic Object Storage (EOS) API.", + "Provider": "ChinaMobile", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "eos-wuxi-1.cmecloud.cn", + "Help": "The default endpoint - a good choice if you are unsure.\nEast China (Suzhou)" + }, + { + "Value": "eos-jinan-1.cmecloud.cn", + "Help": "East China (Jinan)" + }, + { + "Value": "eos-ningbo-1.cmecloud.cn", + "Help": "East China (Hangzhou)" + }, + { + "Value": "eos-shanghai-1.cmecloud.cn", + "Help": "East China (Shanghai-1)" + }, + { + "Value": "eos-zhengzhou-1.cmecloud.cn", + "Help": "Central China (Zhengzhou)" + }, + { + "Value": "eos-hunan-1.cmecloud.cn", + "Help": "Central China (Changsha-1)" + }, + { + "Value": "eos-zhuzhou-1.cmecloud.cn", + "Help": "Central China (Changsha-2)" + }, + { + "Value": "eos-guangzhou-1.cmecloud.cn", + "Help": "South China (Guangzhou-2)" + }, + { + "Value": "eos-dongguan-1.cmecloud.cn", + "Help": "South China (Guangzhou-3)" + }, + { + "Value": "eos-beijing-1.cmecloud.cn", + "Help": "North China (Beijing-1)" + }, + { + "Value": "eos-beijing-2.cmecloud.cn", + "Help": "North China (Beijing-2)" + }, + { + "Value": "eos-beijing-4.cmecloud.cn", + "Help": "North China (Beijing-3)" + }, + { + "Value": "eos-huhehaote-1.cmecloud.cn", + "Help": "North China (Huhehaote)" + }, + { + "Value": "eos-chengdu-1.cmecloud.cn", + "Help": "Southwest China (Chengdu)" + }, + { + "Value": "eos-chongqing-1.cmecloud.cn", + "Help": "Southwest China (Chongqing)" + }, + { + "Value": "eos-guiyang-1.cmecloud.cn", + "Help": "Southwest China (Guiyang)" + }, + { + "Value": "eos-xian-1.cmecloud.cn", + "Help": "Nouthwest China (Xian)" + }, + { + "Value": "eos-yunnan.cmecloud.cn", + "Help": "Yunnan China (Kunming)" + }, + { + "Value": "eos-yunnan-2.cmecloud.cn", + "Help": "Yunnan China (Kunming-2)" + }, + { + "Value": "eos-tianjin-1.cmecloud.cn", + "Help": "Tianjin China (Tianjin)" + }, + { + "Value": "eos-jilin-1.cmecloud.cn", + "Help": "Jilin China (Changchun)" + }, + { + "Value": "eos-hubei-1.cmecloud.cn", + "Help": "Hubei China (Xiangyan)" + }, + { + "Value": "eos-jiangxi-1.cmecloud.cn", + "Help": "Jiangxi China (Nanchang)" + }, + { + "Value": "eos-gansu-1.cmecloud.cn", + "Help": "Gansu China (Lanzhou)" + }, + { + "Value": "eos-shanxi-1.cmecloud.cn", + "Help": "Shanxi China (Taiyuan)" + }, + { + "Value": "eos-liaoning-1.cmecloud.cn", + "Help": "Liaoning China (Shenyang)" + }, + { + "Value": "eos-hebei-1.cmecloud.cn", + "Help": "Hebei China (Shijiazhuang)" + }, + { + "Value": "eos-fujian-1.cmecloud.cn", + "Help": "Fujian China (Xiamen)" + }, + { + "Value": "eos-guangxi-1.cmecloud.cn", + "Help": "Guangxi China (Nanning)" + }, + { + "Value": "eos-anhui-1.cmecloud.cn", + "Help": "Anhui China (Huainan)" + } + 
], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Arvan Cloud Object Storage (AOS) API.", + "Provider": "ArvanCloud", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "s3.ir-thr-at1.arvanstorage.ir", + "Help": "The default endpoint - a good choice if you are unsure.\nTehran Iran (Simin)" + }, + { + "Value": "s3.ir-tbz-sh1.arvanstorage.ir", + "Help": "Tabriz Iran (Shahriar)" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for IBM COS S3 API.\n\nSpecify if using an IBM COS On Premise.", + "Provider": "IBMCOS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "s3.us.cloud-object-storage.appdomain.cloud", + "Help": "US Cross Region Endpoint" + }, + { + "Value": "s3.dal.us.cloud-object-storage.appdomain.cloud", + "Help": "US Cross Region Dallas Endpoint" + }, + { + "Value": "s3.wdc.us.cloud-object-storage.appdomain.cloud", + "Help": "US Cross Region Washington DC Endpoint" + }, + { + "Value": "s3.sjc.us.cloud-object-storage.appdomain.cloud", + "Help": "US Cross Region San Jose Endpoint" + }, + { + "Value": "s3.private.us.cloud-object-storage.appdomain.cloud", + "Help": "US Cross Region Private Endpoint" + }, + { + "Value": "s3.private.dal.us.cloud-object-storage.appdomain.cloud", + "Help": "US Cross Region Dallas Private Endpoint" + }, + { + "Value": "s3.private.wdc.us.cloud-object-storage.appdomain.cloud", + "Help": "US Cross Region Washington DC Private Endpoint" + }, + { + "Value": "s3.private.sjc.us.cloud-object-storage.appdomain.cloud", + "Help": "US Cross Region San Jose Private Endpoint" + }, + { + "Value": "s3.us-east.cloud-object-storage.appdomain.cloud", + "Help": "US Region East Endpoint" + }, + { + "Value": "s3.private.us-east.cloud-object-storage.appdomain.cloud", + "Help": "US Region East Private Endpoint" + }, + { + "Value": "s3.us-south.cloud-object-storage.appdomain.cloud", + "Help": "US Region South Endpoint" + }, + { + "Value": "s3.private.us-south.cloud-object-storage.appdomain.cloud", + "Help": "US Region South Private Endpoint" + }, + { + "Value": "s3.eu.cloud-object-storage.appdomain.cloud", + "Help": "EU Cross Region Endpoint" + }, + { + "Value": "s3.fra.eu.cloud-object-storage.appdomain.cloud", + "Help": "EU Cross Region Frankfurt Endpoint" + }, + { + "Value": "s3.mil.eu.cloud-object-storage.appdomain.cloud", + "Help": "EU Cross Region Milan Endpoint" + }, + { + "Value": "s3.ams.eu.cloud-object-storage.appdomain.cloud", + "Help": "EU Cross Region Amsterdam Endpoint" + }, + { + "Value": "s3.private.eu.cloud-object-storage.appdomain.cloud", + "Help": "EU Cross Region Private Endpoint" + }, + { + "Value": "s3.private.fra.eu.cloud-object-storage.appdomain.cloud", + "Help": "EU Cross Region Frankfurt Private Endpoint" + }, + { + "Value": "s3.private.mil.eu.cloud-object-storage.appdomain.cloud", + "Help": "EU Cross Region Milan Private Endpoint" + }, + { + "Value": "s3.private.ams.eu.cloud-object-storage.appdomain.cloud", + "Help": "EU Cross Region Amsterdam Private Endpoint" + }, + { + "Value": "s3.eu-gb.cloud-object-storage.appdomain.cloud", + "Help": "Great Britain Endpoint" + }, + { + 
"Value": "s3.private.eu-gb.cloud-object-storage.appdomain.cloud", + "Help": "Great Britain Private Endpoint" + }, + { + "Value": "s3.eu-de.cloud-object-storage.appdomain.cloud", + "Help": "EU Region DE Endpoint" + }, + { + "Value": "s3.private.eu-de.cloud-object-storage.appdomain.cloud", + "Help": "EU Region DE Private Endpoint" + }, + { + "Value": "s3.ap.cloud-object-storage.appdomain.cloud", + "Help": "APAC Cross Regional Endpoint" + }, + { + "Value": "s3.tok.ap.cloud-object-storage.appdomain.cloud", + "Help": "APAC Cross Regional Tokyo Endpoint" + }, + { + "Value": "s3.hkg.ap.cloud-object-storage.appdomain.cloud", + "Help": "APAC Cross Regional HongKong Endpoint" + }, + { + "Value": "s3.seo.ap.cloud-object-storage.appdomain.cloud", + "Help": "APAC Cross Regional Seoul Endpoint" + }, + { + "Value": "s3.private.ap.cloud-object-storage.appdomain.cloud", + "Help": "APAC Cross Regional Private Endpoint" + }, + { + "Value": "s3.private.tok.ap.cloud-object-storage.appdomain.cloud", + "Help": "APAC Cross Regional Tokyo Private Endpoint" + }, + { + "Value": "s3.private.hkg.ap.cloud-object-storage.appdomain.cloud", + "Help": "APAC Cross Regional HongKong Private Endpoint" + }, + { + "Value": "s3.private.seo.ap.cloud-object-storage.appdomain.cloud", + "Help": "APAC Cross Regional Seoul Private Endpoint" + }, + { + "Value": "s3.jp-tok.cloud-object-storage.appdomain.cloud", + "Help": "APAC Region Japan Endpoint" + }, + { + "Value": "s3.private.jp-tok.cloud-object-storage.appdomain.cloud", + "Help": "APAC Region Japan Private Endpoint" + }, + { + "Value": "s3.au-syd.cloud-object-storage.appdomain.cloud", + "Help": "APAC Region Australia Endpoint" + }, + { + "Value": "s3.private.au-syd.cloud-object-storage.appdomain.cloud", + "Help": "APAC Region Australia Private Endpoint" + }, + { + "Value": "s3.ams03.cloud-object-storage.appdomain.cloud", + "Help": "Amsterdam Single Site Endpoint" + }, + { + "Value": "s3.private.ams03.cloud-object-storage.appdomain.cloud", + "Help": "Amsterdam Single Site Private Endpoint" + }, + { + "Value": "s3.che01.cloud-object-storage.appdomain.cloud", + "Help": "Chennai Single Site Endpoint" + }, + { + "Value": "s3.private.che01.cloud-object-storage.appdomain.cloud", + "Help": "Chennai Single Site Private Endpoint" + }, + { + "Value": "s3.mel01.cloud-object-storage.appdomain.cloud", + "Help": "Melbourne Single Site Endpoint" + }, + { + "Value": "s3.private.mel01.cloud-object-storage.appdomain.cloud", + "Help": "Melbourne Single Site Private Endpoint" + }, + { + "Value": "s3.osl01.cloud-object-storage.appdomain.cloud", + "Help": "Oslo Single Site Endpoint" + }, + { + "Value": "s3.private.osl01.cloud-object-storage.appdomain.cloud", + "Help": "Oslo Single Site Private Endpoint" + }, + { + "Value": "s3.tor01.cloud-object-storage.appdomain.cloud", + "Help": "Toronto Single Site Endpoint" + }, + { + "Value": "s3.private.tor01.cloud-object-storage.appdomain.cloud", + "Help": "Toronto Single Site Private Endpoint" + }, + { + "Value": "s3.seo01.cloud-object-storage.appdomain.cloud", + "Help": "Seoul Single Site Endpoint" + }, + { + "Value": "s3.private.seo01.cloud-object-storage.appdomain.cloud", + "Help": "Seoul Single Site Private Endpoint" + }, + { + "Value": "s3.mon01.cloud-object-storage.appdomain.cloud", + "Help": "Montreal Single Site Endpoint" + }, + { + "Value": "s3.private.mon01.cloud-object-storage.appdomain.cloud", + "Help": "Montreal Single Site Private Endpoint" + }, + { + "Value": "s3.mex01.cloud-object-storage.appdomain.cloud", + "Help": "Mexico Single Site Endpoint" 
+ }, + { + "Value": "s3.private.mex01.cloud-object-storage.appdomain.cloud", + "Help": "Mexico Single Site Private Endpoint" + }, + { + "Value": "s3.sjc04.cloud-object-storage.appdomain.cloud", + "Help": "San Jose Single Site Endpoint" + }, + { + "Value": "s3.private.sjc04.cloud-object-storage.appdomain.cloud", + "Help": "San Jose Single Site Private Endpoint" + }, + { + "Value": "s3.mil01.cloud-object-storage.appdomain.cloud", + "Help": "Milan Single Site Endpoint" + }, + { + "Value": "s3.private.mil01.cloud-object-storage.appdomain.cloud", + "Help": "Milan Single Site Private Endpoint" + }, + { + "Value": "s3.hkg02.cloud-object-storage.appdomain.cloud", + "Help": "Hong Kong Single Site Endpoint" + }, + { + "Value": "s3.private.hkg02.cloud-object-storage.appdomain.cloud", + "Help": "Hong Kong Single Site Private Endpoint" + }, + { + "Value": "s3.par01.cloud-object-storage.appdomain.cloud", + "Help": "Paris Single Site Endpoint" + }, + { + "Value": "s3.private.par01.cloud-object-storage.appdomain.cloud", + "Help": "Paris Single Site Private Endpoint" + }, + { + "Value": "s3.sng01.cloud-object-storage.appdomain.cloud", + "Help": "Singapore Single Site Endpoint" + }, + { + "Value": "s3.private.sng01.cloud-object-storage.appdomain.cloud", + "Help": "Singapore Single Site Private Endpoint" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for IONOS S3 Object Storage.\n\nSpecify the endpoint from the same region.", + "Provider": "IONOS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "s3-eu-central-1.ionoscloud.com", + "Help": "Frankfurt, Germany" + }, + { + "Value": "s3-eu-central-2.ionoscloud.com", + "Help": "Berlin, Germany" + }, + { + "Value": "s3-eu-south-2.ionoscloud.com", + "Help": "Logrono, Spain" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Petabox S3 Object Storage.\n\nSpecify the endpoint from the same region.", + "Provider": "Petabox", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "s3.petabox.io", + "Help": "US East (N. Virginia)" + }, + { + "Value": "s3.us-east-1.petabox.io", + "Help": "US East (N. 
Virginia)" + }, + { + "Value": "s3.eu-central-1.petabox.io", + "Help": "Europe (Frankfurt)" + }, + { + "Value": "s3.ap-southeast-1.petabox.io", + "Help": "Asia Pacific (Singapore)" + }, + { + "Value": "s3.me-south-1.petabox.io", + "Help": "Middle East (Bahrain)" + }, + { + "Value": "s3.sa-east-1.petabox.io", + "Help": "South America (São Paulo)" + } + ], + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Leviia Object Storage API.", + "Provider": "Leviia", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "s3.leviia.com", + "Help": "The default endpoint\nLeviia" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Liara Object Storage API.", + "Provider": "Liara", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "storage.iran.liara.space", + "Help": "The default endpoint\nIran" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Linode Object Storage API.", + "Provider": "Linode", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "nl-ams-1.linodeobjects.com", + "Help": "Amsterdam (Netherlands), nl-ams-1" + }, + { + "Value": "us-southeast-1.linodeobjects.com", + "Help": "Atlanta, GA (USA), us-southeast-1" + }, + { + "Value": "in-maa-1.linodeobjects.com", + "Help": "Chennai (India), in-maa-1" + }, + { + "Value": "us-ord-1.linodeobjects.com", + "Help": "Chicago, IL (USA), us-ord-1" + }, + { + "Value": "eu-central-1.linodeobjects.com", + "Help": "Frankfurt (Germany), eu-central-1" + }, + { + "Value": "id-cgk-1.linodeobjects.com", + "Help": "Jakarta (Indonesia), id-cgk-1" + }, + { + "Value": "gb-lon-1.linodeobjects.com", + "Help": "London 2 (Great Britain), gb-lon-1" + }, + { + "Value": "us-lax-1.linodeobjects.com", + "Help": "Los Angeles, CA (USA), us-lax-1" + }, + { + "Value": "es-mad-1.linodeobjects.com", + "Help": "Madrid (Spain), es-mad-1" + }, + { + "Value": "au-mel-1.linodeobjects.com", + "Help": "Melbourne (Australia), au-mel-1" + }, + { + "Value": "us-mia-1.linodeobjects.com", + "Help": "Miami, FL (USA), us-mia-1" + }, + { + "Value": "it-mil-1.linodeobjects.com", + "Help": "Milan (Italy), it-mil-1" + }, + { + "Value": "us-east-1.linodeobjects.com", + "Help": "Newark, NJ (USA), us-east-1" + }, + { + "Value": "jp-osa-1.linodeobjects.com", + "Help": "Osaka (Japan), jp-osa-1" + }, + { + "Value": "fr-par-1.linodeobjects.com", + "Help": "Paris (France), fr-par-1" + }, + { + "Value": "br-gru-1.linodeobjects.com", + "Help": "São Paulo (Brazil), br-gru-1" + }, + { + "Value": "us-sea-1.linodeobjects.com", + "Help": "Seattle, WA (USA), us-sea-1" + }, + { + "Value": "ap-south-1.linodeobjects.com", + "Help": "Singapore, ap-south-1" + }, + { + "Value": "sg-sin-1.linodeobjects.com", + "Help": "Singapore 2, sg-sin-1" + }, + { + "Value": "se-sto-1.linodeobjects.com", + "Help": "Stockholm (Sweden), se-sto-1" + }, + { + "Value": "us-iad-1.linodeobjects.com", + "Help": "Washington, DC, 
(USA), us-iad-1" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Lyve Cloud S3 API.\nRequired when using an S3 clone. Please type in your LyveCloud endpoint.\nExamples:\n- s3.us-west-1.{account_name}.lyve.seagate.com (US West 1 - California)\n- s3.eu-west-1.{account_name}.lyve.seagate.com (EU West 1 - Ireland)", + "Provider": "LyveCloud", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Magalu Object Storage API.", + "Provider": "Magalu", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "br-se1.magaluobjects.com", + "Help": "São Paulo, SP (BR), br-se1" + }, + { + "Value": "br-ne1.magaluobjects.com", + "Help": "Fortaleza, CE (BR), br-ne1" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for OSS API.", + "Provider": "Alibaba", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "oss-accelerate.aliyuncs.com", + "Help": "Global Accelerate" + }, + { + "Value": "oss-accelerate-overseas.aliyuncs.com", + "Help": "Global Accelerate (outside mainland China)" + }, + { + "Value": "oss-cn-hangzhou.aliyuncs.com", + "Help": "East China 1 (Hangzhou)" + }, + { + "Value": "oss-cn-shanghai.aliyuncs.com", + "Help": "East China 2 (Shanghai)" + }, + { + "Value": "oss-cn-qingdao.aliyuncs.com", + "Help": "North China 1 (Qingdao)" + }, + { + "Value": "oss-cn-beijing.aliyuncs.com", + "Help": "North China 2 (Beijing)" + }, + { + "Value": "oss-cn-zhangjiakou.aliyuncs.com", + "Help": "North China 3 (Zhangjiakou)" + }, + { + "Value": "oss-cn-huhehaote.aliyuncs.com", + "Help": "North China 5 (Hohhot)" + }, + { + "Value": "oss-cn-wulanchabu.aliyuncs.com", + "Help": "North China 6 (Ulanqab)" + }, + { + "Value": "oss-cn-shenzhen.aliyuncs.com", + "Help": "South China 1 (Shenzhen)" + }, + { + "Value": "oss-cn-heyuan.aliyuncs.com", + "Help": "South China 2 (Heyuan)" + }, + { + "Value": "oss-cn-guangzhou.aliyuncs.com", + "Help": "South China 3 (Guangzhou)" + }, + { + "Value": "oss-cn-chengdu.aliyuncs.com", + "Help": "West China 1 (Chengdu)" + }, + { + "Value": "oss-cn-hongkong.aliyuncs.com", + "Help": "Hong Kong (Hong Kong)" + }, + { + "Value": "oss-us-west-1.aliyuncs.com", + "Help": "US West 1 (Silicon Valley)" + }, + { + "Value": "oss-us-east-1.aliyuncs.com", + "Help": "US East 1 (Virginia)" + }, + { + "Value": "oss-ap-southeast-1.aliyuncs.com", + "Help": "Southeast Asia Southeast 1 (Singapore)" + }, + { + "Value": "oss-ap-southeast-2.aliyuncs.com", + "Help": "Asia Pacific Southeast 2 (Sydney)" + }, + { + "Value": "oss-ap-southeast-3.aliyuncs.com", + "Help": "Southeast Asia Southeast 3 (Kuala Lumpur)" + }, + { + "Value": "oss-ap-southeast-5.aliyuncs.com", + "Help": "Asia Pacific Southeast 5 (Jakarta)" + }, + { + "Value": "oss-ap-northeast-1.aliyuncs.com", + "Help": "Asia Pacific Northeast 1 (Japan)" + }, + { + "Value": "oss-ap-south-1.aliyuncs.com", + "Help": "Asia Pacific South 1 
(Mumbai)" + }, + { + "Value": "oss-eu-central-1.aliyuncs.com", + "Help": "Central Europe 1 (Frankfurt)" + }, + { + "Value": "oss-eu-west-1.aliyuncs.com", + "Help": "West Europe (London)" + }, + { + "Value": "oss-me-east-1.aliyuncs.com", + "Help": "Middle East 1 (Dubai)" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for OBS API.", + "Provider": "HuaweiOBS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "obs.af-south-1.myhuaweicloud.com", + "Help": "AF-Johannesburg" + }, + { + "Value": "obs.ap-southeast-2.myhuaweicloud.com", + "Help": "AP-Bangkok" + }, + { + "Value": "obs.ap-southeast-3.myhuaweicloud.com", + "Help": "AP-Singapore" + }, + { + "Value": "obs.cn-east-3.myhuaweicloud.com", + "Help": "CN East-Shanghai1" + }, + { + "Value": "obs.cn-east-2.myhuaweicloud.com", + "Help": "CN East-Shanghai2" + }, + { + "Value": "obs.cn-north-1.myhuaweicloud.com", + "Help": "CN North-Beijing1" + }, + { + "Value": "obs.cn-north-4.myhuaweicloud.com", + "Help": "CN North-Beijing4" + }, + { + "Value": "obs.cn-south-1.myhuaweicloud.com", + "Help": "CN South-Guangzhou" + }, + { + "Value": "obs.ap-southeast-1.myhuaweicloud.com", + "Help": "CN-Hong Kong" + }, + { + "Value": "obs.sa-argentina-1.myhuaweicloud.com", + "Help": "LA-Buenos Aires1" + }, + { + "Value": "obs.sa-peru-1.myhuaweicloud.com", + "Help": "LA-Lima1" + }, + { + "Value": "obs.na-mexico-1.myhuaweicloud.com", + "Help": "LA-Mexico City1" + }, + { + "Value": "obs.sa-chile-1.myhuaweicloud.com", + "Help": "LA-Santiago2" + }, + { + "Value": "obs.sa-brazil-1.myhuaweicloud.com", + "Help": "LA-Sao Paulo1" + }, + { + "Value": "obs.ru-northwest-2.myhuaweicloud.com", + "Help": "RU-Moscow2" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Scaleway Object Storage.", + "Provider": "Scaleway", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "s3.nl-ams.scw.cloud", + "Help": "Amsterdam Endpoint" + }, + { + "Value": "s3.fr-par.scw.cloud", + "Help": "Paris Endpoint" + }, + { + "Value": "s3.pl-waw.scw.cloud", + "Help": "Warsaw Endpoint" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for StackPath Object Storage.", + "Provider": "StackPath", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "s3.us-east-2.stackpathstorage.com", + "Help": "US East Endpoint" + }, + { + "Value": "s3.us-west-1.stackpathstorage.com", + "Help": "US West Endpoint" + }, + { + "Value": "s3.eu-central-1.stackpathstorage.com", + "Help": "EU Endpoint" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Google Cloud Storage.", + "Provider": "GCS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": 
"https://storage.googleapis.com", + "Help": "Google Cloud Storage endpoint" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Storj Gateway.", + "Provider": "Storj", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "gateway.storjshare.io", + "Help": "Global Hosted Gateway" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Synology C2 Object Storage API.", + "Provider": "Synology", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "eu-001.s3.synologyc2.net", + "Help": "EU Endpoint 1" + }, + { + "Value": "eu-002.s3.synologyc2.net", + "Help": "EU Endpoint 2" + }, + { + "Value": "us-001.s3.synologyc2.net", + "Help": "US Endpoint 1" + }, + { + "Value": "us-002.s3.synologyc2.net", + "Help": "US Endpoint 2" + }, + { + "Value": "tw-001.s3.synologyc2.net", + "Help": "TW Endpoint 1" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Tencent COS API.", + "Provider": "TencentCOS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "cos.ap-beijing.myqcloud.com", + "Help": "Beijing Region" + }, + { + "Value": "cos.ap-nanjing.myqcloud.com", + "Help": "Nanjing Region" + }, + { + "Value": "cos.ap-shanghai.myqcloud.com", + "Help": "Shanghai Region" + }, + { + "Value": "cos.ap-guangzhou.myqcloud.com", + "Help": "Guangzhou Region" + }, + { + "Value": "cos.ap-nanjing.myqcloud.com", + "Help": "Nanjing Region" + }, + { + "Value": "cos.ap-chengdu.myqcloud.com", + "Help": "Chengdu Region" + }, + { + "Value": "cos.ap-chongqing.myqcloud.com", + "Help": "Chongqing Region" + }, + { + "Value": "cos.ap-hongkong.myqcloud.com", + "Help": "Hong Kong (China) Region" + }, + { + "Value": "cos.ap-singapore.myqcloud.com", + "Help": "Singapore Region" + }, + { + "Value": "cos.ap-mumbai.myqcloud.com", + "Help": "Mumbai Region" + }, + { + "Value": "cos.ap-seoul.myqcloud.com", + "Help": "Seoul Region" + }, + { + "Value": "cos.ap-bangkok.myqcloud.com", + "Help": "Bangkok Region" + }, + { + "Value": "cos.ap-tokyo.myqcloud.com", + "Help": "Tokyo Region" + }, + { + "Value": "cos.na-siliconvalley.myqcloud.com", + "Help": "Silicon Valley Region" + }, + { + "Value": "cos.na-ashburn.myqcloud.com", + "Help": "Virginia Region" + }, + { + "Value": "cos.na-toronto.myqcloud.com", + "Help": "Toronto Region" + }, + { + "Value": "cos.eu-frankfurt.myqcloud.com", + "Help": "Frankfurt Region" + }, + { + "Value": "cos.eu-moscow.myqcloud.com", + "Help": "Moscow Region" + }, + { + "Value": "cos.accelerate.myqcloud.com", + "Help": "Use Tencent COS Accelerate Endpoint" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for RackCorp Object Storage.", + "Provider": "RackCorp", + "Default": "", + "Value": 
null, + "Examples": [ + { + "Value": "s3.rackcorp.com", + "Help": "Global (AnyCast) Endpoint" + }, + { + "Value": "au.s3.rackcorp.com", + "Help": "Australia (Anycast) Endpoint" + }, + { + "Value": "au-nsw.s3.rackcorp.com", + "Help": "Sydney (Australia) Endpoint" + }, + { + "Value": "au-qld.s3.rackcorp.com", + "Help": "Brisbane (Australia) Endpoint" + }, + { + "Value": "au-vic.s3.rackcorp.com", + "Help": "Melbourne (Australia) Endpoint" + }, + { + "Value": "au-wa.s3.rackcorp.com", + "Help": "Perth (Australia) Endpoint" + }, + { + "Value": "ph.s3.rackcorp.com", + "Help": "Manila (Philippines) Endpoint" + }, + { + "Value": "th.s3.rackcorp.com", + "Help": "Bangkok (Thailand) Endpoint" + }, + { + "Value": "hk.s3.rackcorp.com", + "Help": "HK (Hong Kong) Endpoint" + }, + { + "Value": "mn.s3.rackcorp.com", + "Help": "Ulaanbaatar (Mongolia) Endpoint" + }, + { + "Value": "kg.s3.rackcorp.com", + "Help": "Bishkek (Kyrgyzstan) Endpoint" + }, + { + "Value": "id.s3.rackcorp.com", + "Help": "Jakarta (Indonesia) Endpoint" + }, + { + "Value": "jp.s3.rackcorp.com", + "Help": "Tokyo (Japan) Endpoint" + }, + { + "Value": "sg.s3.rackcorp.com", + "Help": "SG (Singapore) Endpoint" + }, + { + "Value": "de.s3.rackcorp.com", + "Help": "Frankfurt (Germany) Endpoint" + }, + { + "Value": "us.s3.rackcorp.com", + "Help": "USA (AnyCast) Endpoint" + }, + { + "Value": "us-east-1.s3.rackcorp.com", + "Help": "New York (USA) Endpoint" + }, + { + "Value": "us-west-1.s3.rackcorp.com", + "Help": "Fremont (USA) Endpoint" + }, + { + "Value": "nz.s3.rackcorp.com", + "Help": "Auckland (New Zealand) Endpoint" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Qiniu Object Storage.", + "Provider": "Qiniu", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "s3-cn-east-1.qiniucs.com", + "Help": "East China Endpoint 1" + }, + { + "Value": "s3-cn-east-2.qiniucs.com", + "Help": "East China Endpoint 2" + }, + { + "Value": "s3-cn-north-1.qiniucs.com", + "Help": "North China Endpoint 1" + }, + { + "Value": "s3-cn-south-1.qiniucs.com", + "Help": "South China Endpoint 1" + }, + { + "Value": "s3-us-north-1.qiniucs.com", + "Help": "North America Endpoint 1" + }, + { + "Value": "s3-ap-southeast-1.qiniucs.com", + "Help": "Southeast Asia Endpoint 1" + }, + { + "Value": "s3-ap-northeast-1.qiniucs.com", + "Help": "Northeast Asia Endpoint 1" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for Selectel Object Storage.", + "Provider": "Selectel", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "s3.ru-1.storage.selcloud.ru", + "Help": "Saint Petersburg" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for S3 API.\n\nRequired when using an S3 clone.", + "Provider": "!AWS,ArvanCloud,IBMCOS,IDrive,IONOS,TencentCOS,HuaweiOBS,Alibaba,ChinaMobile,GCS,Liara,Linode,LyveCloud,Magalu,Scaleway,Selectel,StackPath,Storj,Synology,RackCorp,Qiniu,Petabox", + 
"Default": "", + "Value": null, + "Examples": [ + { + "Value": "objects-us-east-1.dream.io", + "Help": "Dream Objects endpoint", + "Provider": "Dreamhost" + }, + { + "Value": "syd1.digitaloceanspaces.com", + "Help": "DigitalOcean Spaces Sydney 1", + "Provider": "DigitalOcean" + }, + { + "Value": "sfo3.digitaloceanspaces.com", + "Help": "DigitalOcean Spaces San Francisco 3", + "Provider": "DigitalOcean" + }, + { + "Value": "sfo2.digitaloceanspaces.com", + "Help": "DigitalOcean Spaces San Francisco 2", + "Provider": "DigitalOcean" + }, + { + "Value": "fra1.digitaloceanspaces.com", + "Help": "DigitalOcean Spaces Frankfurt 1", + "Provider": "DigitalOcean" + }, + { + "Value": "nyc3.digitaloceanspaces.com", + "Help": "DigitalOcean Spaces New York 3", + "Provider": "DigitalOcean" + }, + { + "Value": "ams3.digitaloceanspaces.com", + "Help": "DigitalOcean Spaces Amsterdam 3", + "Provider": "DigitalOcean" + }, + { + "Value": "sgp1.digitaloceanspaces.com", + "Help": "DigitalOcean Spaces Singapore 1", + "Provider": "DigitalOcean" + }, + { + "Value": "lon1.digitaloceanspaces.com", + "Help": "DigitalOcean Spaces London 1", + "Provider": "DigitalOcean" + }, + { + "Value": "tor1.digitaloceanspaces.com", + "Help": "DigitalOcean Spaces Toronto 1", + "Provider": "DigitalOcean" + }, + { + "Value": "blr1.digitaloceanspaces.com", + "Help": "DigitalOcean Spaces Bangalore 1", + "Provider": "DigitalOcean" + }, + { + "Value": "localhost:8333", + "Help": "SeaweedFS S3 localhost", + "Provider": "SeaweedFS" + }, + { + "Value": "oos.eu-west-2.outscale.com", + "Help": "Outscale EU West 2 (Paris)", + "Provider": "Outscale" + }, + { + "Value": "oos.us-east-2.outscale.com", + "Help": "Outscale US east 2 (New Jersey)", + "Provider": "Outscale" + }, + { + "Value": "oos.us-west-1.outscale.com", + "Help": "Outscale EU West 1 (California)", + "Provider": "Outscale" + }, + { + "Value": "oos.cloudgouv-eu-west-1.outscale.com", + "Help": "Outscale SecNumCloud (Paris)", + "Provider": "Outscale" + }, + { + "Value": "oos.ap-northeast-1.outscale.com", + "Help": "Outscale AP Northeast 1 (Japan)", + "Provider": "Outscale" + }, + { + "Value": "s3.wasabisys.com", + "Help": "Wasabi US East 1 (N. Virginia)", + "Provider": "Wasabi" + }, + { + "Value": "s3.us-east-2.wasabisys.com", + "Help": "Wasabi US East 2 (N. 
Virginia)", + "Provider": "Wasabi" + }, + { + "Value": "s3.us-central-1.wasabisys.com", + "Help": "Wasabi US Central 1 (Texas)", + "Provider": "Wasabi" + }, + { + "Value": "s3.us-west-1.wasabisys.com", + "Help": "Wasabi US West 1 (Oregon)", + "Provider": "Wasabi" + }, + { + "Value": "s3.ca-central-1.wasabisys.com", + "Help": "Wasabi CA Central 1 (Toronto)", + "Provider": "Wasabi" + }, + { + "Value": "s3.eu-central-1.wasabisys.com", + "Help": "Wasabi EU Central 1 (Amsterdam)", + "Provider": "Wasabi" + }, + { + "Value": "s3.eu-central-2.wasabisys.com", + "Help": "Wasabi EU Central 2 (Frankfurt)", + "Provider": "Wasabi" + }, + { + "Value": "s3.eu-west-1.wasabisys.com", + "Help": "Wasabi EU West 1 (London)", + "Provider": "Wasabi" + }, + { + "Value": "s3.eu-west-2.wasabisys.com", + "Help": "Wasabi EU West 2 (Paris)", + "Provider": "Wasabi" + }, + { + "Value": "s3.eu-south-1.wasabisys.com", + "Help": "Wasabi EU South 1 (Milan)", + "Provider": "Wasabi" + }, + { + "Value": "s3.ap-northeast-1.wasabisys.com", + "Help": "Wasabi AP Northeast 1 (Tokyo) endpoint", + "Provider": "Wasabi" + }, + { + "Value": "s3.ap-northeast-2.wasabisys.com", + "Help": "Wasabi AP Northeast 2 (Osaka) endpoint", + "Provider": "Wasabi" + }, + { + "Value": "s3.ap-southeast-1.wasabisys.com", + "Help": "Wasabi AP Southeast 1 (Singapore)", + "Provider": "Wasabi" + }, + { + "Value": "s3.ap-southeast-2.wasabisys.com", + "Help": "Wasabi AP Southeast 2 (Sydney)", + "Provider": "Wasabi" + }, + { + "Value": "storage.iran.liara.space", + "Help": "Liara Iran endpoint", + "Provider": "Liara" + }, + { + "Value": "s3.ir-thr-at1.arvanstorage.ir", + "Help": "ArvanCloud Tehran Iran (Simin) endpoint", + "Provider": "ArvanCloud" + }, + { + "Value": "s3.ir-tbz-sh1.arvanstorage.ir", + "Help": "ArvanCloud Tabriz Iran (Shahriar) endpoint", + "Provider": "ArvanCloud" + }, + { + "Value": "s3.eu-central-1.s4.mega.io", + "Help": "Mega S4 eu-central-1 (Amsterdam)", + "Provider": "Mega" + }, + { + "Value": "s3.eu-central-2.s4.mega.io", + "Help": "Mega S4 eu-central-2 (Bettembourg)", + "Provider": "Mega" + }, + { + "Value": "s3.ca-central-1.s4.mega.io", + "Help": "Mega S4 ca-central-1 (Montreal)", + "Provider": "Mega" + }, + { + "Value": "s3.ca-west-1.s4.mega.io", + "Help": "Mega S4 ca-west-1 (Vancouver)", + "Provider": "Mega" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "location_constraint", + "FieldName": "", + "Help": "Location constraint - must be set to match the Region.\n\nUsed when creating buckets only.", + "Provider": "AWS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Empty for US Region, Northern Virginia, or Pacific Northwest" + }, + { + "Value": "us-east-2", + "Help": "US East (Ohio) Region" + }, + { + "Value": "us-west-1", + "Help": "US West (Northern California) Region" + }, + { + "Value": "us-west-2", + "Help": "US West (Oregon) Region" + }, + { + "Value": "ca-central-1", + "Help": "Canada (Central) Region" + }, + { + "Value": "eu-west-1", + "Help": "EU (Ireland) Region" + }, + { + "Value": "eu-west-2", + "Help": "EU (London) Region" + }, + { + "Value": "eu-west-3", + "Help": "EU (Paris) Region" + }, + { + "Value": "eu-north-1", + "Help": "EU (Stockholm) Region" + }, + { + "Value": "eu-south-1", + "Help": "EU (Milan) Region" + }, + { + "Value": "EU", + "Help": "EU Region" + }, + { + "Value": "ap-southeast-1", + "Help": 
"Asia Pacific (Singapore) Region" + }, + { + "Value": "ap-southeast-2", + "Help": "Asia Pacific (Sydney) Region" + }, + { + "Value": "ap-northeast-1", + "Help": "Asia Pacific (Tokyo) Region" + }, + { + "Value": "ap-northeast-2", + "Help": "Asia Pacific (Seoul) Region" + }, + { + "Value": "ap-northeast-3", + "Help": "Asia Pacific (Osaka-Local) Region" + }, + { + "Value": "ap-south-1", + "Help": "Asia Pacific (Mumbai) Region" + }, + { + "Value": "ap-east-1", + "Help": "Asia Pacific (Hong Kong) Region" + }, + { + "Value": "sa-east-1", + "Help": "South America (Sao Paulo) Region" + }, + { + "Value": "il-central-1", + "Help": "Israel (Tel Aviv) Region" + }, + { + "Value": "me-south-1", + "Help": "Middle East (Bahrain) Region" + }, + { + "Value": "af-south-1", + "Help": "Africa (Cape Town) Region" + }, + { + "Value": "cn-north-1", + "Help": "China (Beijing) Region" + }, + { + "Value": "cn-northwest-1", + "Help": "China (Ningxia) Region" + }, + { + "Value": "us-gov-east-1", + "Help": "AWS GovCloud (US-East) Region" + }, + { + "Value": "us-gov-west-1", + "Help": "AWS GovCloud (US) Region" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "location_constraint", + "FieldName": "", + "Help": "Location constraint - must match endpoint.\n\nUsed when creating buckets only.", + "Provider": "ChinaMobile", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "wuxi1", + "Help": "East China (Suzhou)" + }, + { + "Value": "jinan1", + "Help": "East China (Jinan)" + }, + { + "Value": "ningbo1", + "Help": "East China (Hangzhou)" + }, + { + "Value": "shanghai1", + "Help": "East China (Shanghai-1)" + }, + { + "Value": "zhengzhou1", + "Help": "Central China (Zhengzhou)" + }, + { + "Value": "hunan1", + "Help": "Central China (Changsha-1)" + }, + { + "Value": "zhuzhou1", + "Help": "Central China (Changsha-2)" + }, + { + "Value": "guangzhou1", + "Help": "South China (Guangzhou-2)" + }, + { + "Value": "dongguan1", + "Help": "South China (Guangzhou-3)" + }, + { + "Value": "beijing1", + "Help": "North China (Beijing-1)" + }, + { + "Value": "beijing2", + "Help": "North China (Beijing-2)" + }, + { + "Value": "beijing4", + "Help": "North China (Beijing-3)" + }, + { + "Value": "huhehaote1", + "Help": "North China (Huhehaote)" + }, + { + "Value": "chengdu1", + "Help": "Southwest China (Chengdu)" + }, + { + "Value": "chongqing1", + "Help": "Southwest China (Chongqing)" + }, + { + "Value": "guiyang1", + "Help": "Southwest China (Guiyang)" + }, + { + "Value": "xian1", + "Help": "Nouthwest China (Xian)" + }, + { + "Value": "yunnan", + "Help": "Yunnan China (Kunming)" + }, + { + "Value": "yunnan2", + "Help": "Yunnan China (Kunming-2)" + }, + { + "Value": "tianjin1", + "Help": "Tianjin China (Tianjin)" + }, + { + "Value": "jilin1", + "Help": "Jilin China (Changchun)" + }, + { + "Value": "hubei1", + "Help": "Hubei China (Xiangyan)" + }, + { + "Value": "jiangxi1", + "Help": "Jiangxi China (Nanchang)" + }, + { + "Value": "gansu1", + "Help": "Gansu China (Lanzhou)" + }, + { + "Value": "shanxi1", + "Help": "Shanxi China (Taiyuan)" + }, + { + "Value": "liaoning1", + "Help": "Liaoning China (Shenyang)" + }, + { + "Value": "hebei1", + "Help": "Hebei China (Shijiazhuang)" + }, + { + "Value": "fujian1", + "Help": "Fujian China (Xiamen)" + }, + { + "Value": "guangxi1", + "Help": "Guangxi China (Nanning)" + }, + { + "Value": "anhui1", + "Help": "Anhui China 
(Huainan)" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "sftp", - "Description": "SSH/SFTP", - "Prefix": "sftp", - "Options": [ - { - "Name": "host", - "Help": "SSH host to connect to.\n\nE.g. \"example.com\".", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "user", - "Help": "SSH username.", - "Provider": "", - "Default": "zenon", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "zenon", - "ValueStr": "zenon", - "Type": "string" - }, - { - "Name": "port", - "Help": "SSH port number.", - "Provider": "", - "Default": 22, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "22", - "ValueStr": "22", - "Type": "int" - }, - { - "Name": "pass", - "Help": "SSH password, leave blank to use ssh-agent.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "key_pem", - "Help": "Raw PEM-encoded private key.\n\nIf specified, will override key_file parameter.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "key_file", - "Help": "Path to PEM-encoded private key file.\n\nLeave blank or set key-use-agent to use ssh-agent.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "key_file_pass", - "Help": "The passphrase to decrypt the PEM-encoded private key file.\n\nOnly PEM encrypted key files (old OpenSSH format) are supported. 
Encrypted keys\nin the new OpenSSH format can't be used.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "pubkey_file", - "Help": "Optional path to public key file.\n\nSet this if you have a signed certificate you want to use for authentication.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "known_hosts_file", - "Help": "Optional path to known_hosts file.\n\nSet this value to enable server host key validation.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "~/.ssh/known_hosts", - "Help": "Use OpenSSH's known_hosts file.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "key_use_agent", - "Help": "When set forces the usage of the ssh-agent.\n\nWhen key-file is also set, the \".pub\" file of the specified key-file is read and only the associated key is\nrequested from the ssh-agent. This allows to avoid `Too many authentication failures for *username*` errors\nwhen the ssh-agent contains many keys.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "use_insecure_cipher", - "Help": "Enable the use of insecure ciphers and key exchange methods.\n\nThis enables the use of the following insecure ciphers and key exchange methods:\n\n- aes128-cbc\n- aes192-cbc\n- aes256-cbc\n- 3des-cbc\n- diffie-hellman-group-exchange-sha256\n- diffie-hellman-group-exchange-sha1\n\nThose algorithms are insecure and may allow plaintext data to be recovered by an attacker.\n\nThis must be false if you use either ciphers or key_exchange advanced options.\n", - "Provider": "", - "Default": false, - "Value": null, - "Examples": [ - { - "Value": "false", - "Help": "Use default Cipher list.", - "Provider": "" - }, - { - "Value": "true", - "Help": "Enables the use of the aes128-cbc cipher and diffie-hellman-group-exchange-sha256, diffie-hellman-group-exchange-sha1 key exchange.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "disable_hashcheck", - "Help": "Disable the execution of SSH commands to determine if remote file hashing is available.\n\nLeave blank or set to false to enable hashing (recommended), set to true to disable hashing.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - 
"Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "ask_password", - "Help": "Allow asking for SFTP password when needed.\n\nIf this is set and no password is supplied then rclone will:\n- ask for a password\n- not contact the ssh agent\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "path_override", - "Help": "Override path used by SSH shell commands.\n\nThis allows checksum calculation when SFTP and SSH paths are\ndifferent. This issue affects among others Synology NAS boxes.\n\nE.g. if shared folders can be found in directories representing volumes:\n\n rclone sync /home/local/directory remote:/directory --sftp-path-override /volume2/directory\n\nE.g. if home directory can be found in a shared folder called \"home\":\n\n rclone sync /home/local/directory remote:/home/directory --sftp-path-override /volume1/homes/USER/directory", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "set_modtime", - "Help": "Set the modified time on the remote if set.", - "Provider": "", - "Default": true, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "shell_type", - "Help": "The type of SSH shell on remote server, if any.\n\nLeave blank for autodetect.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "none", - "Help": "No shell access", - "Provider": "" - }, - { - "Value": "unix", - "Help": "Unix shell", - "Provider": "" - }, - { - "Value": "powershell", - "Help": "PowerShell", - "Provider": "" - }, - { - "Value": "cmd", - "Help": "Windows Command Prompt", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "md5sum_command", - "Help": "The command used to read md5 hashes.\n\nLeave blank for autodetect.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "sha1sum_command", - "Help": "The command used to read sha1 hashes.\n\nLeave blank for autodetect.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "skip_links", - "Help": "Set to skip any symlinks and any other non regular files.", - "Provider": "", - "Default": false, - "Value": 
null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "subsystem", - "Help": "Specifies the SSH2 subsystem on the remote host.", - "Provider": "", - "Default": "sftp", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "sftp", - "ValueStr": "sftp", - "Type": "string" - }, - { - "Name": "server_command", - "Help": "Specifies the path or command to run a sftp server on the remote host.\n\nThe subsystem option is ignored when server_command is defined.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "use_fstat", - "Help": "If set use fstat instead of stat.\n\nSome servers limit the amount of open files and calling Stat after opening\nthe file will throw an error from the server. Setting this flag will call\nFstat instead of Stat which is called on an already open file handle.\n\nIt has been found that this helps with IBM Sterling SFTP servers which have\n\"extractability\" level set to 1 which means only 1 file can be opened at\nany given time.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "disable_concurrent_reads", - "Help": "If set don't use concurrent reads.\n\nNormally concurrent reads are safe to use and not using them will\ndegrade performance, so this option is disabled by default.\n\nSome servers limit the amount number of times a file can be\ndownloaded. Using concurrent reads can trigger this limit, so if you\nhave a server which returns\n\n Failed to copy: file does not exist\n\nThen you may need to enable this flag.\n\nIf concurrent reads are disabled, the use_fstat option is ignored.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "disable_concurrent_writes", - "Help": "If set don't use concurrent writes.\n\nNormally rclone uses concurrent writes to upload files. 
This improves\nthe performance greatly, especially for distant servers.\n\nThis option disables concurrent writes should that be necessary.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "idle_timeout", - "Help": "Max time before closing idle connections.\n\nIf no connections have been returned to the connection pool in the time\ngiven, rclone will empty the connection pool.\n\nSet to 0 to keep connections indefinitely.\n", - "Provider": "", - "Default": 60000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1m0s", - "ValueStr": "1m0s", - "Type": "Duration" - }, - { - "Name": "chunk_size", - "Help": "Upload and download chunk size.\n\nThis controls the maximum size of payload in SFTP protocol packets.\nThe RFC limits this to 32768 bytes (32k), which is the default. However,\na lot of servers support larger sizes, typically limited to a maximum\ntotal package size of 256k, and setting it larger will increase transfer\nspeed dramatically on high latency links. This includes OpenSSH, and,\nfor example, using the value of 255k works well, leaving plenty of room\nfor overhead while still being within a total packet size of 256k.\n\nMake sure to test thoroughly before using a value higher than 32k,\nand only use it if you always connect to the same server or after\nsufficiently broad testing. If you get errors such as\n\"failed to send packet payload: EOF\", lots of \"connection lost\",\nor \"corrupted on transfer\", when copying a larger file, try lowering\nthe value. 
The server run by [rclone serve sftp](/commands/rclone_serve_sftp)\nsends packets with standard 32k maximum payload so you must not\nset a different chunk_size when downloading files, but it accepts\npackets up to the 256k total size, so for uploads the chunk_size\ncan be set as for the OpenSSH example above.\n", - "Provider": "", - "Default": 32768, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "32Ki", - "ValueStr": "32Ki", - "Type": "SizeSuffix" - }, - { - "Name": "concurrency", - "Help": "The maximum number of outstanding requests for one file\n\nThis controls the maximum number of outstanding requests for one file.\nIncreasing it will increase throughput on high latency links at the\ncost of using more memory.\n", - "Provider": "", - "Default": 64, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "64", - "ValueStr": "64", - "Type": "int" - }, - { - "Name": "set_env", - "Help": "Environment variables to pass to sftp and commands\n\nSet environment variables in the form:\n\n VAR=value\n\nto be passed to the sftp client and to any commands run (eg md5sum).\n\nPass multiple variables space separated, eg\n\n VAR1=value VAR2=value\n\nand pass variables with spaces in quotes, eg\n\n \"VAR3=value with space\" \"VAR4=value with space\" VAR5=nospacehere\n\n", - "Provider": "", - "Default": [], - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "SpaceSepList" - }, - { - "Name": "ciphers", - "Help": "Space separated list of ciphers to be used for session encryption, ordered by preference.\n\nAt least one must match with server configuration. This can be checked for example using ssh -Q cipher.\n\nThis must not be set if use_insecure_cipher is true.\n\nExample:\n\n aes128-ctr aes192-ctr aes256-ctr aes128-gcm@openssh.com aes256-gcm@openssh.com\n", - "Provider": "", - "Default": [], - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "SpaceSepList" - }, - { - "Name": "key_exchange", - "Help": "Space separated list of key exchange algorithms, ordered by preference.\n\nAt least one must match with server configuration. This can be checked for example using ssh -Q kex.\n\nThis must not be set if use_insecure_cipher is true.\n\nExample:\n\n sntrup761x25519-sha512@openssh.com curve25519-sha256 curve25519-sha256@libssh.org ecdh-sha2-nistp256\n", - "Provider": "", - "Default": [], - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "SpaceSepList" - }, - { - "Name": "macs", - "Help": "Space separated list of MACs (message authentication code) algorithms, ordered by preference.\n\nAt least one must match with server configuration. 
This can be checked for example using ssh -Q mac.\n\nExample:\n\n umac-64-etm@openssh.com umac-128-etm@openssh.com hmac-sha2-256-etm@openssh.com\n", - "Provider": "", - "Default": [], - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "SpaceSepList" - }, - { - "Name": "host_key_algorithms", - "Help": "Space separated list of host key algorithms, ordered by preference.\n\nAt least one must match with server configuration. This can be checked for example using ssh -Q HostKeyAlgorithms.\n\nNote: This can affect the outcome of key negotiation with the server even if server host key validation is not enabled.\n\nExample:\n\n ssh-ed25519 ssh-rsa ssh-dss\n", - "Provider": "", - "Default": [], - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "SpaceSepList" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "location_constraint", + "FieldName": "", + "Help": "Location constraint - must match endpoint.\n\nUsed when creating buckets only.", + "Provider": "ArvanCloud", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "ir-thr-at1", + "Help": "Tehran Iran (Simin)" + }, + { + "Value": "ir-tbz-sh1", + "Help": "Tabriz Iran (Shahriar)" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "sharefile", - "Description": "Citrix Sharefile", - "Prefix": "sharefile", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": 
false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "upload_cutoff", - "Help": "Cutoff for switching to multipart upload.", - "Provider": "", - "Default": 134217728, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "128Mi", - "ValueStr": "128Mi", - "Type": "SizeSuffix" - }, - { - "Name": "root_folder_id", - "Help": "ID of the root folder.\n\nLeave blank to access \"Personal Folders\". You can use one of the\nstandard values here or any folder ID (long hex number ID).", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Access the Personal Folders (default).", - "Provider": "" - }, - { - "Value": "favorites", - "Help": "Access the Favorites folder.", - "Provider": "" - }, - { - "Value": "allshared", - "Help": "Access all the shared folders.", - "Provider": "" - }, - { - "Value": "connectors", - "Help": "Access all the individual connectors.", - "Provider": "" - }, - { - "Value": "top", - "Help": "Access the home, favorites, and shared folders as well as the connectors.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "chunk_size", - "Help": "Upload chunk size.\n\nMust a power of 2 \u003e= 256k.\n\nMaking this larger will improve performance, but note that each chunk\nis buffered in memory one per transfer.\n\nReducing this will reduce memory usage but decrease performance.", - "Provider": "", - "Default": 67108864, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "64Mi", - "ValueStr": "64Mi", - "Type": "SizeSuffix" - }, - { - "Name": "endpoint", - "Help": "Endpoint for API calls.\n\nThis is usually auto discovered as part of the oauth process, but can\nbe set manually to something like: https://XXX.sharefile.com\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 57091982, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,LeftSpace,LeftPeriod,RightSpace,RightPeriod,InvalidUtf8,Dot", - "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,LeftSpace,LeftPeriod,RightSpace,RightPeriod,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "location_constraint", + "FieldName": "", + "Help": "Location constraint - must match 
endpoint when using IBM Cloud Public.\n\nFor on-prem COS, do not make a selection from this list, hit enter.", + "Provider": "IBMCOS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "us-standard", + "Help": "US Cross Region Standard" + }, + { + "Value": "us-vault", + "Help": "US Cross Region Vault" + }, + { + "Value": "us-cold", + "Help": "US Cross Region Cold" + }, + { + "Value": "us-flex", + "Help": "US Cross Region Flex" + }, + { + "Value": "us-east-standard", + "Help": "US East Region Standard" + }, + { + "Value": "us-east-vault", + "Help": "US East Region Vault" + }, + { + "Value": "us-east-cold", + "Help": "US East Region Cold" + }, + { + "Value": "us-east-flex", + "Help": "US East Region Flex" + }, + { + "Value": "us-south-standard", + "Help": "US South Region Standard" + }, + { + "Value": "us-south-vault", + "Help": "US South Region Vault" + }, + { + "Value": "us-south-cold", + "Help": "US South Region Cold" + }, + { + "Value": "us-south-flex", + "Help": "US South Region Flex" + }, + { + "Value": "eu-standard", + "Help": "EU Cross Region Standard" + }, + { + "Value": "eu-vault", + "Help": "EU Cross Region Vault" + }, + { + "Value": "eu-cold", + "Help": "EU Cross Region Cold" + }, + { + "Value": "eu-flex", + "Help": "EU Cross Region Flex" + }, + { + "Value": "eu-gb-standard", + "Help": "Great Britain Standard" + }, + { + "Value": "eu-gb-vault", + "Help": "Great Britain Vault" + }, + { + "Value": "eu-gb-cold", + "Help": "Great Britain Cold" + }, + { + "Value": "eu-gb-flex", + "Help": "Great Britain Flex" + }, + { + "Value": "ap-standard", + "Help": "APAC Standard" + }, + { + "Value": "ap-vault", + "Help": "APAC Vault" + }, + { + "Value": "ap-cold", + "Help": "APAC Cold" + }, + { + "Value": "ap-flex", + "Help": "APAC Flex" + }, + { + "Value": "mel01-standard", + "Help": "Melbourne Standard" + }, + { + "Value": "mel01-vault", + "Help": "Melbourne Vault" + }, + { + "Value": "mel01-cold", + "Help": "Melbourne Cold" + }, + { + "Value": "mel01-flex", + "Help": "Melbourne Flex" + }, + { + "Value": "tor01-standard", + "Help": "Toronto Standard" + }, + { + "Value": "tor01-vault", + "Help": "Toronto Vault" + }, + { + "Value": "tor01-cold", + "Help": "Toronto Cold" + }, + { + "Value": "tor01-flex", + "Help": "Toronto Flex" + } ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "location_constraint", + "FieldName": "", + "Help": "Location constraint - the location where your bucket will be located and your data stored.\n", + "Provider": "RackCorp", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "global", + "Help": "Global CDN Region" + }, + { + "Value": "au", + "Help": "Australia (All locations)" + }, + { + "Value": "au-nsw", + "Help": "NSW (Australia) Region" + }, + { + "Value": "au-qld", + "Help": "QLD (Australia) Region" + }, + { + "Value": "au-vic", + "Help": "VIC (Australia) Region" + }, + { + "Value": "au-wa", + "Help": "Perth (Australia) Region" + }, + { + "Value": "ph", + "Help": "Manila (Philippines) Region" + }, + { + "Value": "th", + "Help": "Bangkok (Thailand) Region" + }, + { + "Value": "hk", + "Help": "HK (Hong Kong) Region" + }, + { + "Value": "mn", + "Help": "Ulaanbaatar (Mongolia) Region" + }, + { + "Value": "kg", + "Help": "Bishkek (Kyrgyzstan) Region" + }, + { + "Value": "id", + "Help": "Jakarta (Indonesia) Region" + }, + { + "Value": "jp", + "Help": "Tokyo (Japan) Region" + }, + { + "Value": "sg", + "Help": "SG (Singapore) Region" + }, + { + "Value": "de", + "Help": "Frankfurt (Germany) Region" + }, + { + "Value": "us", + "Help": "USA (AnyCast) Region" + }, + { + "Value": "us-east-1", + "Help": "New York (USA) Region" + }, + { + "Value": "us-west-1", + "Help": "Fremont (USA) Region" + }, + { + "Value": "nz", + "Help": "Auckland (New Zealand) Region" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "sia", - "Description": "Sia Decentralized Cloud", - "Prefix": "sia", - "Options": [ - { - "Name": "api_url", - "Help": "Sia daemon API URL, like http://sia.daemon.host:9980.\n\nNote that siad must run with --disable-api-security to open API port for other hosts (not recommended).\nKeep default if Sia daemon runs on localhost.", - "Provider": "", - "Default": "http://127.0.0.1:9980", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "http://127.0.0.1:9980", - "ValueStr": "http://127.0.0.1:9980", - "Type": "string" - }, - { - "Name": "api_password", - "Help": "Sia Daemon API Password.\n\nCan be found in the apipassword file located in HOME/.sia/ or in the daemon directory.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "user_agent", - "Help": "Siad User Agent\n\nSia daemon 
requires the 'Sia-Agent' user agent by default for security", - "Provider": "", - "Default": "Sia-Agent", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Sia-Agent", - "ValueStr": "Sia-Agent", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50436354, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,Question,Hash,Percent,Del,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,Question,Hash,Percent,Del,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "location_constraint", + "FieldName": "", + "Help": "Location constraint - the location where your bucket will be located and your data stored.\n", + "Provider": "RackCorp", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "global", + "Help": "Global CDN Region" + }, + { + "Value": "au", + "Help": "Australia (All locations)" + }, + { + "Value": "au-nsw", + "Help": "NSW (Australia) Region" + }, + { + "Value": "au-qld", + "Help": "QLD (Australia) Region" + }, + { + "Value": "au-vic", + "Help": "VIC (Australia) Region" + }, + { + "Value": "au-wa", + "Help": "Perth (Australia) Region" + }, + { + "Value": "ph", + "Help": "Manila (Philippines) Region" + }, + { + "Value": "th", + "Help": "Bangkok (Thailand) Region" + }, + { + "Value": "hk", + "Help": "HK (Hong Kong) Region" + }, + { + "Value": "mn", + "Help": "Ulaanbaatar (Mongolia) Region" + }, + { + "Value": "kg", + "Help": "Bishkek (Kyrgyzstan) Region" + }, + { + "Value": "id", + "Help": "Jakarta (Indonesia) Region" + }, + { + "Value": "jp", + "Help": "Tokyo (Japan) Region" + }, + { + "Value": "sg", + "Help": "SG (Singapore) Region" + }, + { + "Value": "de", + "Help": "Frankfurt (Germany) Region" + }, + { + "Value": "us", + "Help": "USA (AnyCast) Region" + }, + { + "Value": "us-east-1", + "Help": "New York (USA) Region" + }, + { + "Value": "us-west-1", + "Help": "Freemont (USA) Region" + }, + { + "Value": "nz", + "Help": "Auckland (New Zealand) Region" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "smb", - "Description": "SMB / CIFS", - "Prefix": "smb", - "Options": [ - { - "Name": "host", - "Help": "SMB server hostname to connect to.\n\nE.g. 
\"example.com\".", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "user", - "Help": "SMB username.", - "Provider": "", - "Default": "zenon", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "zenon", - "ValueStr": "zenon", - "Type": "string" - }, - { - "Name": "port", - "Help": "SMB port number.", - "Provider": "", - "Default": 445, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "445", - "ValueStr": "445", - "Type": "int" - }, - { - "Name": "pass", - "Help": "SMB password.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "domain", - "Help": "Domain name for NTLM authentication.", - "Provider": "", - "Default": "WORKGROUP", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "WORKGROUP", - "ValueStr": "WORKGROUP", - "Type": "string" - }, - { - "Name": "spn", - "Help": "Service principal name.\n\nRclone presents this name to the server. Some servers use this as further\nauthentication, and it often needs to be set for clusters. For example:\n\n cifs/remotehost:1020\n\nLeave blank if not sure.\n", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "idle_timeout", - "Help": "Max time before closing idle connections.\n\nIf no connections have been returned to the connection pool in the time\ngiven, rclone will empty the connection pool.\n\nSet to 0 to keep connections indefinitely.\n", - "Provider": "", - "Default": 60000000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1m0s", - "ValueStr": "1m0s", - "Type": "Duration" - }, - { - "Name": "hide_special_share", - "Help": "Hide special shares (e.g. 
print$) which users aren't supposed to access.", - "Provider": "", - "Default": true, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "case_insensitive", - "Help": "Whether the server is configured to be case-insensitive.\n\nAlways true on Windows shares.", - "Provider": "", - "Default": true, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "true", - "ValueStr": "true", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 56698766, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,RightSpace,RightPeriod,InvalidUtf8,Dot", - "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,RightSpace,RightPeriod,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "location_constraint", + "FieldName": "", + "Help": "Location constraint - must be set to match the Region.\n\nUsed when creating buckets only.", + "Provider": "Qiniu", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "cn-east-1", + "Help": "East China Region 1" + }, + { + "Value": "cn-east-2", + "Help": "East China Region 2" + }, + { + "Value": "cn-north-1", + "Help": "North China Region 1" + }, + { + "Value": "cn-south-1", + "Help": "South China Region 1" + }, + { + "Value": "us-north-1", + "Help": "North America Region 1" + }, + { + "Value": "ap-southeast-1", + "Help": "Southeast Asia Region 1" + }, + { + "Value": "ap-northeast-1", + "Help": "Northeast Asia Region 1" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "storj", - "Description": "Storj Decentralized Cloud Storage", - "Prefix": "storj", - "Options": [ - { - "Name": "provider", - "Help": "Choose an authentication method.", - "Provider": "", - "Default": "existing", - "Value": null, - "Examples": [ - { - "Value": "existing", - "Help": "Use an existing access grant.", - "Provider": "" - }, - { - "Value": "new", - "Help": "Create a new access grant from satellite address, API key, and passphrase.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "existing", - "ValueStr": "existing", - "Type": "string" - }, - { - "Name": "access_grant", - "Help": "Access grant.", - "Provider": "existing", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "satellite_address", - "Help": "Satellite address.\n\nCustom satellite 
address should match the format: `\u003cnodeid\u003e@\u003caddress\u003e:\u003cport\u003e`.", - "Provider": "new", - "Default": "us1.storj.io", - "Value": null, - "Examples": [ - { - "Value": "us1.storj.io", - "Help": "US1", - "Provider": "" - }, - { - "Value": "eu1.storj.io", - "Help": "EU1", - "Provider": "" - }, - { - "Value": "ap1.storj.io", - "Help": "AP1", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "us1.storj.io", - "ValueStr": "us1.storj.io", - "Type": "string" - }, - { - "Name": "api_key", - "Help": "API key.", - "Provider": "new", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "passphrase", - "Help": "Encryption passphrase.\n\nTo access existing objects enter passphrase used for uploading.", - "Provider": "new", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "location_constraint", + "FieldName": "", + "Help": "Location constraint - must be set to match the Region.\n\nLeave blank if not sure. Used when creating buckets only.", + "Provider": "!AWS,Alibaba,ArvanCloud,HuaweiOBS,ChinaMobile,Cloudflare,FlashBlade,IBMCOS,IDrive,IONOS,Leviia,Liara,Linode,Magalu,Outscale,Qiniu,RackCorp,Scaleway,Selectel,StackPath,Storj,TencentCOS,Petabox,Mega", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "acl", + "FieldName": "", + "Help": "Canned ACL used when creating buckets and storing or copying objects.\n\nThis ACL is used for creating objects and if bucket_acl isn't set, for creating buckets too.\n\nFor more info visit https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl\n\nNote that this ACL is applied when server-side copying objects as S3\ndoesn't copy the ACL from the source but rather writes a fresh one.\n\nIf the acl is an empty string then no X-Amz-Acl: header is added and\nthe default (private) will be used.\n", + "Provider": "!Storj,Selectel,Synology,Cloudflare,FlashBlade,Mega", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "default", + "Help": "Owner gets Full_CONTROL.\nNo one else has access rights (default).", + "Provider": "TencentCOS" + }, + { + "Value": "private", + "Help": "Owner gets FULL_CONTROL.\nNo one else has access rights (default).", + "Provider": "!IBMCOS,TencentCOS" + }, + { + "Value": "public-read", + "Help": "Owner gets FULL_CONTROL.\nThe AllUsers group gets READ access.", + "Provider": "!IBMCOS" + }, + { + "Value": "public-read-write", + "Help": "Owner gets FULL_CONTROL.\nThe AllUsers group gets READ and WRITE access.\nGranting this on a bucket is generally not recommended.", + "Provider": "!IBMCOS" + }, + { + "Value": 
"authenticated-read", + "Help": "Owner gets FULL_CONTROL.\nThe AuthenticatedUsers group gets READ access.", + "Provider": "!IBMCOS" + }, + { + "Value": "bucket-owner-read", + "Help": "Object owner gets FULL_CONTROL.\nBucket owner gets READ access.\nIf you specify this canned ACL when creating a bucket, Amazon S3 ignores it.", + "Provider": "!IBMCOS,ChinaMobile" + }, + { + "Value": "bucket-owner-full-control", + "Help": "Both the object owner and the bucket owner get FULL_CONTROL over the object.\nIf you specify this canned ACL when creating a bucket, Amazon S3 ignores it.", + "Provider": "!IBMCOS,ChinaMobile" + }, + { + "Value": "private", + "Help": "Owner gets FULL_CONTROL.\nNo one else has access rights (default).\nThis acl is available on IBM Cloud (Infra), IBM Cloud (Storage), On-Premise COS.", + "Provider": "IBMCOS" + }, + { + "Value": "public-read", + "Help": "Owner gets FULL_CONTROL.\nThe AllUsers group gets READ access.\nThis acl is available on IBM Cloud (Infra), IBM Cloud (Storage), On-Premise IBM COS.", + "Provider": "IBMCOS" + }, + { + "Value": "public-read-write", + "Help": "Owner gets FULL_CONTROL.\nThe AllUsers group gets READ and WRITE access.\nThis acl is available on IBM Cloud (Infra), On-Premise IBM COS.", + "Provider": "IBMCOS" + }, + { + "Value": "authenticated-read", + "Help": "Owner gets FULL_CONTROL.\nThe AuthenticatedUsers group gets READ access.\nNot supported on Buckets.\nThis acl is available on IBM Cloud (Infra) and On-Premise IBM COS.", + "Provider": "IBMCOS" + } ], - "CommandHelp": null, - "Aliases": [ - "tardigrade" + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "bucket_acl", + "FieldName": "", + "Help": "Canned ACL used when creating buckets.\n\nFor more info visit https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl\n\nNote that this ACL is applied when only when creating buckets. If it\nisn't set then \"acl\" is used instead.\n\nIf the \"acl\" and \"bucket_acl\" are empty strings then no X-Amz-Acl:\nheader is added and the default (private) will be used.\n", + "Provider": "!Storj,Selectel,Synology,Cloudflare,FlashBlade", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "private", + "Help": "Owner gets FULL_CONTROL.\nNo one else has access rights (default)." + }, + { + "Value": "public-read", + "Help": "Owner gets FULL_CONTROL.\nThe AllUsers group gets READ access." + }, + { + "Value": "public-read-write", + "Help": "Owner gets FULL_CONTROL.\nThe AllUsers group gets READ and WRITE access.\nGranting this on a bucket is generally not recommended." + }, + { + "Value": "authenticated-read", + "Help": "Owner gets FULL_CONTROL.\nThe AuthenticatedUsers group gets READ access." 
+ } ], - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "tardigrade", - "Description": "Storj Decentralized Cloud Storage", - "Prefix": "tardigrade", - "Options": [ - { - "Name": "provider", - "Help": "Choose an authentication method.", - "Provider": "", - "Default": "existing", - "Value": null, - "Examples": [ - { - "Value": "existing", - "Help": "Use an existing access grant.", - "Provider": "" - }, - { - "Value": "new", - "Help": "Create a new access grant from satellite address, API key, and passphrase.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "existing", - "ValueStr": "existing", - "Type": "string" - }, - { - "Name": "access_grant", - "Help": "Access grant.", - "Provider": "existing", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "satellite_address", - "Help": "Satellite address.\n\nCustom satellite address should match the format: `\u003cnodeid\u003e@\u003caddress\u003e:\u003cport\u003e`.", - "Provider": "new", - "Default": "us1.storj.io", - "Value": null, - "Examples": [ - { - "Value": "us1.storj.io", - "Help": "US1", - "Provider": "" - }, - { - "Value": "eu1.storj.io", - "Help": "EU1", - "Provider": "" - }, - { - "Value": "ap1.storj.io", - "Help": "AP1", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "us1.storj.io", - "ValueStr": "us1.storj.io", - "Type": "string" - }, - { - "Name": "api_key", - "Help": "API key.", - "Provider": "new", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "passphrase", - "Help": "Encryption passphrase.\n\nTo access existing objects enter passphrase used for uploading.", - "Provider": "new", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 3, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "requester_pays", + "FieldName": "", + "Help": "Enables requester pays option when interacting with S3 bucket.", + "Provider": "AWS", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "server_side_encryption", + "FieldName": "", + "Help": "The server-side encryption algorithm used when storing this object in S3.", + "Provider": "AWS,Ceph,ChinaMobile,Minio", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "None" + }, + { + "Value": "AES256", + "Help": "AES256" + }, + { + "Value": "aws:kms", + "Help": 
"aws:kms", + "Provider": "!ChinaMobile" + } ], - "CommandHelp": null, - "Aliases": [ - "tardigrade" + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sse_customer_algorithm", + "FieldName": "", + "Help": "If using SSE-C, the server-side encryption algorithm used when storing this object in S3.", + "Provider": "AWS,Ceph,ChinaMobile,Minio", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "None" + }, + { + "Value": "AES256", + "Help": "AES256" + } ], - "Hide": true, - "MetadataInfo": null - }, - { - "Name": "sugarsync", - "Description": "Sugarsync", - "Prefix": "sugarsync", - "Options": [ - { - "Name": "app_id", - "Help": "Sugarsync App ID.\n\nLeave blank to use rclone's.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "access_key_id", - "Help": "Sugarsync Access Key ID.\n\nLeave blank to use rclone's.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "private_access_key", - "Help": "Sugarsync Private Access Key.\n\nLeave blank to use rclone's.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "hard_delete", - "Help": "Permanently delete files if true\notherwise put them in the deleted files.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "refresh_token", - "Help": "Sugarsync refresh token.\n\nLeave blank normally, will be auto configured by rclone.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "authorization", - "Help": "Sugarsync authorization.\n\nLeave blank normally, will be auto configured by rclone.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "authorization_expiry", - "Help": "Sugarsync authorization expiry.\n\nLeave blank normally, will be auto configured by rclone.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": 
"user", - "Help": "Sugarsync user.\n\nLeave blank normally, will be auto configured by rclone.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "root_id", - "Help": "Sugarsync root id.\n\nLeave blank normally, will be auto configured by rclone.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "deleted_id", - "Help": "Sugarsync deleted folder id.\n\nLeave blank normally, will be auto configured by rclone.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50397186, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,Ctl,InvalidUtf8,Dot", - "ValueStr": "Slash,Ctl,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sse_kms_key_id", + "FieldName": "", + "Help": "If using KMS ID you must provide the ARN of Key.", + "Provider": "AWS,Ceph,Minio", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "None" + }, + { + "Value": "arn:aws:kms:us-east-1:*", + "Help": "arn:aws:kms:*" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "swift", - "Description": "OpenStack Swift (Rackspace Cloud Files, Blomp Cloud Storage, Memset Memstore, OVH)", - "Prefix": "swift", - "Options": [ - { - "Name": "env_auth", - "Help": "Get swift credentials from environment variables in standard OpenStack form.", - "Provider": "", - "Default": false, - "Value": null, - "Examples": [ - { - "Value": "false", - "Help": "Enter swift credentials in the next step.", - "Provider": "" - }, - { - "Value": "true", - "Help": "Get swift credentials from environment vars.\nLeave other fields blank if using this.", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "user", - "Help": "User name to log in (OS_USERNAME).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "key", - "Help": "API key or password (OS_PASSWORD).", - "Provider": "", - "Default": "", - "Value": null, - 
"ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth", - "Help": "Authentication URL for server (OS_AUTH_URL).", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "https://auth.api.rackspacecloud.com/v1.0", - "Help": "Rackspace US", - "Provider": "" - }, - { - "Value": "https://lon.auth.api.rackspacecloud.com/v1.0", - "Help": "Rackspace UK", - "Provider": "" - }, - { - "Value": "https://identity.api.rackspacecloud.com/v2.0", - "Help": "Rackspace v2", - "Provider": "" - }, - { - "Value": "https://auth.storage.memset.com/v1.0", - "Help": "Memset Memstore UK", - "Provider": "" - }, - { - "Value": "https://auth.storage.memset.com/v2.0", - "Help": "Memset Memstore UK v2", - "Provider": "" - }, - { - "Value": "https://auth.cloud.ovh.net/v3", - "Help": "OVH", - "Provider": "" - }, - { - "Value": "https://authenticate.ain.net", - "Help": "Blomp Cloud Storage", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "user_id", - "Help": "User ID to log in - optional - most swift systems use user and leave this blank (v3 auth) (OS_USER_ID).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "domain", - "Help": "User domain - optional (v3 auth) (OS_USER_DOMAIN_NAME)", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "tenant", - "Help": "Tenant name - optional for v1 auth, this or tenant_id required otherwise (OS_TENANT_NAME or OS_PROJECT_NAME).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "tenant_id", - "Help": "Tenant ID - optional for v1 auth, this or tenant required otherwise (OS_TENANT_ID).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "tenant_domain", - "Help": "Tenant domain - optional (v3 auth) (OS_PROJECT_DOMAIN_NAME).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Region name - optional (OS_REGION_NAME).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": 
false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "storage_url", - "Help": "Storage URL - optional (OS_STORAGE_URL).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_token", - "Help": "Auth Token from alternate authentication - optional (OS_AUTH_TOKEN).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "application_credential_id", - "Help": "Application Credential ID (OS_APPLICATION_CREDENTIAL_ID).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "application_credential_name", - "Help": "Application Credential Name (OS_APPLICATION_CREDENTIAL_NAME).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "application_credential_secret", - "Help": "Application Credential Secret (OS_APPLICATION_CREDENTIAL_SECRET).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_version", - "Help": "AuthVersion - optional - set to (1,2,3) if your auth URL has no version (ST_AUTH_VERSION).", - "Provider": "", - "Default": 0, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "0", - "ValueStr": "0", - "Type": "int" - }, - { - "Name": "endpoint_type", - "Help": "Endpoint type to choose from the service catalogue (OS_ENDPOINT_TYPE).", - "Provider": "", - "Default": "public", - "Value": null, - "Examples": [ - { - "Value": "public", - "Help": "Public (default, choose this if not sure)", - "Provider": "" - }, - { - "Value": "internal", - "Help": "Internal (use internal service net)", - "Provider": "" - }, - { - "Value": "admin", - "Help": "Admin", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "public", - "ValueStr": "public", - "Type": "string" - }, - { - "Name": "leave_parts_on_error", - "Help": "If true avoid calling abort upload on a failure.\n\nIt should be set to true for resuming uploads across different sessions.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - 
"ValueStr": "false", - "Type": "bool" - }, - { - "Name": "storage_policy", - "Help": "The storage policy to use when creating a new container.\n\nThis applies the specified storage policy when creating a new\ncontainer. The policy cannot be changed afterwards. The allowed\nconfiguration values and their meaning depend on your Swift storage\nprovider.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "", - "Help": "Default", - "Provider": "" - }, - { - "Value": "pcs", - "Help": "OVH Public Cloud Storage", - "Provider": "" - }, - { - "Value": "pca", - "Help": "OVH Public Cloud Archive", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "chunk_size", - "Help": "Above this size files will be chunked into a _segments container.\n\nAbove this size files will be chunked into a _segments container. The\ndefault for this is 5 GiB which is its maximum value.", - "Provider": "", - "Default": 5368709120, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "5Gi", - "ValueStr": "5Gi", - "Type": "SizeSuffix" - }, - { - "Name": "no_chunk", - "Help": "Don't chunk files during streaming upload.\n\nWhen doing streaming uploads (e.g. using rcat or mount) setting this\nflag will cause the swift backend to not upload chunked files.\n\nThis will limit the maximum upload size to 5 GiB. However non chunked\nfiles are easier to deal with and have an MD5SUM.\n\nRclone will still chunk files bigger than chunk_size when doing normal\ncopy operations.", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "no_large_objects", - "Help": "Disable support for static and dynamic large objects\n\nSwift cannot transparently store files bigger than 5 GiB. There are\ntwo schemes for doing that, static or dynamic large objects, and the\nAPI does not allow rclone to determine whether a file is a static or\ndynamic large object without doing a HEAD on the object. Since these\nneed to be treated differently, this means rclone has to issue HEAD\nrequests for objects for example when reading checksums.\n\nWhen `no_large_objects` is set, rclone will assume that there are no\nstatic or dynamic large objects stored. This means it can stop doing\nthe extra HEAD calls which in turn increases performance greatly\nespecially when doing a swift to swift transfer with `--checksum` set.\n\nSetting this option implies `no_chunk` and also that no files will be\nuploaded in chunks, so files bigger than 5 GiB will just fail on\nupload.\n\nIf you set this option and there *are* static or dynamic large objects,\nthen this will give incorrect hashes for them. 
Downloads will succeed,\nbut other operations such as Remove and Copy will fail.\n", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 16777218, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,InvalidUtf8", - "ValueStr": "Slash,InvalidUtf8", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sse_customer_key", + "FieldName": "", + "Help": "To use SSE-C you may provide the secret encryption key used to encrypt/decrypt your data.\n\nAlternatively you can provide --sse-customer-key-base64.", + "Provider": "AWS,Ceph,ChinaMobile,Minio", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "None" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "union", - "Description": "Union merges the contents of several upstream fs", - "Prefix": "union", - "Options": [ - { - "Name": "upstreams", - "Help": "List of space separated upstreams.\n\nCan be 'upstreama:test/dir upstreamb:', '\"upstreama:test/space:ro dir\" upstreamb:', etc.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "action_policy", - "Help": "Policy to choose upstream on ACTION category.", - "Provider": "", - "Default": "epall", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "epall", - "ValueStr": "epall", - "Type": "string" - }, - { - "Name": "create_policy", - "Help": "Policy to choose upstream on CREATE category.", - "Provider": "", - "Default": "epmfs", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "epmfs", - "ValueStr": "epmfs", - "Type": "string" - }, - { - "Name": "search_policy", - "Help": "Policy to choose upstream on SEARCH category.", - "Provider": "", - "Default": "ff", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "ff", - "ValueStr": "ff", - "Type": "string" - }, - { - "Name": "cache_time", - "Help": "Cache time of usage and free space (in seconds).\n\nThis option is only useful when a path preserving policy is used.", - "Provider": "", - "Default": 120, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": 
false, - "DefaultStr": "120", - "ValueStr": "120", - "Type": "int" - }, - { - "Name": "min_free_space", - "Help": "Minimum viable free space for lfs/eplfs policies.\n\nIf a remote has less than this much free space then it won't be\nconsidered for use in lfs or eplfs policies.", - "Provider": "", - "Default": 1073741824, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "1Gi", - "ValueStr": "1Gi", - "Type": "SizeSuffix" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sse_customer_key_base64", + "FieldName": "", + "Help": "If using SSE-C you must provide the secret encryption key encoded in base64 format to encrypt/decrypt your data.\n\nAlternatively you can provide --sse-customer-key.", + "Provider": "AWS,Ceph,ChinaMobile,Minio", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "None" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": { - "System": null, - "Help": "Any metadata supported by the underlying remote is read and written." + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sse_customer_key_md5", + "FieldName": "", + "Help": "If using SSE-C you may provide the secret encryption key MD5 checksum (optional).\n\nIf you leave it blank, this is calculated automatically from the sse_customer_key provided.\n", + "Provider": "AWS,Ceph,ChinaMobile,Minio", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "None" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "storage_class", + "FieldName": "", + "Help": "The storage class to use when storing new objects in S3.", + "Provider": "AWS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Default" + }, + { + "Value": "STANDARD", + "Help": "Standard storage class" + }, + { + "Value": "REDUCED_REDUNDANCY", + "Help": "Reduced redundancy storage class" + }, + { + "Value": "STANDARD_IA", + "Help": "Standard Infrequent Access storage class" + }, + { + "Value": "ONEZONE_IA", + "Help": "One Zone Infrequent Access storage class" + }, + { + "Value": "GLACIER", + "Help": "Glacier Flexible Retrieval storage class" + }, + { + "Value": "DEEP_ARCHIVE", + "Help": "Glacier Deep Archive storage class" + }, + { + "Value": "INTELLIGENT_TIERING", + "Help": "Intelligent-Tiering storage class" + }, + { + "Value": "GLACIER_IR", + "Help": "Glacier Instant Retrieval storage class" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "storage_class", + "FieldName": "", + "Help": "The storage class to use when storing new objects in OSS.", + "Provider": "Alibaba", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Default" + }, + { + "Value": "STANDARD", + "Help": "Standard storage class" + }, + { + 
"Value": "GLACIER", + "Help": "Archive storage mode" + }, + { + "Value": "STANDARD_IA", + "Help": "Infrequent access storage mode" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "storage_class", + "FieldName": "", + "Help": "The storage class to use when storing new objects in ChinaMobile.", + "Provider": "ChinaMobile", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Default" + }, + { + "Value": "STANDARD", + "Help": "Standard storage class" + }, + { + "Value": "GLACIER", + "Help": "Archive storage mode" + }, + { + "Value": "STANDARD_IA", + "Help": "Infrequent access storage mode" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "storage_class", + "FieldName": "", + "Help": "The storage class to use when storing new objects in Liara", + "Provider": "Liara", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "STANDARD", + "Help": "Standard storage class" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "storage_class", + "FieldName": "", + "Help": "The storage class to use when storing new objects in ArvanCloud.", + "Provider": "ArvanCloud", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "STANDARD", + "Help": "Standard storage class" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "storage_class", + "FieldName": "", + "Help": "The storage class to use when storing new objects in Magalu.", + "Provider": "Magalu", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "STANDARD", + "Help": "Standard storage class" + }, + { + "Value": "GLACIER_IR", + "Help": "Glacier Instant Retrieval storage class" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "storage_class", + "FieldName": "", + "Help": "The storage class to use when storing new objects in Tencent COS.", + "Provider": "TencentCOS", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Default" + }, + { + "Value": "STANDARD", + "Help": "Standard storage class" + }, + { + "Value": "ARCHIVE", + "Help": "Archive storage mode" + }, + { + "Value": "STANDARD_IA", + "Help": "Infrequent access storage mode" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "storage_class", + "FieldName": "", + "Help": "The storage class to use when storing new objects in S3.", + "Provider": "Scaleway", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Default." 
+ }, + { + "Value": "STANDARD", + "Help": "The Standard class for any upload.\nSuitable for on-demand content like streaming or CDN.\nAvailable in all regions." + }, + { + "Value": "GLACIER", + "Help": "Archived storage.\nPrices are lower, but it needs to be restored first to be accessed.\nAvailable in FR-PAR and NL-AMS regions." + }, + { + "Value": "ONEZONE_IA", + "Help": "One Zone - Infrequent Access.\nA good choice for storing secondary backup copies or easily re-creatable data.\nAvailable in the FR-PAR region only." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "storage_class", + "FieldName": "", + "Help": "The storage class to use when storing new objects in Qiniu.", + "Provider": "Qiniu", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "STANDARD", + "Help": "Standard storage class" + }, + { + "Value": "LINE", + "Help": "Infrequent access storage mode" + }, + { + "Value": "GLACIER", + "Help": "Archive storage mode" + }, + { + "Value": "DEEP_ARCHIVE", + "Help": "Deep archive storage mode" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "upload_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to chunked upload.\n\nAny files larger than this will be uploaded in chunks of chunk_size.\nThe minimum is 0 and the maximum is 5 GiB.", + "Default": 209715200, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "200Mi", + "ValueStr": "200Mi", + "Type": "SizeSuffix" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Chunk size to use for uploading.\n\nWhen uploading files larger than upload_cutoff or files with unknown\nsize (e.g. from \"rclone rcat\" or uploaded with \"rclone mount\" or google\nphotos or google docs) they will be uploaded as multipart uploads\nusing this chunk size.\n\nNote that \"--s3-upload-concurrency\" chunks of this size are buffered\nin memory per transfer.\n\nIf you are transferring large files over high-speed links and you have\nenough memory, then increasing this will speed up the transfers.\n\nRclone will automatically increase the chunk size when uploading a\nlarge file of known size to stay below the 10,000 chunks limit.\n\nFiles of unknown size are uploaded with the configured\nchunk_size. Since the default chunk size is 5 MiB and there can be at\nmost 10,000 chunks, this means that by default the maximum size of\na file you can stream upload is 48 GiB. If you wish to stream upload\nlarger files then you will need to increase chunk_size.\n\nIncreasing the chunk size decreases the accuracy of the progress\nstatistics displayed with \"-P\" flag. 
Rclone treats chunk as sent when\nit's buffered by the AWS SDK, when in fact it may still be uploading.\nA bigger chunk size means a bigger AWS SDK buffer and progress\nreporting more deviating from the truth.\n", + "Default": 5242880, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "5Mi", + "ValueStr": "5Mi", + "Type": "SizeSuffix" + }, + { + "Name": "max_upload_parts", + "FieldName": "", + "Help": "Maximum number of parts in a multipart upload.\n\nThis option defines the maximum number of multipart chunks to use\nwhen doing a multipart upload.\n\nThis can be useful if a service does not support the AWS S3\nspecification of 10,000 chunks.\n\nRclone will automatically increase the chunk size when uploading a\nlarge file of a known size to stay below this number of chunks limit.\n", + "Default": 10000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10000", + "ValueStr": "10000", + "Type": "int" + }, + { + "Name": "copy_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to multipart copy.\n\nAny files larger than this that need to be server-side copied will be\ncopied in chunks of this size.\n\nThe minimum is 0 and the maximum is 5 GiB.", + "Default": 4999610368, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "4.656Gi", + "ValueStr": "4.656Gi", + "Type": "SizeSuffix" + }, + { + "Name": "disable_checksum", + "FieldName": "", + "Help": "Don't store MD5 checksum with object metadata.\n\nNormally rclone will calculate the MD5 checksum of the input before\nuploading it so it can add it to metadata on the object. This is great\nfor data integrity checking but can cause long delays for large files\nto start uploading.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "shared_credentials_file", + "FieldName": "", + "Help": "Path to the shared credentials file.\n\nIf env_auth = true then rclone can use a shared credentials file.\n\nIf this variable is empty rclone will look for the\n\"AWS_SHARED_CREDENTIALS_FILE\" env variable. If the env value is empty\nit will default to the current user's home directory.\n\n Linux/OSX: \"$HOME/.aws/credentials\"\n Windows: \"%USERPROFILE%\\.aws\\credentials\"\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "profile", + "FieldName": "", + "Help": "Profile to use in the shared credentials file.\n\nIf env_auth = true then rclone can use a shared credentials file. 
This\nvariable controls which profile is used in that file.\n\nIf empty it will default to the environment variable \"AWS_PROFILE\" or\n\"default\" if that environment variable is also not set.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "session_token", + "FieldName": "", + "Help": "An AWS session token.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "upload_concurrency", + "FieldName": "", + "Help": "Concurrency for multipart uploads and copies.\n\nThis is the number of chunks of the same file that are uploaded\nconcurrently for multipart uploads and copies.\n\nIf you are uploading small numbers of large files over high-speed links\nand these uploads do not fully utilize your bandwidth, then increasing\nthis may help to speed up the transfers.", + "Default": 4, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "4", + "ValueStr": "4", + "Type": "int" + }, + { + "Name": "force_path_style", + "FieldName": "", + "Help": "If true use path style access if false use virtual hosted style.\n\nIf this is true (the default) then rclone will use path style access,\nif false then rclone will use virtual path style. See [the AWS S3\ndocs](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingBucket.html#access-bucket-intro)\nfor more info.\n\nSome providers (e.g. AWS, Aliyun OSS, Netease COS, or Tencent COS) require this set to\nfalse - rclone will do this automatically based on the provider\nsetting.\n\nNote that if your bucket isn't a valid DNS name, i.e. has '.' or '_' in,\nyou'll need to set this to true.\n", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "v2_auth", + "FieldName": "", + "Help": "If true use v2 authentication.\n\nIf this is false (the default) then rclone will use v4 authentication.\nIf it is set then rclone will use v2 authentication.\n\nUse this only if v4 signatures don't work, e.g. 
pre Jewel/v10 CEPH.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_dual_stack", + "FieldName": "", + "Help": "If true use AWS S3 dual-stack endpoint (IPv6 support).\n\nSee [AWS Docs on Dualstack Endpoints](https://docs.aws.amazon.com/AmazonS3/latest/userguide/dual-stack-endpoints.html)", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_accelerate_endpoint", + "FieldName": "", + "Help": "If true use the AWS S3 accelerated endpoint.\n\nSee: [AWS S3 Transfer acceleration](https://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration-examples.html)", + "Provider": "AWS", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "leave_parts_on_error", + "FieldName": "", + "Help": "If true avoid calling abort upload on a failure, leaving all successfully uploaded parts on S3 for manual recovery.\n\nIt should be set to true for resuming uploads across different sessions.\n\nWARNING: Storing parts of an incomplete multipart upload counts towards space usage on S3 and will add additional costs if not cleaned up.\n", + "Provider": "AWS", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "list_chunk", + "FieldName": "", + "Help": "Size of listing chunk (response list for each ListObject S3 request).\n\nThis option is also known as \"MaxKeys\", \"max-items\", or \"page-size\" from the AWS S3 specification.\nMost services truncate the response list to 1000 objects even if requested more than that.\nIn AWS S3 this is a global maximum and cannot be changed, see [AWS S3](https://docs.aws.amazon.com/cli/latest/reference/s3/ls.html).\nIn Ceph, this can be increased with the \"rgw list buckets max chunk\" option.\n", + "Default": 1000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1000", + "ValueStr": "1000", + "Type": "int" + }, + { + "Name": "list_version", + "FieldName": "", + "Help": "Version of ListObjects to use: 1,2 or 0 for auto.\n\nWhen S3 originally launched it only provided the ListObjects call to\nenumerate objects in a bucket.\n\nHowever in May 2016 the ListObjectsV2 call was introduced. This is\nmuch higher performance and should be used if at all possible.\n\nIf set to the default, 0, rclone will guess according to the provider\nset which list objects method to call. 
If it guesses wrong, then it\nmay be set manually here.\n", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0", + "ValueStr": "0", + "Type": "int" + }, + { + "Name": "list_url_encode", + "FieldName": "", + "Help": "Whether to url encode listings: true/false/unset\n\nSome providers support URL encoding listings and where this is\navailable this is more reliable when using control characters in file\nnames. If this is set to unset (the default) then rclone will choose\naccording to the provider setting what to apply, but you can override\nrclone's choice here.\n", + "Default": { + "Value": false, + "Valid": false + }, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "unset", + "ValueStr": "unset", + "Type": "Tristate" + }, + { + "Name": "no_check_bucket", + "FieldName": "", + "Help": "If set, don't attempt to check the bucket exists or create it.\n\nThis can be useful when trying to minimise the number of transactions\nrclone does if you know the bucket exists already.\n\nIt can also be needed if the user you are using does not have bucket\ncreation permissions. Before v1.52.0 this would have passed silently\ndue to a bug.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_head", + "FieldName": "", + "Help": "If set, don't HEAD uploaded objects to check integrity.\n\nThis can be useful when trying to minimise the number of transactions\nrclone does.\n\nSetting it means that if rclone receives a 200 OK message after\nuploading an object with PUT then it will assume that it got uploaded\nproperly.\n\nIn particular it will assume:\n\n- the metadata, including modtime, storage class and content type was as uploaded\n- the size was as uploaded\n\nIt reads the following items from the response for a single part PUT:\n\n- the MD5SUM\n- The uploaded date\n\nFor multipart uploads these items aren't read.\n\nIf an source object of unknown length is uploaded then rclone **will** do a\nHEAD request.\n\nSetting this flag increases the chance for undetected upload failures,\nin particular an incorrect size, so it isn't recommended for normal\noperation. 
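The "list_chunk" and "list_version" options above map directly onto the S3 ListObjects APIs. A minimal sketch of the listing loop they control, assuming boto3 is available and using a hypothetical bucket name (neither comes from this schema):

    import boto3  # assumption: boto3 is installed; rclone itself uses the Go SDK

    s3 = boto3.client("s3")
    kwargs = {"Bucket": "example-bucket", "MaxKeys": 1000}  # MaxKeys ~ "list_chunk"
    while True:
        resp = s3.list_objects_v2(**kwargs)  # the V2 call ("list_version" 2)
        for obj in resp.get("Contents", []):
            print(obj["Key"], obj["Size"])
        if not resp.get("IsTruncated"):
            break
        kwargs["ContinuationToken"] = resp["NextContinuationToken"]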
In practice the chance of an undetected upload failure is\nvery small even with this flag.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_head_object", + "FieldName": "", + "Help": "If set, do not do HEAD before GET when getting objects.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50331650, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,InvalidUtf8,Dot", + "ValueStr": "Slash,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "memory_pool_flush_time", + "FieldName": "", + "Help": "How often internal memory buffer pools will be flushed. (no longer used)", + "Default": 60000000000, + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1m0s", + "ValueStr": "1m0s", + "Type": "Duration" + }, + { + "Name": "memory_pool_use_mmap", + "FieldName": "", + "Help": "Whether to use mmap buffers in internal memory pool. (no longer used)", + "Default": false, + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "disable_http2", + "FieldName": "", + "Help": "Disable usage of http2 for S3 backends.\n\nThere is currently an unsolved issue with the s3 (specifically minio) backend\nand HTTP/2. HTTP/2 is enabled by default for the s3 backend but can be\ndisabled here. 
When the issue is solved this flag will be removed.\n\nSee: https://github.com/rclone/rclone/issues/4673, https://github.com/rclone/rclone/issues/3631\n\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "download_url", + "FieldName": "", + "Help": "Custom endpoint for downloads.\nThis is usually set to a CloudFront CDN URL as AWS S3 offers\ncheaper egress for data downloaded through the CloudFront network.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "directory_markers", + "FieldName": "", + "Help": "Upload an empty object with a trailing slash when a new directory is created\n\nEmpty folders are unsupported for bucket based remotes, this option creates an empty\nobject ending with \"/\", to persist the folder.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_multipart_etag", + "FieldName": "", + "Help": "Whether to use ETag in multipart uploads for verification\n\nThis should be true, false or left unset to use the default for the provider.\n", + "Default": { + "Value": false, + "Valid": false + }, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "unset", + "ValueStr": "unset", + "Type": "Tristate" + }, + { + "Name": "use_unsigned_payload", + "FieldName": "", + "Help": "Whether to use an unsigned payload in PutObject\n\nRclone has to avoid the AWS SDK seeking the body when calling\nPutObject. The AWS provider can add checksums in the trailer to avoid\nseeking but other providers can't.\n\nThis should be true, false or left unset to use the default for the provider.\n", + "Default": { + "Value": false, + "Valid": false + }, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "unset", + "ValueStr": "unset", + "Type": "Tristate" + }, + { + "Name": "use_presigned_request", + "FieldName": "", + "Help": "Whether to use a presigned request or PutObject for single part uploads\n\nIf this is false rclone will use PutObject from the AWS SDK to upload\nan object.\n\nVersions of rclone < 1.59 use presigned requests to upload a single\npart object and setting this flag to true will re-enable that\nfunctionality. 
This shouldn't be necessary except in exceptional\ncircumstances or for testing.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "versions", + "FieldName": "", + "Help": "Include old versions in directory listings.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "version_at", + "FieldName": "", + "Help": "Show file versions as they were at the specified time.\n\nThe parameter should be a date, \"2006-01-02\", datetime \"2006-01-02\n15:04:05\" or a duration for that long ago, eg \"100d\" or \"1h\".\n\nNote that when using this no file write operations are permitted,\nso you can't upload files or delete them.\n\nSee [the time option docs](/docs/#time-option) for valid formats.\n", + "Default": "0001-01-01T00:00:00Z", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "off", + "ValueStr": "off", + "Type": "Time" + }, + { + "Name": "version_deleted", + "FieldName": "", + "Help": "Show deleted file markers when using versions.\n\nThis shows deleted file markers in the listing when using versions. These will appear\nas 0 size files. The only operation which can be performed on them is deletion.\n\nDeleting a delete marker will reveal the previous version.\n\nDeleted files will always show with a timestamp.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "decompress", + "FieldName": "", + "Help": "If set this will decompress gzip encoded objects.\n\nIt is possible to upload objects to S3 with \"Content-Encoding: gzip\"\nset. Normally rclone will download these files as compressed objects.\n\nIf this flag is set then rclone will decompress these files with\n\"Content-Encoding: gzip\" as they are received. This means that rclone\ncan't check the size and hash but the file contents will be decompressed.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "might_gzip", + "FieldName": "", + "Help": "Set this if the backend might gzip objects.\n\nNormally providers will not alter objects when they are downloaded. 
If\nan object was not uploaded with `Content-Encoding: gzip` then it won't\nbe set on download.\n\nHowever some providers may gzip objects even if they weren't uploaded\nwith `Content-Encoding: gzip` (eg Cloudflare).\n\nA symptom of this would be receiving errors like\n\n ERROR corrupted on transfer: sizes differ NNN vs MMM\n\nIf you set this flag and rclone downloads an object with\nContent-Encoding: gzip set and chunked transfer encoding, then rclone\nwill decompress the object on the fly.\n\nIf this is set to unset (the default) then rclone will choose\naccording to the provider setting what to apply, but you can override\nrclone's choice here.\n", + "Default": { + "Value": false, + "Valid": false + }, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "unset", + "ValueStr": "unset", + "Type": "Tristate" + }, + { + "Name": "use_accept_encoding_gzip", + "FieldName": "", + "Help": "Whether to send `Accept-Encoding: gzip` header.\n\nBy default, rclone will append `Accept-Encoding: gzip` to the request to download\ncompressed objects whenever possible.\n\nHowever some providers such as Google Cloud Storage may alter the HTTP headers, breaking\nthe signature of the request.\n\nA symptom of this would be receiving errors like\n\n\tSignatureDoesNotMatch: The request signature we calculated does not match the signature you provided.\n\nIn this case, you might want to try disabling this option.\n", + "Default": { + "Value": false, + "Valid": false + }, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "unset", + "ValueStr": "unset", + "Type": "Tristate" + }, + { + "Name": "no_system_metadata", + "FieldName": "", + "Help": "Suppress setting and reading of system metadata", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "sts_endpoint", + "FieldName": "", + "Help": "Endpoint for STS (deprecated).\n\nLeave blank if using AWS to use the default endpoint for the region.", + "Provider": "AWS", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "use_already_exists", + "FieldName": "", + "Help": "Set if rclone should report BucketAlreadyExists errors on bucket creation.\n\nAt some point during the evolution of the s3 protocol, AWS started\nreturning an `AlreadyOwnedByYou` error when attempting to create a\nbucket that the user already owned, rather than a\n`BucketAlreadyExists` error.\n\nUnfortunately exactly what has been implemented by s3 clones is a\nlittle inconsistent, some return `AlreadyOwnedByYou`, some return\n`BucketAlreadyExists` and some return no error at all.\n\nThis is important to rclone because it ensures the bucket exists by\ncreating it on quite a lot of operations (unless\n`--s3-no-check-bucket` is used).\n\nIf rclone knows the provider can return `AlreadyOwnedByYou` or returns\nno error then it can report `BucketAlreadyExists` errors when the user\nattempts to create a bucket not owned by them. 
Otherwise rclone\nignores the `BucketAlreadyExists` error which can lead to confusion.\n\nThis should be automatically set correctly for all providers rclone\nknows about - please make a bug report if not.\n", + "Default": { + "Value": false, + "Valid": false + }, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "unset", + "ValueStr": "unset", + "Type": "Tristate" + }, + { + "Name": "use_multipart_uploads", + "FieldName": "", + "Help": "Set if rclone should use multipart uploads.\n\nYou can change this if you want to disable the use of multipart uploads.\nThis shouldn't be necessary in normal operation.\n\nThis should be automatically set correctly for all providers rclone\nknows about - please make a bug report if not.\n", + "Default": { + "Value": false, + "Valid": false + }, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "unset", + "ValueStr": "unset", + "Type": "Tristate" + }, + { + "Name": "use_x_id", + "FieldName": "", + "Help": "Set if rclone should add x-id URL parameters.\n\nYou can change this if you want to disable the AWS SDK from\nadding x-id URL parameters.\n\nThis shouldn't be necessary in normal operation.\n\nThis should be automatically set correctly for all providers rclone\nknows about - please make a bug report if not.\n", + "Default": { + "Value": false, + "Valid": false + }, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "unset", + "ValueStr": "unset", + "Type": "Tristate" + }, + { + "Name": "sign_accept_encoding", + "FieldName": "", + "Help": "Set if rclone should include Accept-Encoding as part of the signature.\n\nYou can change this if you want to stop rclone including\nAccept-Encoding as part of the signature.\n\nThis shouldn't be necessary in normal operation.\n\nThis should be automatically set correctly for all providers rclone\nknows about - please make a bug report if not.\n", + "Default": { + "Value": false, + "Valid": false + }, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "unset", + "ValueStr": "unset", + "Type": "Tristate" + }, + { + "Name": "directory_bucket", + "FieldName": "", + "Help": "Set to use AWS Directory Buckets\n\nIf you are using an AWS Directory Bucket then set this flag.\n\nThis will ensure no `Content-Md5` headers are sent and ensure `ETag`\nheaders are not interpreted as MD5 sums. `X-Amz-Meta-Md5chksum` will\nbe set on all objects whether single or multipart uploaded.\n\nThis also sets `no_check_bucket = true`.\n\nNote that Directory Buckets do not support:\n\n- Versioning\n- `Content-Encoding: gzip`\n\nRclone limitations with Directory Buckets:\n\n- rclone does not support creating Directory Buckets with `rclone mkdir`\n- ... or removing them with `rclone rmdir` yet\n- Directory Buckets do not appear when doing `rclone lsf` at the top level.\n- Rclone can't remove auto created directories yet. 
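Several of the options above ("use_multipart_etag", "use_already_exists", "use_x_id", "sign_accept_encoding") use rclone's "Tristate" type, which this schema serializes as {"Value": ..., "Valid": ...} with a DefaultStr of "unset". A consumer of this JSON can map that onto Optional[bool]; a minimal sketch, with an illustrative helper name:

    from typing import Any, Optional

    def tristate_default(option: dict[str, Any]) -> Optional[bool]:
        """Map a Tristate default to True/False/None, where None means "unset"."""
        default = option.get("Default")
        if isinstance(default, dict) and "Valid" in default:
            return default["Value"] if default["Valid"] else None
        return default  # plain bool defaults pass through unchanged

    # {"Value": False, "Valid": False} -> None, matching DefaultStr "unset"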
In theory this should\n work with `directory_markers = true` but it doesn't.\n- Directories don't seem to appear in recursive (ListR) listings.\n", + "Provider": "AWS", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "sdk_log_mode", + "FieldName": "", + "Help": "Set to debug the SDK\n\nThis can be set to a comma separated list of the following functions:\n\n- `Signing`\n- `Retries`\n- `Request`\n- `RequestWithBody`\n- `Response`\n- `ResponseWithBody`\n- `DeprecatedUsage`\n- `RequestEventMessage`\n- `ResponseEventMessage`\n\nUse `Off` to disable and `All` to set all log levels. You will need to\nuse `-vv` to see the debug level logs.\n", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Off", + "ValueStr": "Off", + "Type": "Bits" + }, + { + "Name": "ibm_api_key", + "FieldName": "", + "Help": "IBM API Key to be used to obtain IAM token", + "Provider": "IBMCOS", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "ibm_resource_instance_id", + "FieldName": "", + "Help": "IBM service instance id", + "Provider": "IBMCOS", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": [ + { + "Name": "restore", + "Short": "Restore objects from GLACIER or INTELLIGENT-TIERING archive tier", + "Long": "This command can be used to restore one or more objects from GLACIER to normal storage \nor from INTELLIGENT-TIERING Archive Access / Deep Archive Access tier to the Frequent Access tier.\n\nUsage Examples:\n\n rclone backend restore s3:bucket/path/to/ --include /object -o priority=PRIORITY -o lifetime=DAYS\n rclone backend restore s3:bucket/path/to/directory -o priority=PRIORITY -o lifetime=DAYS\n rclone backend restore s3:bucket -o priority=PRIORITY -o lifetime=DAYS\n rclone backend restore s3:bucket/path/to/directory -o priority=PRIORITY\n\nThis flag also obeys the filters. Test first with --interactive/-i or --dry-run flags\n\n rclone --interactive backend restore --include \"*.txt\" s3:bucket/path -o priority=Standard -o lifetime=1\n\nAll the objects shown will be marked for restore, then\n\n rclone backend restore --include \"*.txt\" s3:bucket/path -o priority=Standard -o lifetime=1\n\nIt returns a list of status dictionaries with Remote and Status\nkeys. 
The Status will be OK if it was successful or an error message\nif not.\n\n [\n {\n \"Status\": \"OK\",\n \"Remote\": \"test.txt\"\n },\n {\n \"Status\": \"OK\",\n \"Remote\": \"test/file4.txt\"\n }\n ]\n\n", + "Opts": { + "description": "The optional description for the job.", + "lifetime": "Lifetime of the active copy in days, ignored for INTELLIGENT-TIERING storage", + "priority": "Priority of restore: Standard|Expedited|Bulk" + } + }, + { + "Name": "restore-status", + "Short": "Show the restore status for objects being restored from GLACIER or INTELLIGENT-TIERING storage", + "Long": "This command can be used to show the status for objects being restored from GLACIER to normal storage\nor from INTELLIGENT-TIERING Archive Access / Deep Archive Access tier to the Frequent Access tier.\n\nUsage Examples:\n\n rclone backend restore-status s3:bucket/path/to/object\n rclone backend restore-status s3:bucket/path/to/directory\n rclone backend restore-status -o all s3:bucket/path/to/directory\n\nThis command does not obey the filters.\n\nIt returns a list of status dictionaries.\n\n [\n {\n \"Remote\": \"file.txt\",\n \"VersionID\": null,\n \"RestoreStatus\": {\n \"IsRestoreInProgress\": true,\n \"RestoreExpiryDate\": \"2023-09-06T12:29:19+01:00\"\n },\n \"StorageClass\": \"GLACIER\"\n },\n {\n \"Remote\": \"test.pdf\",\n \"VersionID\": null,\n \"RestoreStatus\": {\n \"IsRestoreInProgress\": false,\n \"RestoreExpiryDate\": \"2023-09-06T12:29:19+01:00\"\n },\n \"StorageClass\": \"DEEP_ARCHIVE\"\n },\n {\n \"Remote\": \"test.gz\",\n \"VersionID\": null,\n \"RestoreStatus\": {\n \"IsRestoreInProgress\": true,\n \"RestoreExpiryDate\": \"null\"\n },\n \"StorageClass\": \"INTELLIGENT_TIERING\"\n }\n ]\n", + "Opts": { + "all": "if set then show all objects, not just ones with restore status" + } + }, + { + "Name": "list-multipart-uploads", + "Short": "List the unfinished multipart uploads", + "Long": "This command lists the unfinished multipart uploads in JSON format.\n\n rclone backend list-multipart s3:bucket/path/to/object\n\nIt returns a dictionary of buckets with values as lists of unfinished\nmultipart uploads.\n\nYou can call it with no bucket in which case it lists all bucket, with\na bucket or with a bucket and path.\n\n {\n \"rclone\": [\n {\n \"Initiated\": \"2020-06-26T14:20:36Z\",\n \"Initiator\": {\n \"DisplayName\": \"XXX\",\n \"ID\": \"arn:aws:iam::XXX:user/XXX\"\n },\n \"Key\": \"KEY\",\n \"Owner\": {\n \"DisplayName\": null,\n \"ID\": \"XXX\"\n },\n \"StorageClass\": \"STANDARD\",\n \"UploadId\": \"XXX\"\n }\n ],\n \"rclone-1000files\": [],\n \"rclone-dst\": []\n }\n\n", + "Opts": null + }, + { + "Name": "cleanup", + "Short": "Remove unfinished multipart uploads.", + "Long": "This command removes unfinished multipart uploads of age greater than\nmax-age which defaults to 24 hours.\n\nNote that you can use --interactive/-i or --dry-run with this command to see what\nit would do.\n\n rclone backend cleanup s3:bucket/path/to/object\n rclone backend cleanup -o max-age=7w s3:bucket/path/to/object\n\nDurations are parsed as per the rest of rclone, 2h, 7d, 7w etc.\n", + "Opts": { + "max-age": "Max age of upload to delete" + } + }, + { + "Name": "cleanup-hidden", + "Short": "Remove old versions of files.", + "Long": "This command removes any old hidden versions of files\non a versions enabled bucket.\n\nNote that you can use --interactive/-i or --dry-run with this command to see what\nit would do.\n\n rclone backend cleanup-hidden s3:bucket/path/to/dir\n", + "Opts": null + }, + { + "Name": 
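The "restore" and related entries above document `rclone backend` subcommands that print JSON status lists on stdout. A sketch of driving one from Python, assuming an rclone binary on PATH; the wrapper name and remote path are illustrative:

    import json
    import subprocess

    def restore_objects(remote: str, priority: str = "Standard", lifetime_days: int = 1) -> list[dict]:
        """Run 'rclone backend restore' as documented above and parse its status list."""
        out = subprocess.run(
            ["rclone", "backend", "restore", remote,
             "-o", f"priority={priority}", "-o", f"lifetime={lifetime_days}"],
            check=True, capture_output=True, text=True,
        ).stdout
        return json.loads(out)  # e.g. [{"Status": "OK", "Remote": "test.txt"}, ...]

    # restore_objects("s3:bucket/path/to/directory")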
"versioning", + "Short": "Set/get versioning support for a bucket.", + "Long": "This command sets versioning support if a parameter is\npassed and then returns the current versioning status for the bucket\nsupplied.\n\n rclone backend versioning s3:bucket # read status only\n rclone backend versioning s3:bucket Enabled\n rclone backend versioning s3:bucket Suspended\n\nIt may return \"Enabled\", \"Suspended\" or \"Unversioned\". Note that once versioning\nhas been enabled the status can't be set back to \"Unversioned\".\n", + "Opts": null + }, + { + "Name": "set", + "Short": "Set command for updating the config parameters.", + "Long": "This set command can be used to update the config parameters\nfor a running s3 backend.\n\nUsage Examples:\n\n rclone backend set s3: [-o opt_name=opt_value] [-o opt_name2=opt_value2]\n rclone rc backend/command command=set fs=s3: [-o opt_name=opt_value] [-o opt_name2=opt_value2]\n rclone rc backend/command command=set fs=s3: -o session_token=X -o access_key_id=X -o secret_access_key=X\n\nThe option keys are named as they are in the config file.\n\nThis rebuilds the connection to the s3 backend when it is called with\nthe new parameters. Only new parameters need be passed as the values\nwill default to those currently in use.\n\nIt doesn't return anything.\n", + "Opts": null + } + ], + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": { + "btime": { + "Help": "Time of file birth (creation) read from Last-Modified header", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z07:00", + "ReadOnly": true + }, + "cache-control": { + "Help": "Cache-Control header", + "Type": "string", + "Example": "no-cache", + "ReadOnly": false + }, + "content-disposition": { + "Help": "Content-Disposition header", + "Type": "string", + "Example": "inline", + "ReadOnly": false + }, + "content-encoding": { + "Help": "Content-Encoding header", + "Type": "string", + "Example": "gzip", + "ReadOnly": false + }, + "content-language": { + "Help": "Content-Language header", + "Type": "string", + "Example": "en-US", + "ReadOnly": false + }, + "content-type": { + "Help": "Content-Type header", + "Type": "string", + "Example": "text/plain", + "ReadOnly": false + }, + "mtime": { + "Help": "Time of last modification, read from rclone metadata", + "Type": "RFC 3339", + "Example": "2006-01-02T15:04:05.999999999Z07:00", + "ReadOnly": false + }, + "tier": { + "Help": "Tier of the object", + "Type": "string", + "Example": "GLACIER", + "ReadOnly": true } - }, - { - "Name": "uptobox", - "Description": "Uptobox", - "Prefix": "uptobox", - "Options": [ - { - "Name": "access_token", - "Help": "Your access token.\n\nGet it from https://uptobox.com/my_account.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "private", - "Help": "Set to make uploaded files private", - "Provider": "", - "Default": false, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "false", - "ValueStr": "false", - "Type": "bool" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 50561070, - 
"Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Slash,LtGt,DoubleQuote,BackQuote,Del,Ctl,LeftSpace,InvalidUtf8,Dot", - "ValueStr": "Slash,LtGt,DoubleQuote,BackQuote,Del,Ctl,LeftSpace,InvalidUtf8,Dot", - "Type": "MultiEncoder" - } + }, + "Help": "User metadata is stored as x-amz-meta- keys. S3 metadata keys are case insensitive and are always returned in lower case." + } + }, + { + "Name": "seafile", + "Description": "seafile", + "Prefix": "seafile", + "Options": [ + { + "Name": "url", + "FieldName": "", + "Help": "URL of seafile host to connect to.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "https://cloud.seafile.com/", + "Help": "Connect to cloud.seafile.com." + } + ], + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "user", + "FieldName": "", + "Help": "User name (usually email address).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "pass", + "FieldName": "", + "Help": "Password.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "2fa", + "FieldName": "", + "Help": "Two-factor authentication ('true' if the account has 2FA enabled).", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "library", + "FieldName": "", + "Help": "Name of the library.\n\nLeave blank to access all non-encrypted libraries.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "library_key", + "FieldName": "", + "Help": "Library password (for encrypted libraries only).\n\nLeave blank if you pass it through the command line.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "create_library", + "FieldName": "", + "Help": "Should rclone create a library if it doesn't exist.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "auth_token", + "FieldName": "", + "Help": "Authentication token.", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + 
"Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 16850954, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,DoubleQuote,BackSlash,Ctl,InvalidUtf8", + "ValueStr": "Slash,DoubleQuote,BackSlash,Ctl,InvalidUtf8", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "sftp", + "Description": "SSH/SFTP", + "Prefix": "sftp", + "Options": [ + { + "Name": "host", + "FieldName": "", + "Help": "SSH host to connect to.\n\nE.g. \"example.com\".", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "user", + "FieldName": "", + "Help": "SSH username.", + "Default": "vscode", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "vscode", + "ValueStr": "vscode", + "Type": "string" + }, + { + "Name": "port", + "FieldName": "", + "Help": "SSH port number.", + "Default": 22, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "22", + "ValueStr": "22", + "Type": "int" + }, + { + "Name": "pass", + "FieldName": "", + "Help": "SSH password, leave blank to use ssh-agent.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "key_pem", + "FieldName": "", + "Help": "Raw PEM-encoded private key.\n\nNote that this should be on a single line with line endings replaced with '\\n', eg\n\n key_pem = -----BEGIN RSA PRIVATE KEY-----\\nMaMbaIXtE\\n0gAMbMbaSsd\\nMbaass\\n-----END RSA PRIVATE KEY-----\n\nThis will generate the single line correctly:\n\n awk '{printf \"%s\\\\n\", $0}' < ~/.ssh/id_rsa\n\nIf specified, it will override the key_file parameter.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "key_file", + "FieldName": "", + "Help": "Path to PEM-encoded private key file.\n\nLeave blank or set key-use-agent to use ssh-agent.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "key_file_pass", + "FieldName": "", + "Help": "The passphrase to decrypt the PEM-encoded 
private key file.\n\nOnly PEM encrypted key files (old OpenSSH format) are supported. Encrypted keys\nin the new OpenSSH format can't be used.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "pubkey", + "FieldName": "", + "Help": "SSH public certificate for public certificate based authentication.\nSet this if you have a signed certificate you want to use for authentication.\nIf specified will override pubkey_file.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "pubkey_file", + "FieldName": "", + "Help": "Optional path to public key file.\n\nSet this if you have a signed certificate you want to use for authentication.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "known_hosts_file", + "FieldName": "", + "Help": "Optional path to known_hosts file.\n\nSet this value to enable server host key validation.\n\nLeading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "~/.ssh/known_hosts", + "Help": "Use OpenSSH's known_hosts file." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "key_use_agent", + "FieldName": "", + "Help": "When set forces the usage of the ssh-agent.\n\nWhen key-file is also set, the \".pub\" file of the specified key-file is read and only the associated key is\nrequested from the ssh-agent. This allows to avoid `Too many authentication failures for *username*` errors\nwhen the ssh-agent contains many keys.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_insecure_cipher", + "FieldName": "", + "Help": "Enable the use of insecure ciphers and key exchange methods.\n\nThis enables the use of the following insecure ciphers and key exchange methods:\n\n- aes128-cbc\n- aes192-cbc\n- aes256-cbc\n- 3des-cbc\n- diffie-hellman-group-exchange-sha256\n- diffie-hellman-group-exchange-sha1\n\nThose algorithms are insecure and may allow plaintext data to be recovered by an attacker.\n\nThis must be false if you use either ciphers or key_exchange advanced options.\n", + "Default": false, + "Value": null, + "Examples": [ + { + "Value": "false", + "Help": "Use default Cipher list." + }, + { + "Value": "true", + "Help": "Enables the use of the aes128-cbc cipher and diffie-hellman-group-exchange-sha256, diffie-hellman-group-exchange-sha1 key exchange." 
+ } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "disable_hashcheck", + "FieldName": "", + "Help": "Disable the execution of SSH commands to determine if remote file hashing is available.\n\nLeave blank or set to false to enable hashing (recommended), set to true to disable hashing.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "ask_password", + "FieldName": "", + "Help": "Allow asking for SFTP password when needed.\n\nIf this is set and no password is supplied then rclone will:\n- ask for a password\n- not contact the ssh agent\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "path_override", + "FieldName": "", + "Help": "Override path used by SSH shell commands.\n\nThis allows checksum calculation when SFTP and SSH paths are\ndifferent. This issue affects among others Synology NAS boxes.\n\nE.g. if shared folders can be found in directories representing volumes:\n\n rclone sync /home/local/directory remote:/directory --sftp-path-override /volume2/directory\n\nE.g. if home directory can be found in a shared folder called \"home\":\n\n rclone sync /home/local/directory remote:/home/directory --sftp-path-override /volume1/homes/USER/directory\n\t\nTo specify only the path to the SFTP remote's root, and allow rclone to add any relative subpaths automatically (including unwrapping/decrypting remotes as necessary), add the '@' character to the beginning of the path.\n\nE.g. the first example above could be rewritten as:\n\n\trclone sync /home/local/directory remote:/directory --sftp-path-override @/volume2\n\t\nNote that when using this method with Synology \"home\" folders, the full \"/homes/USER\" path should be specified instead of \"/home\".\n\nE.g. 
the second example above should be rewritten as:\n\n\trclone sync /home/local/directory remote:/homes/USER/directory --sftp-path-override @/volume1", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "set_modtime", + "FieldName": "", + "Help": "Set the modified time on the remote if set.", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "shell_type", + "FieldName": "", + "Help": "The type of SSH shell on remote server, if any.\n\nLeave blank for autodetect.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "none", + "Help": "No shell access" + }, + { + "Value": "unix", + "Help": "Unix shell" + }, + { + "Value": "powershell", + "Help": "PowerShell" + }, + { + "Value": "cmd", + "Help": "Windows Command Prompt" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "md5sum_command", + "FieldName": "", + "Help": "The command used to read md5 hashes.\n\nLeave blank for autodetect.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "sha1sum_command", + "FieldName": "", + "Help": "The command used to read sha1 hashes.\n\nLeave blank for autodetect.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "skip_links", + "FieldName": "", + "Help": "Set to skip any symlinks and any other non regular files.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "subsystem", + "FieldName": "", + "Help": "Specifies the SSH2 subsystem on the remote host.", + "Default": "sftp", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "sftp", + "ValueStr": "sftp", + "Type": "string" + }, + { + "Name": "server_command", + "FieldName": "", + "Help": "Specifies the path or command to run a sftp server on the remote host.\n\nThe subsystem option is ignored when server_command is defined.\n\nIf adding server_command to the configuration file please note that \nit should not be enclosed in quotes, since that will make rclone fail.\n\nA working example is:\n\n [remote_name]\n type = sftp\n server_command = sudo /usr/libexec/openssh/sftp-server", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "use_fstat", 
+ "FieldName": "", + "Help": "If set use fstat instead of stat.\n\nSome servers limit the amount of open files and calling Stat after opening\nthe file will throw an error from the server. Setting this flag will call\nFstat instead of Stat which is called on an already open file handle.\n\nIt has been found that this helps with IBM Sterling SFTP servers which have\n\"extractability\" level set to 1 which means only 1 file can be opened at\nany given time.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "disable_concurrent_reads", + "FieldName": "", + "Help": "If set don't use concurrent reads.\n\nNormally concurrent reads are safe to use and not using them will\ndegrade performance, so this option is disabled by default.\n\nSome servers limit the amount number of times a file can be\ndownloaded. Using concurrent reads can trigger this limit, so if you\nhave a server which returns\n\n Failed to copy: file does not exist\n\nThen you may need to enable this flag.\n\nIf concurrent reads are disabled, the use_fstat option is ignored.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "disable_concurrent_writes", + "FieldName": "", + "Help": "If set don't use concurrent writes.\n\nNormally rclone uses concurrent writes to upload files. This improves\nthe performance greatly, especially for distant servers.\n\nThis option disables concurrent writes should that be necessary.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "idle_timeout", + "FieldName": "", + "Help": "Max time before closing idle connections.\n\nIf no connections have been returned to the connection pool in the time\ngiven, rclone will empty the connection pool.\n\nSet to 0 to keep connections indefinitely.\n", + "Default": 60000000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1m0s", + "ValueStr": "1m0s", + "Type": "Duration" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Upload and download chunk size.\n\nThis controls the maximum size of payload in SFTP protocol packets.\nThe RFC limits this to 32768 bytes (32k), which is the default. However,\na lot of servers support larger sizes, typically limited to a maximum\ntotal package size of 256k, and setting it larger will increase transfer\nspeed dramatically on high latency links. This includes OpenSSH, and,\nfor example, using the value of 255k works well, leaving plenty of room\nfor overhead while still being within a total packet size of 256k.\n\nMake sure to test thoroughly before using a value higher than 32k,\nand only use it if you always connect to the same server or after\nsufficiently broad testing. If you get errors such as\n\"failed to send packet payload: EOF\", lots of \"connection lost\",\nor \"corrupted on transfer\", when copying a larger file, try lowering\nthe value. 
The server run by [rclone serve sftp](/commands/rclone_serve_sftp)\nsends packets with standard 32k maximum payload so you must not\nset a different chunk_size when downloading files, but it accepts\npackets up to the 256k total size, so for uploads the chunk_size\ncan be set as for the OpenSSH example above.\n", + "Default": 32768, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "32Ki", + "ValueStr": "32Ki", + "Type": "SizeSuffix" + }, + { + "Name": "concurrency", + "FieldName": "", + "Help": "The maximum number of outstanding requests for one file\n\nThis controls the maximum number of outstanding requests for one file.\nIncreasing it will increase throughput on high latency links at the\ncost of using more memory.\n", + "Default": 64, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "64", + "ValueStr": "64", + "Type": "int" + }, + { + "Name": "connections", + "FieldName": "", + "Help": "Maximum number of SFTP simultaneous connections, 0 for unlimited.\n\nNote that setting this is very likely to cause deadlocks so it should\nbe used with care.\n\nIf you are doing a sync or copy then make sure connections is one more\nthan the sum of `--transfers` and `--checkers`.\n\nIf you use `--check-first` then it just needs to be one more than the\nmaximum of `--checkers` and `--transfers`.\n\nSo for `connections 3` you'd use `--checkers 2 --transfers 2\n--check-first` or `--checkers 1 --transfers 1`.\n\n", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0", + "ValueStr": "0", + "Type": "int" + }, + { + "Name": "set_env", + "FieldName": "", + "Help": "Environment variables to pass to sftp and commands\n\nSet environment variables in the form:\n\n VAR=value\n\nto be passed to the sftp client and to any commands run (eg md5sum).\n\nPass multiple variables space separated, eg\n\n VAR1=value VAR2=value\n\nand pass variables with spaces in quotes, eg\n\n \"VAR3=value with space\" \"VAR4=value with space\" VAR5=nospacehere\n\n", + "Default": [], + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "SpaceSepList" + }, + { + "Name": "ciphers", + "FieldName": "", + "Help": "Space separated list of ciphers to be used for session encryption, ordered by preference.\n\nAt least one must match with server configuration. This can be checked for example using ssh -Q cipher.\n\nThis must not be set if use_insecure_cipher is true.\n\nExample:\n\n aes128-ctr aes192-ctr aes256-ctr aes128-gcm@openssh.com aes256-gcm@openssh.com\n", + "Default": [], + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "SpaceSepList" + }, + { + "Name": "key_exchange", + "FieldName": "", + "Help": "Space separated list of key exchange algorithms, ordered by preference.\n\nAt least one must match with server configuration. 
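The "connections" help above states the safe lower bound explicitly; encoding that rule as a check (the function is illustrative, not part of rclone):

    def min_sftp_connections(transfers: int, checkers: int, check_first: bool = False) -> int:
        """Smallest safe value for the sftp 'connections' option per the help above."""
        if check_first:
            return max(transfers, checkers) + 1
        return transfers + checkers + 1

    # min_sftp_connections(1, 1) == 3, and with --check-first
    # min_sftp_connections(2, 2, True) == 3, matching the worked example above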
This can be checked for example using ssh -Q kex.\n\nThis must not be set if use_insecure_cipher is true.\n\nExample:\n\n sntrup761x25519-sha512@openssh.com curve25519-sha256 curve25519-sha256@libssh.org ecdh-sha2-nistp256\n", + "Default": [], + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "SpaceSepList" + }, + { + "Name": "macs", + "FieldName": "", + "Help": "Space separated list of MACs (message authentication code) algorithms, ordered by preference.\n\nAt least one must match with server configuration. This can be checked for example using ssh -Q mac.\n\nExample:\n\n umac-64-etm@openssh.com umac-128-etm@openssh.com hmac-sha2-256-etm@openssh.com\n", + "Default": [], + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "SpaceSepList" + }, + { + "Name": "host_key_algorithms", + "FieldName": "", + "Help": "Space separated list of host key algorithms, ordered by preference.\n\nAt least one must match with server configuration. This can be checked for example using ssh -Q HostKeyAlgorithms.\n\nNote: This can affect the outcome of key negotiation with the server even if server host key validation is not enabled.\n\nExample:\n\n ssh-ed25519 ssh-rsa ssh-dss\n", + "Default": [], + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "SpaceSepList" + }, + { + "Name": "ssh", + "FieldName": "", + "Help": "Path and arguments to external ssh binary.\n\nNormally rclone will use its internal ssh library to connect to the\nSFTP server. 
However it does not implement all possible ssh options so\nit may be desirable to use an external ssh binary.\n\nRclone ignores all the internal config if you use this option and\nexpects you to configure the ssh binary with the user/host/port and\nany other options you need.\n\n**Important** The ssh command must log in without asking for a\npassword so needs to be configured with keys or certificates.\n\nRclone will run the command supplied either with the additional\narguments \"-s sftp\" to access the SFTP subsystem or with commands such\nas \"md5sum /path/to/file\" appended to read checksums.\n\nAny arguments with spaces in should be surrounded by \"double quotes\".\n\nAn example setting might be:\n\n ssh -o ServerAliveInterval=20 user@example.com\n\nNote that when using an external ssh binary rclone makes a new ssh\nconnection for every hash it calculates.\n", + "Default": [], + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "SpaceSepList" + }, + { + "Name": "socks_proxy", + "FieldName": "", + "Help": "Socks 5 proxy host.\n\t\nSupports the format user:pass@host:port, user@host:port, host:port.\n\nExample:\n\n\tmyUser:myPass@localhost:9005\n\t", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "http_proxy", + "FieldName": "", + "Help": "URL for HTTP CONNECT proxy\n\nSet this to a URL for an HTTP proxy which supports the HTTP CONNECT verb.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "copy_is_hardlink", + "FieldName": "", + "Help": "Set to enable server side copies using hardlinks.\n\nThe SFTP protocol does not define a copy command so normally server\nside copies are not allowed with the sftp backend.\n\nHowever the SFTP protocol does support hardlinking, and if you enable\nthis flag then the sftp backend will support server side copies. 
These\nwill be implemented by doing a hardlink from the source to the\ndestination.\n\nNot all sftp servers support this.\n\nNote that hardlinking two files together will use no additional space\nas the source and the destination will be the same file.\n\nThis feature may be useful backups made with --copy-dest.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "sharefile", + "Description": "Citrix Sharefile", + "Prefix": "sharefile", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "upload_cutoff", + "FieldName": "", + "Help": "Cutoff for switching to multipart upload.", + "Default": 134217728, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": 
"128Mi", + "ValueStr": "128Mi", + "Type": "SizeSuffix" + }, + { + "Name": "root_folder_id", + "FieldName": "", + "Help": "ID of the root folder.\n\nLeave blank to access \"Personal Folders\". You can use one of the\nstandard values here or any folder ID (long hex number ID).", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Access the Personal Folders (default)." + }, + { + "Value": "favorites", + "Help": "Access the Favorites folder." + }, + { + "Value": "allshared", + "Help": "Access all the shared folders." + }, + { + "Value": "connectors", + "Help": "Access all the individual connectors." + }, + { + "Value": "top", + "Help": "Access the home, favorites, and shared folders as well as the connectors." + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Upload chunk size.\n\nMust a power of 2 >= 256k.\n\nMaking this larger will improve performance, but note that each chunk\nis buffered in memory one per transfer.\n\nReducing this will reduce memory usage but decrease performance.", + "Default": 67108864, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "64Mi", + "ValueStr": "64Mi", + "Type": "SizeSuffix" + }, + { + "Name": "endpoint", + "FieldName": "", + "Help": "Endpoint for API calls.\n\nThis is usually auto discovered as part of the oauth process, but can\nbe set manually to something like: https://XXX.sharefile.com\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 57091982, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,LeftSpace,LeftPeriod,RightSpace,RightPeriod,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,LeftSpace,LeftPeriod,RightSpace,RightPeriod,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "sia", + "Description": "Sia Decentralized Cloud", + "Prefix": "sia", + "Options": [ + { + "Name": "api_url", + "FieldName": "", + "Help": "Sia daemon API URL, like http://sia.daemon.host:9980.\n\nNote that siad must run with --disable-api-security to open API port for other hosts (not recommended).\nKeep default if Sia daemon runs on localhost.", + "Default": "http://127.0.0.1:9980", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + 
"Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "http://127.0.0.1:9980", + "ValueStr": "http://127.0.0.1:9980", + "Type": "string" + }, + { + "Name": "api_password", + "FieldName": "", + "Help": "Sia Daemon API Password.\n\nCan be found in the apipassword file located in HOME/.sia/ or in the daemon directory.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "user_agent", + "FieldName": "", + "Help": "Siad User Agent\n\nSia daemon requires the 'Sia-Agent' user agent by default for security", + "Default": "Sia-Agent", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Sia-Agent", + "ValueStr": "Sia-Agent", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50436354, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,Question,Hash,Percent,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,Question,Hash,Percent,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "smb", + "Description": "SMB / CIFS", + "Prefix": "smb", + "Options": [ + { + "Name": "host", + "FieldName": "", + "Help": "SMB server hostname to connect to.\n\nE.g. 
\"example.com\".", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "user", + "FieldName": "", + "Help": "SMB username.", + "Default": "vscode", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "vscode", + "ValueStr": "vscode", + "Type": "string" + }, + { + "Name": "port", + "FieldName": "", + "Help": "SMB port number.", + "Default": 445, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "445", + "ValueStr": "445", + "Type": "int" + }, + { + "Name": "pass", + "FieldName": "", + "Help": "SMB password.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "domain", + "FieldName": "", + "Help": "Domain name for NTLM authentication.", + "Default": "WORKGROUP", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "WORKGROUP", + "ValueStr": "WORKGROUP", + "Type": "string" + }, + { + "Name": "spn", + "FieldName": "", + "Help": "Service principal name.\n\nRclone presents this name to the server. Some servers use this as further\nauthentication, and it often needs to be set for clusters. For example:\n\n cifs/remotehost:1020\n\nLeave blank if not sure.\n", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "use_kerberos", + "FieldName": "", + "Help": "Use Kerberos authentication.\n\nIf set, rclone will use Kerberos authentication instead of NTLM. This\nrequires a valid Kerberos configuration and credentials cache to be\navailable, either in the default locations or as specified by the\nKRB5_CONFIG and KRB5CCNAME environment variables.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "idle_timeout", + "FieldName": "", + "Help": "Max time before closing idle connections.\n\nIf no connections have been returned to the connection pool in the time\ngiven, rclone will empty the connection pool.\n\nSet to 0 to keep connections indefinitely.\n", + "Default": 60000000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1m0s", + "ValueStr": "1m0s", + "Type": "Duration" + }, + { + "Name": "hide_special_share", + "FieldName": "", + "Help": "Hide special shares (e.g. 
print$) which users aren't supposed to access.", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "case_insensitive", + "FieldName": "", + "Help": "Whether the server is configured to be case-insensitive.\n\nAlways true on Windows shares.", + "Default": true, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 56698766, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,RightSpace,RightPeriod,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,RightSpace,RightPeriod,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "storj", + "Description": "Storj Decentralized Cloud Storage", + "Prefix": "storj", + "Options": [ + { + "Name": "provider", + "FieldName": "", + "Help": "Choose an authentication method.", + "Default": "existing", + "Value": null, + "Examples": [ + { + "Value": "existing", + "Help": "Use an existing access grant." + }, + { + "Value": "new", + "Help": "Create a new access grant from satellite address, API key, and passphrase." + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "webdav", - "Description": "WebDAV", - "Prefix": "webdav", - "Options": [ - { - "Name": "url", - "Help": "URL of http host to connect to.\n\nE.g. 
https://example.com.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": true, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "vendor", - "Help": "Name of the WebDAV site/service/software you are using.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "fastmail", - "Help": "Fastmail Files", - "Provider": "" - }, - { - "Value": "nextcloud", - "Help": "Nextcloud", - "Provider": "" - }, - { - "Value": "owncloud", - "Help": "Owncloud", - "Provider": "" - }, - { - "Value": "sharepoint", - "Help": "Sharepoint Online, authenticated by Microsoft account", - "Provider": "" - }, - { - "Value": "sharepoint-ntlm", - "Help": "Sharepoint with NTLM authentication, usually self-hosted or on-premises", - "Provider": "" - }, - { - "Value": "other", - "Help": "Other site/service or software", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "user", - "Help": "User name.\n\nIn case NTLM authentication is used, the username should be in the format 'Domain\\User'.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "pass", - "Help": "Password.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": true, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "bearer_token", - "Help": "Bearer token instead of user/pass (e.g. a Macaroon).", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "bearer_token_command", - "Help": "Command to run to get a bearer token.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.\n\nDefault encoding is Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,Hash,Percent,BackSlash,Del,Ctl,LeftSpace,LeftTilde,RightSpace,RightPeriod,InvalidUtf8 for sharepoint-ntlm or identity otherwise.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "headers", - "Help": "Set HTTP headers for all transactions.\n\nUse this to set additional HTTP headers for all transactions\n\nThe input format is comma separated list of key,value pairs. 
Standard\n[CSV encoding](https://godoc.org/encoding/csv) may be used.\n\nFor example, to set a Cookie use 'Cookie,name=value', or '\"Cookie\",\"name=value\"'.\n\nYou can set multiple headers, e.g. '\"Cookie\",\"name=value\",\"Authorization\",\"xxx\"'.\n", - "Provider": "", - "Default": [], - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "CommaSepList" - }, - { - "Name": "pacer_min_sleep", - "Help": "Minimum time to sleep between API calls.", - "Provider": "", - "Default": 10000000, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10ms", - "ValueStr": "10ms", - "Type": "Duration" - }, - { - "Name": "nextcloud_chunk_size", - "Help": "Nextcloud upload chunk size.\n\nWe recommend configuring your NextCloud instance to increase the max chunk size to 1 GB for better upload performances.\nSee https://docs.nextcloud.com/server/latest/admin_manual/configuration_files/big_file_upload_configuration.html#adjust-chunk-size-on-nextcloud-side\n\nSet to 0 to disable chunked uploading.\n", - "Provider": "", - "Default": 10485760, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "10Mi", - "ValueStr": "10Mi", - "Type": "SizeSuffix" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "existing", + "ValueStr": "existing", + "Type": "string" + }, + { + "Name": "access_grant", + "FieldName": "", + "Help": "Access grant.", + "Provider": "existing", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "satellite_address", + "FieldName": "", + "Help": "Satellite address.\n\nCustom satellite address should match the format: `<nodeid>@<address>:<port>`.",
+ "Provider": "new", + "Default": "us1.storj.io", + "Value": null, + "Examples": [ + { + "Value": "us1.storj.io", + "Help": "US1" + }, + { + "Value": "eu1.storj.io", + "Help": "EU1" + }, + { + "Value": "ap1.storj.io", + "Help": "AP1" + } ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "us1.storj.io", + "ValueStr": "us1.storj.io", + "Type": "string" + }, + { + "Name": "api_key", + "FieldName": "", + "Help": "API key.", + "Provider": "new", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix":
false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "passphrase", + "FieldName": "", + "Help": "Encryption passphrase.\n\nTo access existing objects enter passphrase used for uploading.", + "Provider": "new", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": [ + "tardigrade" + ], + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "sugarsync", + "Description": "Sugarsync", + "Prefix": "sugarsync", + "Options": [ + { + "Name": "app_id", + "FieldName": "", + "Help": "Sugarsync App ID.\n\nLeave blank to use rclone's.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "access_key_id", + "FieldName": "", + "Help": "Sugarsync Access Key ID.\n\nLeave blank to use rclone's.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "private_access_key", + "FieldName": "", + "Help": "Sugarsync Private Access Key.\n\nLeave blank to use rclone's.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "hard_delete", + "FieldName": "", + "Help": "Permanently delete files if true\notherwise put them in the deleted files.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "refresh_token", + "FieldName": "", + "Help": "Sugarsync refresh token.\n\nLeave blank normally, will be auto configured by rclone.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "authorization", + "FieldName": "", + "Help": "Sugarsync authorization.\n\nLeave blank normally, will be auto configured by rclone.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "authorization_expiry", + "FieldName": "", + "Help": "Sugarsync authorization expiry.\n\nLeave blank normally, will be auto configured by rclone.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + 
"NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "user", + "FieldName": "", + "Help": "Sugarsync user.\n\nLeave blank normally, will be auto configured by rclone.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "root_id", + "FieldName": "", + "Help": "Sugarsync root id.\n\nLeave blank normally, will be auto configured by rclone.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "deleted_id", + "FieldName": "", + "Help": "Sugarsync deleted folder id.\n\nLeave blank normally, will be auto configured by rclone.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50397186, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "swift", + "Description": "OpenStack Swift (Rackspace Cloud Files, Blomp Cloud Storage, Memset Memstore, OVH)", + "Prefix": "swift", + "Options": [ + { + "Name": "env_auth", + "FieldName": "", + "Help": "Get swift credentials from environment variables in standard OpenStack form.", + "Default": false, + "Value": null, + "Examples": [ + { + "Value": "false", + "Help": "Enter swift credentials in the next step." + }, + { + "Value": "true", + "Help": "Get swift credentials from environment vars.\nLeave other fields blank if using this." 
+ } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null - }, - { - "Name": "zoho", - "Description": "Zoho", - "Prefix": "zoho", - "Options": [ - { - "Name": "client_id", - "Help": "OAuth Client Id.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "client_secret", - "Help": "OAuth Client Secret.\n\nLeave blank normally.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token", - "Help": "OAuth Access Token as a JSON blob.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": true, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "auth_url", - "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "token_url", - "Help": "Token server url.\n\nLeave blank to use the provider defaults.", - "Provider": "", - "Default": "", - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "region", - "Help": "Zoho region to connect to.\n\nYou'll have to use the region your organization is registered in. 
If\nnot sure use the same top level domain as you connect to in your\nbrowser.", - "Provider": "", - "Default": "", - "Value": null, - "Examples": [ - { - "Value": "com", - "Help": "United states / Global", - "Provider": "" - }, - { - "Value": "eu", - "Help": "Europe", - "Provider": "" - }, - { - "Value": "in", - "Help": "India", - "Provider": "" - }, - { - "Value": "jp", - "Help": "Japan", - "Provider": "" - }, - { - "Value": "com.cn", - "Help": "China", - "Provider": "" - }, - { - "Value": "com.au", - "Help": "Australia", - "Provider": "" - } - ], - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": false, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "", - "ValueStr": "", - "Type": "string" - }, - { - "Name": "encoding", - "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", - "Provider": "", - "Default": 16875520, - "Value": null, - "ShortOpt": "", - "Hide": 0, - "Required": false, - "IsPassword": false, - "NoPrefix": false, - "Advanced": true, - "Exclusive": false, - "Sensitive": false, - "DefaultStr": "Del,Ctl,InvalidUtf8", - "ValueStr": "Del,Ctl,InvalidUtf8", - "Type": "MultiEncoder" - } + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "user", + "FieldName": "", + "Help": "User name to log in (OS_USERNAME).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "key", + "FieldName": "", + "Help": "API key or password (OS_PASSWORD).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth", + "FieldName": "", + "Help": "Authentication URL for server (OS_AUTH_URL).", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "https://auth.api.rackspacecloud.com/v1.0", + "Help": "Rackspace US" + }, + { + "Value": "https://lon.auth.api.rackspacecloud.com/v1.0", + "Help": "Rackspace UK" + }, + { + "Value": "https://identity.api.rackspacecloud.com/v2.0", + "Help": "Rackspace v2" + }, + { + "Value": "https://auth.storage.memset.com/v1.0", + "Help": "Memset Memstore UK" + }, + { + "Value": "https://auth.storage.memset.com/v2.0", + "Help": "Memset Memstore UK v2" + }, + { + "Value": "https://auth.cloud.ovh.net/v3", + "Help": "OVH" + }, + { + "Value": "https://authenticate.ain.net", + "Help": "Blomp Cloud Storage" + } ], - "CommandHelp": null, - "Aliases": null, - "Hide": false, - "MetadataInfo": null + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "user_id", + "FieldName": "", + "Help": "User ID to log in - optional - most swift systems use user and leave this blank (v3 auth) (OS_USER_ID).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": 
"string" + }, + { + "Name": "domain", + "FieldName": "", + "Help": "User domain - optional (v3 auth) (OS_USER_DOMAIN_NAME)", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "tenant", + "FieldName": "", + "Help": "Tenant name - optional for v1 auth, this or tenant_id required otherwise (OS_TENANT_NAME or OS_PROJECT_NAME).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "tenant_id", + "FieldName": "", + "Help": "Tenant ID - optional for v1 auth, this or tenant required otherwise (OS_TENANT_ID).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "tenant_domain", + "FieldName": "", + "Help": "Tenant domain - optional (v3 auth) (OS_PROJECT_DOMAIN_NAME).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Region name - optional (OS_REGION_NAME).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "storage_url", + "FieldName": "", + "Help": "Storage URL - optional (OS_STORAGE_URL).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_token", + "FieldName": "", + "Help": "Auth Token from alternate authentication - optional (OS_AUTH_TOKEN).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "application_credential_id", + "FieldName": "", + "Help": "Application Credential ID (OS_APPLICATION_CREDENTIAL_ID).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "application_credential_name", + "FieldName": "", + "Help": "Application Credential Name (OS_APPLICATION_CREDENTIAL_NAME).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "application_credential_secret", + "FieldName": "", + "Help": "Application Credential Secret (OS_APPLICATION_CREDENTIAL_SECRET).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": 
false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_version", + "FieldName": "", + "Help": "AuthVersion - optional - set to (1,2,3) if your auth URL has no version (ST_AUTH_VERSION).", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0", + "ValueStr": "0", + "Type": "int" + }, + { + "Name": "endpoint_type", + "FieldName": "", + "Help": "Endpoint type to choose from the service catalogue (OS_ENDPOINT_TYPE).", + "Default": "public", + "Value": null, + "Examples": [ + { + "Value": "public", + "Help": "Public (default, choose this if not sure)" + }, + { + "Value": "internal", + "Help": "Internal (use internal service net)" + }, + { + "Value": "admin", + "Help": "Admin" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "public", + "ValueStr": "public", + "Type": "string" + }, + { + "Name": "leave_parts_on_error", + "FieldName": "", + "Help": "If true avoid calling abort upload on a failure.\n\nIt should be set to true for resuming uploads across different sessions.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "storage_policy", + "FieldName": "", + "Help": "The storage policy to use when creating a new container.\n\nThis applies the specified storage policy when creating a new\ncontainer. The policy cannot be changed afterwards. The allowed\nconfiguration values and their meaning depend on your Swift storage\nprovider.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "", + "Help": "Default" + }, + { + "Value": "pcs", + "Help": "OVH Public Cloud Storage" + }, + { + "Value": "pca", + "Help": "OVH Public Cloud Archive" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "fetch_until_empty_page", + "FieldName": "", + "Help": "When paginating, always fetch unless we received an empty page.\n\nConsider using this option if rclone listings show fewer objects\nthan expected, or if repeated syncs copy unchanged objects.\n\nIt is safe to enable this, but rclone may make more API calls than\nnecessary.\n\nThis is one of a pair of workarounds to handle implementations\nof the Swift API that do not implement pagination as expected. 
See\nalso \"partial_page_fetch_threshold\".", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "partial_page_fetch_threshold", + "FieldName": "", + "Help": "When paginating, fetch if the current page is within this percentage of the limit.\n\nConsider using this option if rclone listings show fewer objects\nthan expected, or if repeated syncs copy unchanged objects.\n\nIt is safe to enable this, but rclone may make more API calls than\nnecessary.\n\nThis is one of a pair of workarounds to handle implementations\nof the Swift API that do not implement pagination as expected. See\nalso \"fetch_until_empty_page\".", + "Default": 0, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "0", + "ValueStr": "0", + "Type": "int" + }, + { + "Name": "chunk_size", + "FieldName": "", + "Help": "Above this size files will be chunked.\n\nAbove this size files will be chunked into a a `_segments` container\nor a `.file-segments` directory. (See the `use_segments_container` option\nfor more info). Default for this is 5 GiB which is its maximum value, which\nmeans only files above this size will be chunked.\n\nRclone uploads chunked files as dynamic large objects (DLO).\n", + "Default": 5368709120, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "5Gi", + "ValueStr": "5Gi", + "Type": "SizeSuffix" + }, + { + "Name": "no_chunk", + "FieldName": "", + "Help": "Don't chunk files during streaming upload.\n\nWhen doing streaming uploads (e.g. using `rcat` or `mount` with\n`--vfs-cache-mode off`) setting this flag will cause the swift backend\nto not upload chunked files.\n\nThis will limit the maximum streamed upload size to 5 GiB. This is\nuseful because non chunked files are easier to deal with and have an\nMD5SUM.\n\nRclone will still chunk files bigger than `chunk_size` when doing\nnormal copy operations.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "no_large_objects", + "FieldName": "", + "Help": "Disable support for static and dynamic large objects\n\nSwift cannot transparently store files bigger than 5 GiB. There are\ntwo schemes for chunking large files, static large objects (SLO) or\ndynamic large objects (DLO), and the API does not allow rclone to\ndetermine whether a file is a static or dynamic large object without\ndoing a HEAD on the object. Since these need to be treated\ndifferently, this means rclone has to issue HEAD requests for objects\nfor example when reading checksums.\n\nWhen `no_large_objects` is set, rclone will assume that there are no\nstatic or dynamic large objects stored. 
This means it can stop doing\nthe extra HEAD calls which in turn increases performance greatly\nespecially when doing a swift to swift transfer with `--checksum` set.\n\nSetting this option implies `no_chunk` and also that no files will be\nuploaded in chunks, so files bigger than 5 GiB will just fail on\nupload.\n\nIf you set this option and there **are** static or dynamic large objects,\nthen this will give incorrect hashes for them. Downloads will succeed,\nbut other operations such as Remove and Copy will fail.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "use_segments_container", + "FieldName": "", + "Help": "Choose destination for large object segments\n\nSwift cannot transparently store files bigger than 5 GiB and rclone\nwill chunk files larger than `chunk_size` (default 5 GiB) in order to\nupload them.\n\nIf this value is `true` the chunks will be stored in an additional\ncontainer named the same as the destination container but with\n`_segments` appended. This means that there won't be any duplicated\ndata in the original container but having another container may not be\nacceptable.\n\nIf this value is `false` the chunks will be stored in a\n`.file-segments` directory in the root of the container. This\ndirectory will be omitted when listing the container. Some\nproviders (eg Blomp) require this mode as creating additional\ncontainers isn't allowed. If it is desired to see the `.file-segments`\ndirectory in the root then this flag must be set to `true`.\n\nIf this value is `unset` (the default), then rclone will choose the value\nto use. It will be `false` unless rclone detects any `auth_url`s that\nit knows need it to be `true`. In this case you'll see a message in\nthe DEBUG log.\n", + "Default": { + "Value": false, + "Valid": false + }, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "unset", + "ValueStr": "unset", + "Type": "Tristate" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 16777218, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,InvalidUtf8", + "ValueStr": "Slash,InvalidUtf8", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "tardigrade", + "Description": "Storj Decentralized Cloud Storage", + "Prefix": "tardigrade", + "Options": [ + { + "Name": "provider", + "FieldName": "", + "Help": "Choose an authentication method.", + "Default": "existing", + "Value": null, + "Examples": [ + { + "Value": "existing", + "Help": "Use an existing access grant." 
+ }, + { + "Value": "new", + "Help": "Create a new access grant from satellite address, API key, and passphrase." + } + ], + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "existing", + "ValueStr": "existing", + "Type": "string" + }, + { + "Name": "access_grant", + "FieldName": "", + "Help": "Access grant.", + "Provider": "existing", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "satellite_address", + "FieldName": "", + "Help": "Satellite address.\n\nCustom satellite address should match the format: `<nodeid>@<address>:<port>`.",
+ "Provider": "new", + "Default": "us1.storj.io", + "Value": null, + "Examples": [ + { + "Value": "us1.storj.io", + "Help": "US1" + }, + { + "Value": "eu1.storj.io", + "Help": "EU1" + }, + { + "Value": "ap1.storj.io", + "Help": "AP1" + } + ], + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "us1.storj.io", + "ValueStr": "us1.storj.io", + "Type": "string" + }, + { + "Name": "api_key", + "FieldName": "", + "Help": "API key.", + "Provider": "new", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "passphrase", + "FieldName": "", + "Help": "Encryption passphrase.\n\nTo access existing objects enter passphrase used for uploading.", + "Provider": "new", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 3, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": [ + "tardigrade" + ], + "Hide": true, + "MetadataInfo": null + }, + { + "Name": "ulozto", + "Description": "Uloz.to", + "Prefix": "ulozto", + "Options": [ + { + "Name": "app_token", + "FieldName": "", + "Help": "The application token identifying the app. An app API key can be either found in the API\ndoc https://uloz.to/upload-resumable-api-beta or obtained from customer service.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "username", + "FieldName": "", + "Help": "The username of the principal to operate as.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "password", + "FieldName": "", + "Help": "The password for the user.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "root_folder_slug", + "FieldName": "", + "Help": "If set, rclone will use this folder as the root folder for all operations. For example,\nif the slug identifies 'foo/bar/', 'ulozto:baz' is equivalent to 'ulozto:foo/bar/baz' without\nany root slug set.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "list_page_size", + "FieldName": "", + "Help": "The size of a single page for list commands. 
1-500", + "Default": 500, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "500", + "ValueStr": "500", + "Type": "int" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50438146, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "union", + "Description": "Union merges the contents of several upstream fs", + "Prefix": "union", + "Options": [ + { + "Name": "upstreams", + "FieldName": "", + "Help": "List of space separated upstreams.\n\nCan be 'upstreama:test/dir upstreamb:', '\"upstreama:test/space:ro dir\" upstreamb:', etc.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "action_policy", + "FieldName": "", + "Help": "Policy to choose upstream on ACTION category.", + "Default": "epall", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "epall", + "ValueStr": "epall", + "Type": "string" + }, + { + "Name": "create_policy", + "FieldName": "", + "Help": "Policy to choose upstream on CREATE category.", + "Default": "epmfs", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "epmfs", + "ValueStr": "epmfs", + "Type": "string" + }, + { + "Name": "search_policy", + "FieldName": "", + "Help": "Policy to choose upstream on SEARCH category.", + "Default": "ff", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "ff", + "ValueStr": "ff", + "Type": "string" + }, + { + "Name": "cache_time", + "FieldName": "", + "Help": "Cache time of usage and free space (in seconds).\n\nThis option is only useful when a path preserving policy is used.", + "Default": 120, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "120", + "ValueStr": "120", + "Type": "int" + }, + { + "Name": "min_free_space", + "FieldName": "", + "Help": "Minimum viable free space for lfs/eplfs policies.\n\nIf a remote has less than this much free space then it won't be\nconsidered for use in lfs or eplfs policies.", + "Default": 1073741824, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + 
"NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "1Gi", + "ValueStr": "1Gi", + "Type": "SizeSuffix" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": { + "System": null, + "Help": "Any metadata supported by the underlying remote is read and written." } + }, + { + "Name": "uptobox", + "Description": "Uptobox", + "Prefix": "uptobox", + "Options": [ + { + "Name": "access_token", + "FieldName": "", + "Help": "Your access token.\n\nGet it from https://uptobox.com/my_account.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "private", + "FieldName": "", + "Help": "Set to make uploaded files private", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50561070, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,LtGt,DoubleQuote,BackQuote,Del,Ctl,LeftSpace,InvalidUtf8,Dot", + "ValueStr": "Slash,LtGt,DoubleQuote,BackQuote,Del,Ctl,LeftSpace,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "webdav", + "Description": "WebDAV", + "Prefix": "webdav", + "Options": [ + { + "Name": "url", + "FieldName": "", + "Help": "URL of http host to connect to.\n\nE.g. 
https://example.com.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": true, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "vendor", + "FieldName": "", + "Help": "Name of the WebDAV site/service/software you are using.", + "Default": "", + "Value": null, + "Examples": [ + { + "Value": "fastmail", + "Help": "Fastmail Files" + }, + { + "Value": "nextcloud", + "Help": "Nextcloud" + }, + { + "Value": "owncloud", + "Help": "Owncloud 10 PHP based WebDAV server" + }, + { + "Value": "infinitescale", + "Help": "ownCloud Infinite Scale" + }, + { + "Value": "sharepoint", + "Help": "Sharepoint Online, authenticated by Microsoft account" + }, + { + "Value": "sharepoint-ntlm", + "Help": "Sharepoint with NTLM authentication, usually self-hosted or on-premises" + }, + { + "Value": "rclone", + "Help": "rclone WebDAV server to serve a remote over HTTP via the WebDAV protocol" + }, + { + "Value": "other", + "Help": "Other site/service or software" + } + ], + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "user", + "FieldName": "", + "Help": "User name.\n\nIn case NTLM authentication is used, the username should be in the format 'Domain\\User'.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "pass", + "FieldName": "", + "Help": "Password.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": true, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "bearer_token", + "FieldName": "", + "Help": "Bearer token instead of user/pass (e.g. a Macaroon).", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "bearer_token_command", + "FieldName": "", + "Help": "Command to run to get a bearer token.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.\n\nDefault encoding is Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,Hash,Percent,BackSlash,Del,Ctl,LeftSpace,LeftTilde,RightSpace,RightPeriod,InvalidUtf8 for sharepoint-ntlm or identity otherwise.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "headers", + "FieldName": "", + "Help": "Set HTTP headers for all transactions.\n\nUse this to set additional HTTP headers for all transactions\n\nThe input format is comma separated list of key,value pairs. 
Standard\n[CSV encoding](https://godoc.org/encoding/csv) may be used.\n\nFor example, to set a Cookie use 'Cookie,name=value', or '\"Cookie\",\"name=value\"'.\n\nYou can set multiple headers, e.g. '\"Cookie\",\"name=value\",\"Authorization\",\"xxx\"'.\n", + "Default": [], + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "CommaSepList" + }, + { + "Name": "pacer_min_sleep", + "FieldName": "", + "Help": "Minimum time to sleep between API calls.", + "Default": 10000000, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10ms", + "ValueStr": "10ms", + "Type": "Duration" + }, + { + "Name": "nextcloud_chunk_size", + "FieldName": "", + "Help": "Nextcloud upload chunk size.\n\nWe recommend configuring your NextCloud instance to increase the max chunk size to 1 GB for better upload performances.\nSee https://docs.nextcloud.com/server/latest/admin_manual/configuration_files/big_file_upload_configuration.html#adjust-chunk-size-on-nextcloud-side\n\nSet to 0 to disable chunked uploading.\n", + "Default": 10485760, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "10Mi", + "ValueStr": "10Mi", + "Type": "SizeSuffix" + }, + { + "Name": "owncloud_exclude_shares", + "FieldName": "", + "Help": "Exclude ownCloud shares", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "owncloud_exclude_mounts", + "FieldName": "", + "Help": "Exclude ownCloud mounted storages", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "unix_socket", + "FieldName": "", + "Help": "Path to a unix domain socket to dial to, instead of opening a TCP connection directly", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_redirect", + "FieldName": "", + "Help": "Preserve authentication on redirect.\n\nIf the server redirects rclone to a new domain when it is trying to\nread a file then normally rclone will drop the Authorization: header\nfrom the request.\n\nThis is standard security practice to avoid sending your credentials\nto an unknown webserver.\n\nHowever this is desirable in some circumstances. 
If you are getting\nan error like \"401 Unauthorized\" when rclone is attempting to read\nfiles from the webdav server then you can try this option.\n", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "yandex", + "Description": "Yandex Disk", + "Prefix": "yandex", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "hard_delete", + "FieldName": "", + "Help": "Delete files permanently rather than putting them into the trash.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "encoding", + "FieldName": "", + "Help": "The encoding for the backend.\n\nSee the 
[encoding section in the overview](/overview/#encoding) for more info.", + "Default": 50429954, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "Slash,Del,Ctl,InvalidUtf8,Dot", + "ValueStr": "Slash,Del,Ctl,InvalidUtf8,Dot", + "Type": "Encoding" + }, + { + "Name": "spoof_ua", + "FieldName": "", + "Help": "Set the user agent to match an official version of the yandex disk client. May help with upload performance.", + "Default": true, + "Value": null, + "Hide": 2, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "true", + "ValueStr": "true", + "Type": "bool" + }, + { + "Name": "description", + "FieldName": "", + "Help": "Description of the remote.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + } + ], + "CommandHelp": null, + "Aliases": null, + "Hide": false, + "MetadataInfo": null + }, + { + "Name": "zoho", + "Description": "Zoho", + "Prefix": "zoho", + "Options": [ + { + "Name": "client_id", + "FieldName": "", + "Help": "OAuth Client Id.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_secret", + "FieldName": "", + "Help": "OAuth Client Secret.\n\nLeave blank normally.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": false, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token", + "FieldName": "", + "Help": "OAuth Access Token as a JSON blob.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": true, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "auth_url", + "FieldName": "", + "Help": "Auth server URL.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "token_url", + "FieldName": "", + "Help": "Token server url.\n\nLeave blank to use the provider defaults.", + "Default": "", + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "", + "ValueStr": "", + "Type": "string" + }, + { + "Name": "client_credentials", + "FieldName": "", + "Help": "Use client credentials OAuth flow.\n\nThis will use the OAUTH2 client Credentials Flow as described in RFC 6749.", + "Default": false, + "Value": null, + "Hide": 0, + "Required": false, + "IsPassword": false, + "NoPrefix": false, + "Advanced": true, + "Exclusive": false, + "Sensitive": false, + "DefaultStr": "false", + "ValueStr": "false", + "Type": "bool" + }, + { + "Name": "region", + "FieldName": "", + "Help": "Zoho region to connect to.\n\nYou'll have to use 
the region your organization is registered in. If\nnot sure use the same top level domain as you connect to in your\nbrowser.",
+        "Default": "",
+        "Value": null,
+        "Examples": [
+          {
+            "Value": "com",
+            "Help": "United states / Global"
+          },
+          {
+            "Value": "eu",
+            "Help": "Europe"
+          },
+          {
+            "Value": "in",
+            "Help": "India"
+          },
+          {
+            "Value": "jp",
+            "Help": "Japan"
+          },
+          {
+            "Value": "com.cn",
+            "Help": "China"
+          },
+          {
+            "Value": "com.au",
+            "Help": "Australia"
+          }
+        ],
+        "Hide": 0,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": false,
+        "Exclusive": false,
+        "Sensitive": false,
+        "DefaultStr": "",
+        "ValueStr": "",
+        "Type": "string"
+      },
+      {
+        "Name": "upload_cutoff",
+        "FieldName": "",
+        "Help": "Cutoff for switching to large file upload api (>= 10 MiB).",
+        "Default": 10485760,
+        "Value": null,
+        "Hide": 0,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": true,
+        "Exclusive": false,
+        "Sensitive": false,
+        "DefaultStr": "10Mi",
+        "ValueStr": "10Mi",
+        "Type": "SizeSuffix"
+      },
+      {
+        "Name": "encoding",
+        "FieldName": "",
+        "Help": "The encoding for the backend.\n\nSee the [encoding section in the overview](/overview/#encoding) for more info.",
+        "Default": 16875520,
+        "Value": null,
+        "Hide": 0,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": true,
+        "Exclusive": false,
+        "Sensitive": false,
+        "DefaultStr": "Del,Ctl,InvalidUtf8",
+        "ValueStr": "Del,Ctl,InvalidUtf8",
+        "Type": "Encoding"
+      },
+      {
+        "Name": "description",
+        "FieldName": "",
+        "Help": "Description of the remote.",
+        "Default": "",
+        "Value": null,
+        "Hide": 0,
+        "Required": false,
+        "IsPassword": false,
+        "NoPrefix": false,
+        "Advanced": true,
+        "Exclusive": false,
+        "Sensitive": false,
+        "DefaultStr": "",
+        "ValueStr": "",
+        "Type": "string"
+      }
+    ],
+    "CommandHelp": null,
+    "Aliases": null,
+    "Hide": false,
+    "MetadataInfo": null
+  }
 ]
diff --git a/components/renku_data_services/users/api.spec.yaml b/components/renku_data_services/users/api.spec.yaml
index db688b719..329000b48 100644
--- a/components/renku_data_services/users/api.spec.yaml
+++ b/components/renku_data_services/users/api.spec.yaml
@@ -266,6 +266,33 @@ paths:
           $ref: "#/components/responses/Error"
       tags:
         - user_preferences
+  "/user/preferences/dismiss_project_migration_banner":
+    post:
+      summary: Dismiss the project migration banner
+      responses:
+        "200":
+          description: The updated user preferences
+          content:
+            "application/json":
+              schema:
+                $ref: "#/components/schemas/UserPreferences"
+        default:
+          $ref: "#/components/responses/Error"
+      tags:
+        - user_preferences
+    delete:
+      summary: Remove the project migration banner dismissal
+      responses:
+        "200":
+          description: The updated user preferences
+          content:
+            "application/json":
+              schema:
+                $ref: "#/components/schemas/UserPreferences"
+        default:
+          $ref: "#/components/responses/Error"
+      tags:
+        - user_preferences
   "/user/preferences/pinned_projects":
     post:
       summary: Add a pinned project
@@ -418,23 +445,26 @@ components:
         $ref: "#/components/schemas/Ulid"
       name:
         $ref: "#/components/schemas/SecretName"
+      default_filename:
+        $ref: "#/components/schemas/SecretDefaultFilename"
+      modification_date:
+        $ref: "#/components/schemas/ModificationDate"
       kind:
         $ref: "#/components/schemas/SecretKind"
+      session_secret_slot_ids:
+        $ref: "#/components/schemas/UlidList"
+      data_connector_ids:
+        $ref: "#/components/schemas/UlidList"
       expiration_timestamp:
         $ref: "#/components/schemas/ExpirationTimestamp"
-      modification_date:
-        $ref:
"#/components/schemas/ModificationDate" required: - "id" - "name" - - "kind" + - "default_filename" - "modification_date" - example: - id: "01AN4Z79ZS5XN0F25N3DB94T4R" - name: "S3-Credentials" - kind: general - expiration_timestamp: null - modification_date: "2024-01-16T11:42:05Z" + - "kind" + - "session_secret_slot_ids" + - "data_connector_ids" SecretPost: description: Secret metadata to be created type: object @@ -442,6 +472,8 @@ components: properties: name: $ref: "#/components/schemas/SecretName" + default_filename: + $ref: "#/components/schemas/SecretDefaultFilename" value: $ref: "#/components/schemas/SecretValue" kind: @@ -459,14 +491,23 @@ components: description: Secret metadata to be modified additionalProperties: false properties: + name: + $ref: "#/components/schemas/SecretName" + default_filename: + $ref: "#/components/schemas/SecretDefaultFilename" value: $ref: "#/components/schemas/SecretValue" expiration_timestamp: $ref: "#/components/schemas/ExpirationTimestamp" - required: - - "value" SecretName: - description: Secret name + description: The name of a user secret + type: string + minLength: 1 + maxLength: 99 + example: API Token + SecretDefaultFilename: + description: | + Filename to give to this secret when mounted in Renku 1.0 sessions type: string minLength: 1 maxLength: 99 @@ -478,6 +519,10 @@ components: minLength: 26 maxLength: 26 pattern: "^[0-7][0-9A-HJKMNP-TV-Z]{25}$" # This is case-insensitive + UlidList: + type: array + items: + $ref: "#/components/schemas/Ulid" ModificationDate: description: The date and time the secret was created or modified (this is always in UTC) type: string @@ -501,6 +546,9 @@ components: format: date-time example: "2030-11-01T17:32:28UTC+01:00" default: null + ShowProjectMigrationBanner: + description: Should display project migration banner + type: boolean UserPreferences: type: object description: The object containing user preferences @@ -510,6 +558,8 @@ components: $ref: "#/components/schemas/UserId" pinned_projects: $ref: "#/components/schemas/PinnedProjects" + show_project_migration_banner: + $ref: "#/components/schemas/ShowProjectMigrationBanner" required: ["user_id", "pinned_projects"] PinnedProjects: type: object @@ -518,7 +568,7 @@ components: project_slugs: type: array items: - $ref: "#/components/schemas/ProjectSlug" + $ref: "#/components/schemas/ProjectSlugResponse" ProjectSlug: type: string description: The slug used to identify a project @@ -526,7 +576,12 @@ components: example: "user/my-project" # limitations based on allowed characters in project slugs from Gitlab from here: # https://docs.gitlab.com/ee/user/reserved_names.html - pattern: "[a-zA-Z0-9_.-/]" + pattern: "^[a-zA-Z0-9]+([_.\\-/][a-zA-Z0-9]+)*[_.\\-/]?[a-zA-Z0-9]$" + ProjectSlugResponse: + type: string + description: The slug used to identify a project + minLength: 3 + example: "user/my-project" AddPinnedProject: type: object additionalProperties: false diff --git a/components/renku_data_services/users/apispec.py b/components/renku_data_services/users/apispec.py index 01cf4be08..2b45b2a95 100644 --- a/components/renku_data_services/users/apispec.py +++ b/components/renku_data_services/users/apispec.py @@ -1,6 +1,6 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2024-10-18T13:00:24+00:00 +# timestamp: 2025-05-08T14:23:21+00:00 from __future__ import annotations @@ -23,18 +23,27 @@ class Version(BaseAPISpec): version: str +class Ulid(RootModel[str]): + root: str = Field( + ..., + description="ULID identifier", + max_length=26, + 
min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) + + class SecretKind(Enum): general = "general" storage = "storage" -class ProjectSlug(RootModel[str]): +class ProjectSlugResponse(RootModel[str]): root: str = Field( ..., description="The slug used to identify a project", - example="user/my-project", + examples=["user/my-project"], min_length=3, - pattern="[a-zA-Z0-9_.-/]", ) @@ -45,18 +54,20 @@ class AddPinnedProject(BaseAPISpec): project_slug: str = Field( ..., description="The slug used to identify a project", - example="user/my-project", + examples=["user/my-project"], min_length=3, - pattern="[a-zA-Z0-9_.-/]", + pattern="^[a-zA-Z0-9]+([_.\\-/][a-zA-Z0-9]+)*[_.\\-/]?[a-zA-Z0-9]$", ) class Error(BaseAPISpec): - code: int = Field(..., example=1404, gt=0) + code: int = Field(..., examples=[1404], gt=0) detail: Optional[str] = Field( - None, example="A more detailed optional message showing what the problem was" + None, examples=["A more detailed optional message showing what the problem was"] + ) + message: str = Field( + ..., examples=["Something went wrong - please try again later"] ) - message: str = Field(..., example="Something went wrong - please try again later") class ErrorResponse(BaseAPISpec): @@ -107,30 +118,30 @@ class UserWithId(BaseAPISpec): id: str = Field( ..., description="Keycloak user ID", - example="f74a228b-1790-4276-af5f-25c2424e9b0c", + examples=["f74a228b-1790-4276-af5f-25c2424e9b0c"], pattern="^[A-Za-z0-9]{1}[A-Za-z0-9-]+$", ) username: str = Field( ..., description="Handle of the user", - example="some-username", + examples=["some-username"], max_length=99, min_length=1, ) email: Optional[str] = Field( - None, description="User email", example="some-user@gmail.com" + None, description="User email", examples=["some-user@gmail.com"] ) first_name: Optional[str] = Field( None, description="First or last name of the user", - example="John", + examples=["John"], max_length=256, min_length=1, ) last_name: Optional[str] = Field( None, description="First or last name of the user", - example="John", + examples=["John"], max_length=256, min_length=1, ) @@ -147,30 +158,30 @@ class SelfUserInfo(BaseAPISpec): id: str = Field( ..., description="Keycloak user ID", - example="f74a228b-1790-4276-af5f-25c2424e9b0c", + examples=["f74a228b-1790-4276-af5f-25c2424e9b0c"], pattern="^[A-Za-z0-9]{1}[A-Za-z0-9-]+$", ) username: str = Field( ..., description="Handle of the user", - example="some-username", + examples=["some-username"], max_length=99, min_length=1, ) email: Optional[str] = Field( - None, description="User email", example="some-user@gmail.com" + None, description="User email", examples=["some-user@gmail.com"] ) first_name: Optional[str] = Field( None, description="First or last name of the user", - example="John", + examples=["John"], max_length=256, min_length=1, ) last_name: Optional[str] = Field( None, description="First or last name of the user", - example="John", + examples=["John"], max_length=256, min_length=1, ) @@ -192,8 +203,15 @@ class SecretWithId(BaseAPISpec): ) name: str = Field( ..., - description="Secret name", - example="Data-S3-Secret_1", + description="The name of a user secret", + examples=["API Token"], + max_length=99, + min_length=1, + ) + default_filename: str = Field( + ..., + description="Filename to give to this secret when mounted in Renku 1.0 sessions\n", + examples=["Data-S3-Secret_1"], max_length=99, min_length=1, pattern="^[a-zA-Z0-9_\\-.]*$", @@ -207,8 +225,11 @@ class SecretWithId(BaseAPISpec): modification_date: datetime = Field( 
..., description="The date and time the secret was created or modified (this is always in UTC)", - example="2023-11-01T17:32:28Z", + examples=["2023-11-01T17:32:28Z"], ) + kind: SecretKind + session_secret_slot_ids: List[Ulid] + data_connector_ids: List[Ulid] class SecretPost(BaseAPISpec): @@ -217,8 +238,15 @@ class SecretPost(BaseAPISpec): ) name: str = Field( ..., - description="Secret name", - example="Data-S3-Secret_1", + description="The name of a user secret", + examples=["API Token"], + max_length=99, + min_length=1, + ) + default_filename: Optional[str] = Field( + None, + description="Filename to give to this secret when mounted in Renku 1.0 sessions\n", + examples=["Data-S3-Secret_1"], max_length=99, min_length=1, pattern="^[a-zA-Z0-9_\\-.]*$", @@ -241,8 +269,23 @@ class SecretPatch(BaseAPISpec): model_config = ConfigDict( extra="forbid", ) - value: str = Field( - ..., + name: Optional[str] = Field( + None, + description="The name of a user secret", + examples=["API Token"], + max_length=99, + min_length=1, + ) + default_filename: Optional[str] = Field( + None, + description="Filename to give to this secret when mounted in Renku 1.0 sessions\n", + examples=["Data-S3-Secret_1"], + max_length=99, + min_length=1, + pattern="^[a-zA-Z0-9_\\-.]*$", + ) + value: Optional[str] = Field( + None, description="Secret value that can be any text", max_length=5000, min_length=1, @@ -255,7 +298,7 @@ class SecretPatch(BaseAPISpec): class PinnedProjects(BaseAPISpec): - project_slugs: Optional[List[ProjectSlug]] = None + project_slugs: Optional[List[ProjectSlugResponse]] = None class SecretsList(RootModel[List[SecretWithId]]): @@ -269,7 +312,10 @@ class UserPreferences(BaseAPISpec): user_id: str = Field( ..., description="Keycloak user ID", - example="f74a228b-1790-4276-af5f-25c2424e9b0c", + examples=["f74a228b-1790-4276-af5f-25c2424e9b0c"], pattern="^[A-Za-z0-9]{1}[A-Za-z0-9-]+$", ) pinned_projects: PinnedProjects + show_project_migration_banner: Optional[bool] = Field( + None, description="Should display project migration banner" + ) diff --git a/components/renku_data_services/users/blueprints.py b/components/renku_data_services/users/blueprints.py index ad8a48e48..73eadc116 100644 --- a/components/renku_data_services/users/blueprints.py +++ b/components/renku_data_services/users/blueprints.py @@ -1,8 +1,8 @@ """Blueprints for the user endpoints.""" from dataclasses import dataclass +from typing import Any -from cryptography.hazmat.primitives.asymmetric import rsa from sanic import HTTPResponse, Request, json from sanic.response import JSONResponse from sanic_ext import validate @@ -14,10 +14,10 @@ from renku_data_services.base_api.misc import validate_query from renku_data_services.base_models.validation import validated_json from renku_data_services.errors import errors -from renku_data_services.secrets.core import encrypt_user_secret from renku_data_services.secrets.db import UserSecretsRepo -from renku_data_services.secrets.models import SecretKind, UnsavedSecret +from renku_data_services.secrets.models import Secret, SecretKind from renku_data_services.users import apispec, models +from renku_data_services.users.core import validate_secret_patch, validate_unsaved_secret from renku_data_services.users.db import UserPreferencesRepository, UserRepo @@ -40,7 +40,7 @@ async def _get_all(request: Request, user: base_models.APIUser, query: apispec.U [ dict( id=user.id, - username=user.namespace.slug, + username=user.namespace.path.first.value, email=user.email, first_name=user.first_name, 
last_name=user.last_name, @@ -66,7 +66,7 @@ async def _get_self(_: Request, user: base_models.APIUser) -> JSONResponse: apispec.SelfUserInfo, dict( id=user_info.id, - username=user_info.namespace.slug, + username=user_info.namespace.path.first.value, email=user_info.email, first_name=user_info.first_name, last_name=user_info.last_name, @@ -101,7 +101,7 @@ async def _get_one(_: Request, user: base_models.APIUser, user_id: str) -> JSONR apispec.UserWithId, dict( id=user_info.id, - username=user_info.namespace.slug, + username=user_info.namespace.path.first.value, email=user_info.email, first_name=user_info.first_name, last_name=user_info.last_name, @@ -136,9 +136,7 @@ class UserSecretsBP(CustomBlueprint): """ secret_repo: UserSecretsRepo - user_repo: UserRepo authenticator: base_models.Authenticator - secret_service_public_key: rsa.RSAPublicKey def get_all(self) -> BlueprintFactoryResponse: """Get all user's secrets.""" @@ -151,18 +149,9 @@ async def _get_all( ) -> JSONResponse: secret_kind = SecretKind[query.kind.value] secrets = await self.secret_repo.get_user_secrets(requested_by=user, kind=secret_kind) - secrets_json = [ - secret.model_dump( - include={"id", "name", "kind", "expiration_timestamp", "modification_date"}, - exclude_none=True, - mode="json", - ) - for secret in secrets - ] return validated_json( apispec.SecretsList, - secrets_json, - 200, + [self._dump_secret(s) for s in secrets], ) return "/user/secrets", ["GET"], _get_all @@ -174,14 +163,10 @@ def get_one(self) -> BlueprintFactoryResponse: @only_authenticated async def _get_one(_: Request, user: base_models.APIUser, secret_id: ULID) -> JSONResponse: secret = await self.secret_repo.get_secret_by_id(requested_by=user, secret_id=secret_id) - if not secret: - raise errors.MissingResourceError(message=f"The secret with id {secret_id} cannot be found.") - result = secret.model_dump( - include={"id", "name", "kind", "expiration_timestamp", "modification_date"}, - exclude_none=False, - mode="json", + return validated_json( + apispec.SecretWithId, + self._dump_secret(secret), ) - return validated_json(apispec.SecretWithId, result, exclude_none=False) return "/user/secrets/", ["GET"], _get_one @@ -192,26 +177,9 @@ def post(self) -> BlueprintFactoryResponse: @only_authenticated @validate(json=apispec.SecretPost) async def _post(_: Request, user: base_models.APIUser, body: apispec.SecretPost) -> JSONResponse: - encrypted_value, encrypted_key = await encrypt_user_secret( - user_repo=self.user_repo, - requested_by=user, - secret_service_public_key=self.secret_service_public_key, - secret_value=body.value, - ) - secret = UnsavedSecret( - name=body.name, - encrypted_value=encrypted_value, - encrypted_key=encrypted_key, - kind=SecretKind[body.kind.value], - expiration_timestamp=body.expiration_timestamp, - ) - inserted_secret = await self.secret_repo.insert_secret(requested_by=user, secret=secret) - result = inserted_secret.model_dump( - include={"id", "name", "kind", "expiration_timestamp", "modification_date"}, - exclude_none=False, - mode="json", - ) - return validated_json(apispec.SecretWithId, result, 201, exclude_none=False) + new_secret = validate_unsaved_secret(body) + inserted_secret = await self.secret_repo.insert_secret(requested_by=user, secret=new_secret) + return validated_json(apispec.SecretWithId, self._dump_secret(inserted_secret), status=201) return "/user/secrets", ["POST"], _post @@ -224,27 +192,15 @@ def patch(self) -> BlueprintFactoryResponse: async def _patch( _: Request, user: base_models.APIUser, secret_id: ULID, 
body: apispec.SecretPatch ) -> JSONResponse: - encrypted_value, encrypted_key = await encrypt_user_secret( - user_repo=self.user_repo, - requested_by=user, - secret_service_public_key=self.secret_service_public_key, - secret_value=body.value, - ) + secret_patch = validate_secret_patch(body) updated_secret = await self.secret_repo.update_secret( - requested_by=user, - secret_id=secret_id, - encrypted_value=encrypted_value, - encrypted_key=encrypted_key, - expiration_timestamp=body.expiration_timestamp, + requested_by=user, secret_id=secret_id, patch=secret_patch ) - result = updated_secret.model_dump( - include={"id", "name", "kind", "expiration_timestamp", "modification_date"}, - exclude_none=False, - mode="json", + return validated_json( + apispec.SecretWithId, + self._dump_secret(updated_secret), ) - return validated_json(apispec.SecretWithId, result, exclude_none=False) - return "/user/secrets/", ["PATCH"], _patch def delete(self) -> BlueprintFactoryResponse: @@ -258,6 +214,19 @@ async def _delete(_: Request, user: base_models.APIUser, secret_id: ULID) -> HTT return "/user/secrets/", ["DELETE"], _delete + @staticmethod + def _dump_secret(secret: Secret) -> dict[str, Any]: + """Dumps a secret for API responses.""" + return dict( + id=str(secret.id), + name=secret.name, + default_filename=secret.default_filename, + kind=secret.kind.value, + modification_date=secret.modification_date, + session_secret_slot_ids=[str(item) for item in secret.session_secret_slot_ids], + data_connector_ids=[str(item) for item in secret.data_connector_ids], + ) + @dataclass(kw_only=True) class UserPreferencesBP(CustomBlueprint): @@ -302,3 +271,23 @@ async def _delete( return validated_json(apispec.UserPreferences, res) return "/user/preferences/pinned_projects", ["DELETE"], _delete + + def post_dismiss_project_migration_banner(self) -> BlueprintFactoryResponse: + """Add dismiss project migration banner to user preferences for the logged in user.""" + + @authenticate(self.authenticator) + async def _post(_: Request, user: base_models.APIUser) -> JSONResponse: + res = await self.user_preferences_repo.add_dismiss_project_migration_banner(requested_by=user) + return validated_json(apispec.UserPreferences, res) + + return "/user/preferences/dismiss_project_migration_banner", ["POST"], _post + + def delete_dismiss_project_migration_banner(self) -> BlueprintFactoryResponse: + """Remove dismiss project migration banner from user preferences for the logged in user.""" + + @authenticate(self.authenticator) + async def _delete(request: Request, user: base_models.APIUser) -> JSONResponse: + res = await self.user_preferences_repo.remove_dismiss_project_migration_banner(requested_by=user) + return validated_json(apispec.UserPreferences, res) + + return "/user/preferences/dismiss_project_migration_banner", ["DELETE"], _delete diff --git a/components/renku_data_services/users/config.py b/components/renku_data_services/users/config.py index 370f61994..9c93767ff 100644 --- a/components/renku_data_services/users/config.py +++ b/components/renku_data_services/users/config.py @@ -1,6 +1,8 @@ """Configuration for user preferences.""" +import os from dataclasses import dataclass +from typing import Self @dataclass(frozen=True, eq=True, kw_only=True) @@ -8,3 +10,9 @@ class UserPreferencesConfig: """User preferences configuration.""" max_pinned_projects: int + + @classmethod + def from_env(cls) -> Self: + """Load config from environment.""" + max_pinned_projects = int(os.environ.get("MAX_PINNED_PROJECTS", "10")) + return 
cls(max_pinned_projects=max_pinned_projects)
diff --git a/components/renku_data_services/users/core.py b/components/renku_data_services/users/core.py
new file mode 100644
index 000000000..3f2c790b9
--- /dev/null
+++ b/components/renku_data_services/users/core.py
@@ -0,0 +1,24 @@
+"""Business logic for user endpoints."""
+
+from renku_data_services.secrets.models import SecretKind, SecretPatch, UnsavedSecret
+from renku_data_services.users import apispec
+
+
+def validate_unsaved_secret(body: apispec.SecretPost) -> UnsavedSecret:
+    """Validate a new secret to be created."""
+    secret_kind = SecretKind(body.kind.value)
+    return UnsavedSecret(
+        name=body.name,
+        default_filename=body.default_filename,
+        secret_value=body.value,
+        kind=secret_kind,
+    )
+
+
+def validate_secret_patch(patch: apispec.SecretPatch) -> SecretPatch:
+    """Validate the update to a secret."""
+    return SecretPatch(
+        name=patch.name,
+        default_filename=patch.default_filename,
+        secret_value=patch.value,
+    )
diff --git a/components/renku_data_services/users/db.py b/components/renku_data_services/users/db.py
index 38486d31d..496c02194 100644
--- a/components/renku_data_services/users/db.py
+++ b/components/renku_data_services/users/db.py
@@ -1,27 +1,28 @@
 """Database adapters and helpers for users."""
 
+from __future__ import annotations
+
 import secrets
-from collections.abc import AsyncGenerator, Callable
+from abc import abstractmethod
+from collections.abc import AsyncGenerator, Callable, Mapping
 from dataclasses import dataclass, field
 from datetime import UTC, datetime, timedelta
-from typing import Any, cast
+from typing import Any, Protocol, cast
 
-from sanic.log import logger
 from sqlalchemy import delete, func, select
 from sqlalchemy.ext.asyncio import AsyncSession
 
 from renku_data_services import base_models
+from renku_data_services.app_config import logging
 from renku_data_services.authz.authz import Authz, AuthzOperation, ResourceType
 from renku_data_services.base_api.auth import APIUser, only_authenticated
 from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId
+from renku_data_services.base_models.nel import Nel
 from renku_data_services.errors import errors
-from renku_data_services.message_queue import events
-from renku_data_services.message_queue.avro_models.io.renku.events import v2 as avro_schema_v2
-from renku_data_services.message_queue.db import EventRepository
-from renku_data_services.message_queue.interface import IMessageQueue
-from renku_data_services.message_queue.redis_queue import dispatch_message
 from renku_data_services.namespace.db import GroupRepository
 from renku_data_services.namespace.orm import NamespaceORM
+from renku_data_services.search.db import SearchUpdatesRepo
+from renku_data_services.search.decorators import update_search_document
 from renku_data_services.users.config import UserPreferencesConfig
 from renku_data_services.users.kc_api import IKeycloakAPI
 from renku_data_services.users.models import (
@@ -39,22 +40,57 @@
 from renku_data_services.utils.core import with_db_transaction
 from renku_data_services.utils.cryptography import decrypt_string, encrypt_string
 
+logger = logging.getLogger(__name__)
+
+
+class UsernameResolver(Protocol):
+    """Resolve usernames to their ids."""
+
+    @abstractmethod
+    async def resolve_usernames(self, names: Nel[str]) -> Mapping[str, str]:
+        """Return a mapping of username to user id."""
+        ...
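+
+# Usage sketch (illustrative, not part of this change): any object satisfying
+# this protocol maps usernames to user ids; names with no matching user
+# namespace are simply absent from the result. `Nel.of` is assumed here as a
+# hypothetical constructor for the non-empty list type.
+#
+#     resolver: UsernameResolver = user_repo  # e.g. the UserRepo defined below
+#     ids = await resolver.resolve_usernames(Nel.of("alice", "bob"))
+#     alice_id = ids.get("alice")  # None if "alice" has no user namespace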
+ + +class DbUsernameResolver(UsernameResolver, Protocol): + """Resolve usernames using the database.""" + + @abstractmethod + def make_session(self) -> AsyncSession: + """Create a db session.""" + ... + + async def resolve_usernames(self, names: Nel[str]) -> dict[str, str]: + """Resolve usernames to their user ids.""" + async with self.make_session() as session, session.begin(): + result = await session.execute( + select(NamespaceORM.slug, NamespaceORM.user_id).where( + NamespaceORM.slug.in_(names), NamespaceORM.user_id.is_not(None) + ) + ) + ret: dict[str, str] = {} + for slug, id in result: + ret.update({slug: id}) + + return ret + @dataclass -class UserRepo: +class UserRepo(DbUsernameResolver): """An adapter for accessing users from the database.""" session_maker: Callable[..., AsyncSession] - message_queue: IMessageQueue - event_repo: EventRepository group_repo: GroupRepository + search_updates_repo: SearchUpdatesRepo encryption_key: bytes | None = field(repr=False) authz: Authz def __post_init__(self) -> None: - self._users_sync = UsersSync( - self.session_maker, self.message_queue, self.event_repo, self.group_repo, self, self.authz - ) + self._users_sync = UsersSync(self.session_maker, self.group_repo, self, self.authz) + + def make_session(self) -> AsyncSession: + """Create a db session.""" + return self.session_maker() async def initialize(self, kc_api: IKeycloakAPI) -> None: """Do a total sync of users from Keycloak if there is nothing in the DB.""" @@ -145,7 +181,7 @@ async def remove_user(self, requested_by: APIUser, user_id: str) -> DeletedUser @with_db_transaction @Authz.authz_change(AuthzOperation.delete, ResourceType.user) - @dispatch_message(avro_schema_v2.UserRemoved) + @update_search_document async def _remove_user( self, requested_by: APIUser, user_id: str, *, session: AsyncSession | None = None ) -> DeletedUser | None: @@ -191,18 +227,15 @@ class UsersSync: def __init__( self, session_maker: Callable[..., AsyncSession], - message_queue: IMessageQueue, - event_repo: EventRepository, group_repo: GroupRepository, user_repo: UserRepo, authz: Authz, ) -> None: self.session_maker = session_maker - self.message_queue: IMessageQueue = message_queue - self.event_repo: EventRepository = event_repo self.group_repo = group_repo self.user_repo = user_repo self.authz = authz + self.search_updates_repo = user_repo.search_updates_repo async def _get_user(self, id: str) -> UserInfo | None: """Get a specific user.""" @@ -214,7 +247,7 @@ async def _get_user(self, id: str) -> UserInfo | None: @with_db_transaction @Authz.authz_change(AuthzOperation.update_or_insert, ResourceType.user) - @dispatch_message(events.UpdateOrInsertUser) + @update_search_document async def update_or_insert_user( self, user: UnsavedUserInfo, *, session: AsyncSession | None = None ) -> UserInfoUpdate: @@ -381,7 +414,7 @@ async def get_user_preferences( user_preferences = res.one_or_none() if user_preferences is None: - raise errors.MissingResourceError(message="Preferences not found for user.", quiet=True) + raise errors.MissingResourceError(message="Preferences not found for user.") return user_preferences.dump() @only_authenticated @@ -405,7 +438,8 @@ async def add_pinned_project(self, requested_by: APIUser, project_slug: str) -> if user_preferences is None: new_preferences = UserPreferences( - user_id=cast(str, requested_by.id), pinned_projects=PinnedProjects(project_slugs=[project_slug]) + user_id=cast(str, requested_by.id), + pinned_projects=PinnedProjects(project_slugs=[project_slug]), ) user_preferences = 
UserPreferencesORM.load(new_preferences) session.add(user_preferences) @@ -442,7 +476,7 @@ async def remove_pinned_project(self, requested_by: APIUser, project_slug: str) user_preferences = res.one_or_none() if user_preferences is None: - raise errors.MissingResourceError(message="Preferences not found for user.", quiet=True) + raise errors.MissingResourceError(message="Preferences not found for user.") project_slugs: list[str] project_slugs = user_preferences.pinned_projects.get("project_slugs", []) @@ -455,3 +489,38 @@ async def remove_pinned_project(self, requested_by: APIUser, project_slug: str) pinned_projects = PinnedProjects(project_slugs=new_project_slugs).model_dump() user_preferences.pinned_projects = pinned_projects return user_preferences.dump() + + @only_authenticated + async def add_dismiss_project_migration_banner(self, requested_by: base_models.APIUser) -> UserPreferences: + """Set the dismiss project migration banner to true.""" + async with self.session_maker() as session, session.begin(): + result = await session.scalars( + select(UserPreferencesORM).where(UserPreferencesORM.user_id == cast(str, requested_by.id)) + ) + user_preferences_orm = result.one_or_none() + if user_preferences_orm is None: + user_preferences_orm = UserPreferencesORM( + user_id=cast(str, requested_by.id), + pinned_projects={"project_slugs": []}, + show_project_migration_banner=False, + ) + session.add(user_preferences_orm) + else: + user_preferences_orm.show_project_migration_banner = False + + await session.flush() + await session.refresh(user_preferences_orm) + return user_preferences_orm.dump() + + @only_authenticated + async def remove_dismiss_project_migration_banner(self, requested_by: APIUser) -> UserPreferences: + """Removes dismiss project migration banner from the user's preferences.""" + async with self.session_maker() as session, session.begin(): + res = await session.scalars(select(UserPreferencesORM).where(UserPreferencesORM.user_id == requested_by.id)) + user_preferences = res.one_or_none() + + if user_preferences is None: + raise errors.MissingResourceError(message="Preferences not found for user.", quiet=True) + + user_preferences.show_project_migration_banner = True + return user_preferences.dump() diff --git a/components/renku_data_services/users/kc_api.py b/components/renku_data_services/users/kc_api.py index 19c6f0720..81baff015 100644 --- a/components/renku_data_services/users/kc_api.py +++ b/components/renku_data_services/users/kc_api.py @@ -6,12 +6,13 @@ from datetime import date from typing import Any, ClassVar, Protocol, cast -import requests +from authlib.integrations.base_client import InvalidTokenError from authlib.integrations.requests_client import OAuth2Session from authlib.oauth2.rfc7523 import ClientSecretJWT from requests.adapters import HTTPAdapter from urllib3.util import Retry +from renku_data_services.errors import errors from renku_data_services.users.models import KeycloakAdminEvent, KeycloakEvent @@ -51,7 +52,7 @@ class KeycloakAPI: realm: str = "Renku" client_id: str = "renku" result_per_request_limit: int = 20 - _http_client: requests.Session = field(init=False, repr=False) + _http_client: OAuth2Session = field(init=False, repr=False) admin_role: ClassVar[str] = "renku-admin" def __post_init__(self) -> None: @@ -61,21 +62,33 @@ def __post_init__(self) -> None: backoff_factor=2, ) adapter = HTTPAdapter(max_retries=retry_strategy) - token_endpoint = f"{self.keycloak_url}/realms/{self.realm}/protocol/openid-connect/token" + token_endpoint = 
self.__token_endpoint session = OAuth2Session( client_id=self.client_id, client_secret=self.client_secret, token_endpoint_auth_method=ClientSecretJWT(token_endpoint), + token_endpoint=token_endpoint, ) session.mount("http://", adapter) session.mount("https://", adapter) - session.fetch_token( - url=token_endpoint, + self._http_client = session + self.__fetch_token() + + @property + def __token_endpoint(self) -> str: + return f"{self.keycloak_url}/realms/{self.realm}/protocol/openid-connect/token" + + def __fetch_token(self) -> None: + if self._http_client is None: + raise errors.ProgrammingError( + message="Cannot fetch a new Keycloak token if the HTTP Keycloak client is not initialized" + ) + self._http_client.fetch_token( client_id=self.client_id, client_secret=self.client_secret, + url=self.__token_endpoint, grant_type="client_credentials", ) - self._http_client = session def _paginated_requests_iter(self, path: str, query_args: dict[str, Any] | None = None) -> Iterable[dict[str, Any]]: url = self.keycloak_url + path @@ -84,7 +97,12 @@ def _paginated_requests_iter(self, path: str, query_args: dict[str, Any] | None req_query_args["max"] = self.result_per_request_limit + 1 first = 0 while True: - res = self._http_client.get(url, params={**req_query_args, "first": first}) + try: + res = self._http_client.get(url, params={**req_query_args, "first": first}) + except InvalidTokenError: + # NOTE: The library does not support getting new tokens automatically with client_credentials grant + self.__fetch_token() + continue output = res.json() if not isinstance(output, list): raise ValueError( diff --git a/components/renku_data_services/users/models.py b/components/renku_data_services/users/models.py index 1c7558f74..8a77fc78e 100644 --- a/components/renku_data_services/users/models.py +++ b/components/renku_data_services/users/models.py @@ -9,24 +9,26 @@ from typing import Any, NamedTuple from pydantic import BaseModel, Field -from sanic.log import logger -from renku_data_services.namespace.models import Namespace +from renku_data_services.app_config import logging +from renku_data_services.namespace.models import UserNamespace + +logger = logging.getLogger(__name__) class KeycloakEvent(Enum): """The Keycloak user events that result from the user registering or updating their personal information.""" - REGISTER: str = "REGISTER" - UPDATE_PROFILE: str = "UPDATE_PROFILE" + REGISTER = "REGISTER" + UPDATE_PROFILE = "UPDATE_PROFILE" class KeycloakAdminEvent(Enum): """The Keycloak admin events used to keep users up to date.""" - DELETE: str = "DELETE" - UPDATE: str = "UPDATE" - CREATE: str = "CREATE" + DELETE = "DELETE" + UPDATE = "UPDATE" + CREATE = "CREATE" @dataclass @@ -40,7 +42,7 @@ class UserInfoFieldUpdate: old_value: str | None = None @classmethod - def from_json_user_events(self, val: Iterable[dict[str, Any]]) -> list["UserInfoFieldUpdate"]: + def from_json_user_events(cls, val: Iterable[dict[str, Any]]) -> list["UserInfoFieldUpdate"]: """Generate a list of updates from a json response from Keycloak.""" output: list[UserInfoFieldUpdate] = [] for event in val: @@ -131,7 +133,7 @@ def from_json_user_events(self, val: Iterable[dict[str, Any]]) -> list["UserInfo return output @classmethod - def from_json_admin_events(self, val: Iterable[dict[str, Any]]) -> list["UserInfoFieldUpdate"]: + def from_json_admin_events(cls, val: Iterable[dict[str, Any]]) -> list["UserInfoFieldUpdate"]: """Generate a list of updates from a json response from Keycloak.""" output: list[UserInfoFieldUpdate] = [] for event 
in val: @@ -225,7 +227,7 @@ def from_kc_user_payload(cls, payload: dict[str, Any]) -> "UnsavedUserInfo": email=payload.get("email"), ) - def _to_keycloak_dict(self) -> dict[str, Any]: + def to_keycloak_dict(self) -> dict[str, Any]: """Create a payload that would have been created by Keycloak for this user, used only for testing.""" return { @@ -257,7 +259,7 @@ def _to_keycloak_dict(self) -> dict[str, Any]: class UserInfo(UnsavedUserInfo): """A tuple used to convey information about a user and their namespace.""" - namespace: Namespace + namespace: UserNamespace @dataclass(frozen=True, eq=True, kw_only=True) @@ -308,3 +310,4 @@ class UserPreferences(BaseModel): user_id: str = Field(min_length=3) pinned_projects: PinnedProjects + show_project_migration_banner: bool = True diff --git a/components/renku_data_services/users/orm.py b/components/renku_data_services/users/orm.py index e6023340a..a53b45f51 100644 --- a/components/renku_data_services/users/orm.py +++ b/components/renku_data_services/users/orm.py @@ -3,7 +3,7 @@ from datetime import datetime from typing import TYPE_CHECKING, Any, Optional -from sqlalchemy import JSON, DateTime, Identity, Integer, LargeBinary, MetaData, String +from sqlalchemy import JSON, Boolean, DateTime, Identity, Integer, LargeBinary, MetaData, String, true from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column, relationship @@ -42,7 +42,7 @@ def dump(self) -> UserInfo: first_name=self.first_name, last_name=self.last_name, email=self.email, - namespace=self.namespace.dump(), + namespace=self.namespace.dump_user_namespace(), ) @classmethod @@ -53,7 +53,7 @@ def load(cls, user: UserInfo) -> "UserORM": first_name=user.first_name, last_name=user.last_name, email=user.email, - namespace=NamespaceORM.load(user.namespace), + namespace=NamespaceORM.load_user(user.namespace), ) @@ -79,14 +79,26 @@ class UserPreferencesORM(BaseORM): pinned_projects: Mapped[dict[str, Any]] = mapped_column("pinned_projects", JSONVariant) """Pinned projects.""" + show_project_migration_banner: Mapped[bool] = mapped_column( + "show_project_migration_banner", + Boolean, + server_default=true(), + ) + """Show project migration banner.""" + @classmethod def load(cls, user_preferences: UserPreferences) -> "UserPreferencesORM": """Create UserPreferencesORM from the user preferences model.""" return cls( user_id=user_preferences.user_id, pinned_projects=user_preferences.pinned_projects.model_dump(), + show_project_migration_banner=user_preferences.show_project_migration_banner, ) def dump(self) -> UserPreferences: """Create a user preferences model from the ORM object.""" - return UserPreferences(user_id=self.user_id, pinned_projects=PinnedProjects.from_dict(self.pinned_projects)) + return UserPreferences( + user_id=self.user_id, + pinned_projects=PinnedProjects.from_dict(self.pinned_projects), + show_project_migration_banner=self.show_project_migration_banner, + ) diff --git a/components/renku_data_services/utils/etag.py b/components/renku_data_services/utils/etag.py index 11521cae6..393ac6fc2 100644 --- a/components/renku_data_services/utils/etag.py +++ b/components/renku_data_services/utils/etag.py @@ -2,11 +2,28 @@ from datetime import datetime from hashlib import md5 +from typing import Any -def compute_etag_from_timestamp(updated_at: datetime, include_quotes: bool = False) -> str: +def compute_etag_from_timestamp(updated_at: datetime) -> str: """Computes an entity tag value by hashing the updated_at value.""" etag = 
md5(updated_at.isoformat().encode(), usedforsecurity=False).hexdigest().upper() - if not include_quotes: - return etag return f'"{etag}"' + + +def compute_etag_from_fields(updated_at: datetime, *args: Any) -> str: + """Computes an entity tag value by hashing the field values. + + By convention, the first field should be `updated_at`. + """ + values: list[Any] = [updated_at] + values.extend(arg for arg in args) + to_hash = "-".join(_get_hashable_string(value) for value in values) + etag = md5(to_hash.encode(), usedforsecurity=False).hexdigest().upper() + return f'"{etag}"' + + +def _get_hashable_string(value: Any) -> str: + if isinstance(value, datetime): + return value.isoformat() + return str(value) diff --git a/components/renku_pack_builder/manifests/buildrun.yaml b/components/renku_pack_builder/manifests/buildrun.yaml new file mode 100644 index 000000000..ee8b829b4 --- /dev/null +++ b/components/renku_pack_builder/manifests/buildrun.yaml @@ -0,0 +1,32 @@ +--- +# Example BuildRun generated by renku-data-services +# Session Launcher Configuration: +# container_image: "harbor.dev.renku.ch//:" +# default_url: "/" +# port: +# mount_directory: "/home/ubuntu/work" +# working_directory: "/home/ubuntu/work" +# uid: 1000 +# gid: 1000 +# command: ["bash"] +# args: ["/entrypoint.sh"] +apiVersion: shipwright.io/v1beta1 +kind: BuildRun +metadata: + name: renku-01jmcagx9nfphatdd4ev99bmrq +spec: + build: + spec: + source: + type: Git + git: + url: https://gitlab.dev.renku.ch/flora.thiebaut/python-simple.git + strategy: + name: renku-buildpacks + kind: BuildStrategy + paramValues: + - name: run-image + value: renku/renkulab-vscodium-python-runimage:ubuntu-c794f36 + output: + image: harbor.dev.renku.ch/renku-builds/renku-build:renku-01jmcagx9nfphatdd4ev99bmrq + pushSecret: renku-builds-docker-secret diff --git a/components/renku_pack_builder/manifests/buildstrategy.yaml b/components/renku_pack_builder/manifests/buildstrategy.yaml new file mode 100644 index 000000000..e5d4b45a1 --- /dev/null +++ b/components/renku_pack_builder/manifests/buildstrategy.yaml @@ -0,0 +1,90 @@ +--- +apiVersion: shipwright.io/v1beta1 +kind: BuildStrategy +metadata: + name: renku-buildpacks-v2 +spec: + parameters: + - name: platform-api-version + description: The referenced version is the minimum version that all relevant buildpack implementations support. + default: "0.12" + - name: run-image + description: The image to use as the base for all session images built with this strategy + default: "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/base-image:0.0.6" + - name: builder-image + description: The buildpack builder image to use + default: "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/selector:0.0.6" + - name: frontend + description: Which frontend should be used in the image. Either "jupyterlab" or "vscodium". + default: vscodium + steps: + - name: build-and-push + image: $(params.builder-image) + imagePullPolicy: IfNotPresent + env: + - name: CNB_PLATFORM_API + value: $(params.platform-api-version) + - name: BP_RENKU_FRONTENDS + value: $(params.frontend) + - name: PARAM_SOURCE_CONTEXT + value: $(params.shp-source-context) + - name: PARAM_OUTPUT_IMAGE + value: $(params.shp-output-image) + - name: PARAM_RUN_IMAGE + value: $(params.run-image) + command: + - /bin/bash + args: + - -c + - | + set -euo pipefail + + echo "> Processing environment variables..." + ENV_DIR="/platform/env" + + envs=($(env)) + + # Denying the creation of non required files from system environments. 
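+          # (Background: every file created under /platform/env becomes an
+          # environment variable for the buildpacks, named after the file and
+          # holding the file's contents as its value; hence the block list below.)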
+ # The creation of a file named PATH (corresponding to PATH system environment) + # caused failure for python source during pip install (https://github.com/Azure-Samples/python-docs-hello-world) + block_list=("PATH" "HOSTNAME" "PWD" "_" "SHLVL" "HOME" "") + + for env in "${envs[@]}"; do + blocked=false + + IFS='=' read -r key value string <<< "$env" + + for str in "${block_list[@]}"; do + if [[ "$key" == "$str" ]]; then + blocked=true + break + fi + done + + if [ "$blocked" == "false" ]; then + path="${ENV_DIR}/${key}" + echo -n "$value" > "$path" + fi + done + + /cnb/lifecycle/creator \ + -app=${PARAM_SOURCE_CONTEXT} \ + -report=/tmp/report.toml \ + -run-image=${PARAM_RUN_IMAGE} \ + ${PARAM_OUTPUT_IMAGE} + volumeMounts: + - mountPath: /platform/env + name: platform-env + resources: + limits: + cpu: 4000m + memory: 8Gi + requests: + cpu: 2000m + memory: 4Gi + volumes: + - name: platform-env + emptyDir: {} + securityContext: + runAsUser: 1001 + runAsGroup: 1000 diff --git a/development/.keep b/development/.keep deleted file mode 100644 index e69de29bb..000000000 diff --git a/flake.lock b/flake.lock index 4973e05b3..a5efc080f 100644 --- a/flake.lock +++ b/flake.lock @@ -3,14 +3,16 @@ "devshell-tools": { "inputs": { "flake-utils": "flake-utils", - "nixpkgs": "nixpkgs" + "nixpkgs": [ + "nixpkgs" + ] }, "locked": { - "lastModified": 1724320658, - "narHash": "sha256-YTgT2yHf936sxRHZiggOf5g+wcfxnlz9fAbRSKh5ukI=", + "lastModified": 1750151194, + "narHash": "sha256-2/U7Ifd46PFr7ZT0wWWAVY2usfqfVqN+/a5F0sXFsWQ=", "owner": "eikek", "repo": "devshell-tools", - "rev": "8f2b96bb183d647ca84a8538305f2b0529527b18", + "rev": "1db1256aba5f93e13f519c80bcbc51368a358491", "type": "github" }, "original": { @@ -24,11 +26,11 @@ "systems": "systems" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", "type": "github" }, "original": { @@ -42,11 +44,11 @@ "systems": "systems_2" }, "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", + "lastModified": 1731533236, + "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=", "owner": "numtide", "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", + "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b", "type": "github" }, "original": { @@ -55,121 +57,27 @@ "type": "github" } }, - "flake-utils_3": { - "inputs": { - "systems": "systems_3" - }, - "locked": { - "lastModified": 1710146030, - "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=", - "owner": "numtide", - "repo": "flake-utils", - "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "flake-utils", - "type": "github" - } - }, - "nix-github-actions": { - "inputs": { - "nixpkgs": [ - "poetry2nix", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1703863825, - "narHash": "sha256-rXwqjtwiGKJheXB43ybM8NwWB8rO2dSRrEqes0S7F5Y=", - "owner": "nix-community", - "repo": "nix-github-actions", - "rev": "5163432afc817cf8bd1f031418d1869e4c9d5547", - "type": "github" - }, - "original": { - "owner": "nix-community", - "repo": "nix-github-actions", - "type": "github" - } - }, "nixpkgs": { "locked": { - "lastModified": 1721548954, - "narHash": 
"sha256-7cCC8+Tdq1+3OPyc3+gVo9dzUNkNIQfwSDJ2HSi2u3o=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "63d37ccd2d178d54e7fb691d7ec76000740ea24a", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "nixos-24.05", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs_2": { - "locked": { - "lastModified": 1724224171, - "narHash": "sha256-zyKHydkh5PPQiDPKsgpUmtHGAjR5sNcCKAe56+3W4K0=", - "owner": "NixOS", - "repo": "nixpkgs", - "rev": "aea6e7b3614e103cdab310050a9046a4985886d6", - "type": "github" - }, - "original": { - "owner": "NixOS", - "ref": "release-24.05", - "repo": "nixpkgs", - "type": "github" - } - }, - "nixpkgs_3": { - "locked": { - "lastModified": 1719763542, - "narHash": "sha256-mXkOj9sJ0f69Nkc2dGGOWtof9d1YNY8Le/Hia3RN+8Q=", + "lastModified": 1750776420, + "narHash": "sha256-/CG+w0o0oJ5itVklOoLbdn2dGB0wbZVOoDm4np6w09A=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "e6cdd8a11b26b4d60593733106042141756b54a3", + "rev": "30a61f056ac492e3b7cdcb69c1e6abdcf00e39cf", "type": "github" }, "original": { "owner": "NixOS", - "ref": "nixos-unstable-small", + "ref": "nixos-unstable", "repo": "nixpkgs", "type": "github" } }, - "poetry2nix": { - "inputs": { - "flake-utils": "flake-utils_3", - "nix-github-actions": "nix-github-actions", - "nixpkgs": "nixpkgs_3", - "systems": "systems_4", - "treefmt-nix": "treefmt-nix" - }, - "locked": { - "lastModified": 1724208502, - "narHash": "sha256-TCRcEPSfgAw/t7kClmlr23s591N06mQCrhzlAO7cyFw=", - "owner": "nix-community", - "repo": "poetry2nix", - "rev": "884b66152b0c625b8220b570a31dc7acc36749a3", - "type": "github" - }, - "original": { - "owner": "nix-community", - "repo": "poetry2nix", - "type": "github" - } - }, "root": { "inputs": { "devshell-tools": "devshell-tools", "flake-utils": "flake-utils_2", - "nixpkgs": "nixpkgs_2", - "poetry2nix": "poetry2nix" + "nixpkgs": "nixpkgs" } }, "systems": { @@ -201,56 +109,6 @@ "repo": "default", "type": "github" } - }, - "systems_3": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "owner": "nix-systems", - "repo": "default", - "type": "github" - } - }, - "systems_4": { - "locked": { - "lastModified": 1681028828, - "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=", - "owner": "nix-systems", - "repo": "default", - "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e", - "type": "github" - }, - "original": { - "id": "systems", - "type": "indirect" - } - }, - "treefmt-nix": { - "inputs": { - "nixpkgs": [ - "poetry2nix", - "nixpkgs" - ] - }, - "locked": { - "lastModified": 1719749022, - "narHash": "sha256-ddPKHcqaKCIFSFc/cvxS14goUhCOAwsM1PbMr0ZtHMg=", - "owner": "numtide", - "repo": "treefmt-nix", - "rev": "8df5ff62195d4e67e2264df0b7f5e8c9995fd0bd", - "type": "github" - }, - "original": { - "owner": "numtide", - "repo": "treefmt-nix", - "type": "github" - } } }, "root": "root", diff --git a/flake.nix b/flake.nix index 183f9c1ad..15f71476a 100644 --- a/flake.nix +++ b/flake.nix @@ -1,9 +1,9 @@ { inputs = { - nixpkgs.url = "github:NixOS/nixpkgs/release-24.05"; + nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable"; flake-utils.url = "github:numtide/flake-utils"; devshell-tools.url = "github:eikek/devshell-tools"; - poetry2nix.url = "github:nix-community/poetry2nix"; + devshell-tools.inputs.nixpkgs.follows = "nixpkgs"; }; outputs = inputs @ { @@ -11,14 +11,16 @@ nixpkgs, flake-utils, 
devshell-tools, - poetry2nix, }: { nixosConfigurations = let + system = flake-utils.lib.system.x86_64-linux; services = { services.dev-postgres = { enable = true; - databases = ["renku"]; + databases = ["renku_test"]; + init-script = ./.devcontainer/generate_ulid_func.sql; + pkg = nixpkgs.legacyPackages.${system}.postgresql_16; pgweb = { enable = true; database = "renku"; @@ -31,10 +33,15 @@ enable = true; openapi-spec = "http://localhost:8000/api/data/spec.json"; }; + services.dev-solr = { + enable = true; + cores = ["renku-search-dev"]; + heap = 1024; + }; }; in { rdsdev-vm = devshell-tools.lib.mkVm { - system = flake-utils.lib.system.x86_64-linux; + inherit system; modules = [ { virtualisation.memorySize = 2048; @@ -55,114 +62,174 @@ pkgs = nixpkgs.legacyPackages.${system}; devshellToolsPkgs = devshell-tools.packages.${system}; - poetryLib = poetry2nix.lib.mkPoetry2Nix {inherit pkgs;}; - p2n-args = { - projectDir = ./.; - python = pkgs.python312; - editablePackageSources = { - bases = ./bases; - components = ./components; + rclone-sdsc = pkgs.rclone.overrideAttrs (old: { + version = "1.70.0"; + vendorHash = "sha256-9yEWEM96cRUzp1mRXEzxvOaBZQsf7Zifoe163OtJCPw="; + nativeInstallCheckInputs = []; + src = pkgs.fetchFromGitHub { + owner = "SwissDataScienceCenter"; + repo = "rclone"; + rev = "v1.70.0+renku-1"; + sha256 = "sha256-JJk3H9aExACIxSGwZYgZzuefeoZtJrTUrv7ffk+Xpzg="; }; - extraPackages = p: [ - p.ruff-lsp - ]; - overrides = let - add-setuptools = name: final: prev: - prev.${name}.overridePythonAttrs (old: {buildInputs = (old.buildInputs or []) ++ [prev.setuptools];}); - add-poetry = name: final: prev: - prev.${name}.overridePythonAttrs (old: {buildInputs = (old.buildInputs or []) ++ [prev.poetry];}); - in - poetryLib.defaultPoetryOverrides.extend - (final: prev: { - appier = add-setuptools "appier" final prev; - inflector = add-setuptools "inflector" final prev; - google-api = add-setuptools "google-api" final prev; - sanic-ext = add-setuptools "sanic-ext" final prev; - undictify = add-setuptools "undictify" final prev; - types-cffi = add-setuptools "types-cffi" final prev; - avro-preprocessor = add-setuptools "avro-preprocessor" final prev; - authzed = add-poetry "authzed" final prev; - dataclasses-avroschema = add-poetry "dataclasses-avroschema" final prev; - datamodel-code-generator = add-poetry "datamodel-code-generator" final prev; - kubernetes-asyncio = add-setuptools "kubernetes-asyncio" final prev; - prometheus-sanic = - prev.prometheus-sanic.overridePythonAttrs - ( - old: { - buildInputs = (old.buildInputs or []) ++ [prev.poetry prev.poetry-core]; - # fix the wrong dependency - # see https://github.com/nix-community/poetry2nix/issues/1694 - postPatch = '' - substituteInPlace pyproject.toml --replace "poetry.masonry" "poetry.core.masonry" - ''; - } - ); - }); - }; + }); - projectEnv = poetryLib.mkPoetryEnv p2n-args; - - devSettings = { - CORS_ALLOW_ALL_ORIGINS = "true"; - DB_USER = "dev"; - DB_NAME = "renku"; - DB_PASSWORD = "dev"; - AUTHZ_DB_KEY = "dev"; - AUTHZ_DB_NO_TLS_CONNECTION = "true"; - AUTHZ_DB_GRPC_PORT = "50051"; - ALEMBIC_CONFIG = "./components/renku_data_services/migrations/alembic.ini"; - - # necessary for poetry run … as it might need to compile stuff - # ONLY WHEN NOT using python from nix dev environment - #LD_LIBRARY_PATH = "${pkgs.stdenv.cc.cc.lib}/lib"; + ruff = pkgs.ruff.overrideAttrs (old: rec { + pname = "ruff"; + version = "0.8.6"; + src = pkgs.fetchFromGitHub { + owner = "astral-sh"; + repo = "ruff"; + tag = "0.8.6"; + hash = 
"sha256-9YvHmNiKdf5hKqy9tToFSQZM2DNLoIiChcfjQay8wbU="; + }; + cargoDeps = pkgs.rustPlatform.fetchCargoVendor { + inherit src; + name = "${pname}-${version}"; + hash = "sha256-aTzTCDCMhG4cKD9wFNHv6A3VBUifnKgI8a6kelc3bAM="; + }; + }); + + poetrySettings = { + LD_LIBRARY_PATH = "${pkgs.stdenv.cc.cc.lib}/lib"; POETRY_VIRTUALENVS_PREFER_ACTIVE_PYTHON = "true"; POETRY_VIRTUALENVS_OPTIONS_SYSTEM_SITE_PACKAGES = "true"; + POETRY_INSTALLER_NO_BINARY = "ruff"; }; + devSettings = + poetrySettings + // { + CORS_ALLOW_ALL_ORIGINS = "true"; + DB_USER = "dev"; + DB_NAME = "renku"; + DB_PASSWORD = "dev"; + PGPASSWORD = "dev"; + PSQLRC = pkgs.writeText "rsdrc.sql" '' + SET SEARCH_PATH TO authz,common,connected_services,events,platform,projects,public,resource_pools,secrets,sessions,storage,users + ''; + AUTHZ_DB_KEY = "dev"; + AUTHZ_DB_NO_TLS_CONNECTION = "true"; + AUTHZ_DB_GRPC_PORT = "50051"; + + DUMMY_STORES = "true"; + + ZED_ENDPOINT = "localhost:50051"; + ZED_TOKEN = "dev"; + + SOLR_BIN_PATH = "${devshellToolsPkgs.solr}/bin/solr"; + + shellHook = '' + export FLAKE_ROOT="$(git rev-parse --show-toplevel)" + export PATH="$FLAKE_ROOT/.venv/bin:$PATH" + export ALEMBIC_CONFIG="$FLAKE_ROOT/components/renku_data_services/migrations/alembic.ini" + export NB_SERVER_OPTIONS__DEFAULTS_PATH="$FLAKE_ROOT/server_defaults.json" + export NB_SERVER_OPTIONS__UI_CHOICES_PATH="$FLAKE_ROOT/server_options.json" + ''; + }; - fix-poetry-cfg = pkgs.writeShellScriptBin "poetry-fix-cfg" '' - python_exec="$(which python)" - python_bin="$(dirname "$python_exec")" - python_env="$(dirname "$python_bin")" - - env_path="$(poetry env info -p)" - if [ -z "$env_path" ]; then - poetry env use "$python_exec" - env_path="$(poetry env info -p)" - fi - env_cfg="$env_path/pyvenv.cfg" - - if [ ! -r "$env_path/pyvenv.cfg.bak" ]; then - cp "$env_path/pyvenv.cfg" "$env_path/pyvenv.cfg.bak" - fi - - echo "Fix paths in: $env_cfg" - ${pkgs.gnused}/bin/sed -i -E "s,home = (.*)$,home = $python_bin,g" "$env_cfg" - ${pkgs.gnused}/bin/sed -i -E "s,base-prefix = (.*)$,base-prefix = $python_env,g" "$env_cfg" - ${pkgs.gnused}/bin/sed -i -E "s,base-exec-prefix = (.*)$,base-exec-prefix = $python_env,g" "$env_cfg" - ${pkgs.gnused}/bin/sed -i -E "s,base-executable = (.*)$,base-executable = $python_exec,g" "$env_cfg" - ''; commonPackages = with pkgs; [ redis - postgresql + postgresql_16 jq devshellToolsPkgs.openapi-docs + devshellToolsPkgs.solr + devshellToolsPkgs.postgres-fg spicedb + cargo + rustc spicedb-zed ruff - ruff-lsp poetry - pyright - mypy - rclone - fix-poetry-cfg + python313 + basedpyright + rclone-sdsc + ( + writeShellScriptBin "pg" '' + psql -h $DB_HOST -p $DB_PORT -U dev $DB_NAME + '' + ) + (writeShellScriptBin "pyfix" '' + poetry run ruff check --fix + poetry run ruff format + '') + ( + writeShellScriptBin "poetry-setup" '' + venv_path="$(poetry env info -p)" + if [ "$1" == "-c" ]; then + echo "Removing virtual env at $venv_path" + rm -rf "$venv_path"/* + fi + poetry install + if ! poetry self show --addons | grep poetry-multiproject-plugin > /dev/null; then + poetry self add poetry-multiproject-plugin + fi + if ! 
poetry self show --addons | grep poetry-polylith-plugin > /dev/null; then + poetry self add poetry-polylith-plugin + fi + '' + ) + ( + writeShellScriptBin "zedl" '' + ${spicedb-zed}/bin/zed --no-verify-ca --insecure --endpoint ''$ZED_ENDPOINT --token ''$ZED_TOKEN $@ + '' + ) ]; in { formatter = pkgs.alejandra; devShells = rec { default = vm; - vm = projectEnv.env.overrideAttrs (oldAttrs: - devSettings + devcontainer = pkgs.mkShell (poetrySettings + // { + buildInputs = + commonPackages + ++ [ + pkgs.devcontainer + (pkgs.writeShellScriptBin "devc" '' + devcontainer exec --workspace-folder $FLAKE_ROOT \ + --remote-env POETRY_VIRTUALENVS_IN_PROJECT=false \ + -- bash -c "$@" + '') + (pkgs.writeShellScriptBin "devcontainer-up" '' + devcontainer up --workspace-folder $FLAKE_ROOT \ + --remote-env POETRY_VIRTUALENVS_IN_PROJECT=false + '') + (pkgs.writeShellScriptBin "devcontainer-build" '' + devcontainer build --workspace-folder $FLAKE_ROOT \ + --remote-env POETRY_VIRTUALENVS_IN_PROJECT=false + '') + (pkgs.writeShellScriptBin "devcontainer-destroy" '' + set -e + docker stop $(docker ps -a -q) + docker container ls -f "name=renku-data-services_*" -a -q | xargs docker rm -f + docker volume ls -f "name=renku-data-services_*" -q | xargs docker volume rm -f + '') + (pkgs.writeShellScriptBin "devcontainer-main-tests" '' + devcontainer exec --workspace-folder $FLAKE_ROOT \ + --remote-env POETRY_VIRTUALENVS_IN_PROJECT=false \ + -- bash -c "make main_tests" + '') + (pkgs.writeShellScriptBin "devcontainer-schemathesis" '' + devcontainer exec --workspace-folder $FLAKE_ROOT \ + --remote-env POETRY_VIRTUALENVS_IN_PROJECT=false \ + --remote-env HYPOTHESIS_PROFILE=ci \ + -- bash -c "make schemathesis_tests" + '') + (pkgs.writeShellScriptBin "devcontainer-pytest" '' + devcontainer exec --workspace-folder $FLAKE_ROOT \ + --remote-env POETRY_VIRTUALENVS_IN_PROJECT=false \ + --remote-env HYPOTHESIS_PROFILE=ci \ + --remote-env DUMMY_STORES=true \ + -- bash -c "poetry run pytest --no-cov -p no:warnings -s \"$@\"" + '') + ]; + + shellHook = '' + export FLAKE_ROOT="$(git rev-parse --show-toplevel)" + export PATH="$FLAKE_ROOT/.venv/bin:$PATH" + ''; + }); + vm = pkgs.mkShell (devSettings // { buildInputs = commonPackages @@ -174,6 +241,8 @@ DB_HOST = "localhost"; DB_PORT = "15432"; AUTHZ_DB_HOST = "localhost"; + SOLR_URL = "http://localhost:18983"; + SOLR_CORE = "renku-search-dev"; }); cnt = let @@ -183,7 +252,7 @@ ${pkgs.socat}/bin/socat TCP-LISTEN:50051,fork TCP:rsdevcnt:50051 ''; in - projectEnv.env.overrideAttrs (oldAttrs: + pkgs.mkShell ( devSettings // { buildInputs = @@ -196,7 +265,10 @@ DB_HOST = "rsdevcnt"; DB_PORT = "5432"; AUTHZ_DB_HOST = "localhost"; - }); + SOLR_URL = "http://rsdevcnt:8983"; + SOLR_CORE = "renku-search-dev"; + } + ); }; }); } diff --git a/helm-chart/renku-data-services/values.yaml b/helm-chart/renku-data-services/values.yaml index 5b2f15c74..84c7b055f 100644 --- a/helm-chart/renku-data-services/values.yaml +++ b/helm-chart/renku-data-services/values.yaml @@ -2,10 +2,18 @@ dataService: image: repository: renku/renku-data-service tag: + dataTasks: + image: + repository: renku/data-service-data-tasks + tag: backgroundJobs: image: repository: renku/data-service-background-jobs tag: + k8sWatcher: + image: + repository: renku/data-service-k8s-watcher + tag: secretsStorage: image: repository: renku/secrets-storage diff --git a/poetry.lock b/poetry.lock index b7d5a032d..1c941b63b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and 
should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "aiofiles" @@ -6,6 +6,7 @@ version = "24.1.0" description = "File support for asyncio." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, @@ -13,98 +14,105 @@ files = [ [[package]] name = "aiohappyeyeballs" -version = "2.4.3" +version = "2.6.1" description = "Happy Eyeballs for asyncio" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "aiohappyeyeballs-2.4.3-py3-none-any.whl", hash = "sha256:8a7a83727b2756f394ab2895ea0765a0a8c475e3c71e98d43d76f22b4b435572"}, - {file = "aiohappyeyeballs-2.4.3.tar.gz", hash = "sha256:75cf88a15106a5002a8eb1dab212525c00d1f4c0fa96e551c9fbe6f09a621586"}, + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, ] [[package]] name = "aiohttp" -version = "3.11.7" +version = "3.11.18" description = "Async http client/server framework (asyncio)" optional = false python-versions = ">=3.9" -files = [ - {file = "aiohttp-3.11.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8bedb1f6cb919af3b6353921c71281b1491f948ca64408871465d889b4ee1b66"}, - {file = "aiohttp-3.11.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f5022504adab881e2d801a88b748ea63f2a9d130e0b2c430824682a96f6534be"}, - {file = "aiohttp-3.11.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e22d1721c978a6494adc824e0916f9d187fa57baeda34b55140315fa2f740184"}, - {file = "aiohttp-3.11.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e993676c71288618eb07e20622572b1250d8713e7e00ab3aabae28cb70f3640d"}, - {file = "aiohttp-3.11.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e13a05db87d3b241c186d0936808d0e4e12decc267c617d54e9c643807e968b6"}, - {file = "aiohttp-3.11.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ba8d043fed7ffa117024d7ba66fdea011c0e7602327c6d73cacaea38abe4491"}, - {file = "aiohttp-3.11.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dda3ed0a7869d2fa16aa41f9961ade73aa2c2e3b2fcb0a352524e7b744881889"}, - {file = "aiohttp-3.11.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:43bfd25113c1e98aec6c70e26d5f4331efbf4aa9037ba9ad88f090853bf64d7f"}, - {file = "aiohttp-3.11.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3dd3e7e7c9ef3e7214f014f1ae260892286647b3cf7c7f1b644a568fd410f8ca"}, - {file = "aiohttp-3.11.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:78c657ece7a73b976905ab9ec8be9ef2df12ed8984c24598a1791c58ce3b4ce4"}, - {file = "aiohttp-3.11.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:db70a47987e34494b451a334605bee57a126fe8d290511349e86810b4be53b01"}, - {file = "aiohttp-3.11.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:9e67531370a3b07e49b280c1f8c2df67985c790ad2834d1b288a2f13cd341c5f"}, - {file = "aiohttp-3.11.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:9202f184cc0582b1db15056f2225ab4c1e3dac4d9ade50dd0613ac3c46352ac2"}, - {file = "aiohttp-3.11.7-cp310-cp310-win32.whl", hash = "sha256:2257bdd5cf54a4039a4337162cd8048f05a724380a2283df34620f55d4e29341"}, - {file = "aiohttp-3.11.7-cp310-cp310-win_amd64.whl", hash = "sha256:b7215bf2b53bc6cb35808149980c2ae80a4ae4e273890ac85459c014d5aa60ac"}, - {file = "aiohttp-3.11.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cea52d11e02123f125f9055dfe0ccf1c3857225fb879e4a944fae12989e2aef2"}, - {file = "aiohttp-3.11.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3ce18f703b7298e7f7633efd6a90138d99a3f9a656cb52c1201e76cb5d79cf08"}, - {file = "aiohttp-3.11.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:670847ee6aeb3a569cd7cdfbe0c3bec1d44828bbfbe78c5d305f7f804870ef9e"}, - {file = "aiohttp-3.11.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4dda726f89bfa5c465ba45b76515135a3ece0088dfa2da49b8bb278f3bdeea12"}, - {file = "aiohttp-3.11.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c25b74a811dba37c7ea6a14d99eb9402d89c8d739d50748a75f3cf994cf19c43"}, - {file = "aiohttp-3.11.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5522ee72f95661e79db691310290c4618b86dff2d9b90baedf343fd7a08bf79"}, - {file = "aiohttp-3.11.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fbf41a6bbc319a7816ae0f0177c265b62f2a59ad301a0e49b395746eb2a9884"}, - {file = "aiohttp-3.11.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59ee1925b5a5efdf6c4e7be51deee93984d0ac14a6897bd521b498b9916f1544"}, - {file = "aiohttp-3.11.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:24054fce8c6d6f33a3e35d1c603ef1b91bbcba73e3f04a22b4f2f27dac59b347"}, - {file = "aiohttp-3.11.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:351849aca2c6f814575c1a485c01c17a4240413f960df1bf9f5deb0003c61a53"}, - {file = "aiohttp-3.11.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:12724f3a211fa243570e601f65a8831372caf1a149d2f1859f68479f07efec3d"}, - {file = "aiohttp-3.11.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:7ea4490360b605804bea8173d2d086b6c379d6bb22ac434de605a9cbce006e7d"}, - {file = "aiohttp-3.11.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e0bf378db07df0a713a1e32381a1b277e62ad106d0dbe17b5479e76ec706d720"}, - {file = "aiohttp-3.11.7-cp311-cp311-win32.whl", hash = "sha256:cd8d62cab363dfe713067027a5adb4907515861f1e4ce63e7be810b83668b847"}, - {file = "aiohttp-3.11.7-cp311-cp311-win_amd64.whl", hash = "sha256:bf0e6cce113596377cadda4e3ac5fb89f095bd492226e46d91b4baef1dd16f60"}, - {file = "aiohttp-3.11.7-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4bb7493c3e3a36d3012b8564bd0e2783259ddd7ef3a81a74f0dbfa000fce48b7"}, - {file = "aiohttp-3.11.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e143b0ef9cb1a2b4f74f56d4fbe50caa7c2bb93390aff52f9398d21d89bc73ea"}, - {file = "aiohttp-3.11.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f7c58a240260822dc07f6ae32a0293dd5bccd618bb2d0f36d51c5dbd526f89c0"}, - {file = "aiohttp-3.11.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d20cfe63a1c135d26bde8c1d0ea46fd1200884afbc523466d2f1cf517d1fe33"}, - {file = "aiohttp-3.11.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12e4d45847a174f77b2b9919719203769f220058f642b08504cf8b1cf185dacf"}, - {file = "aiohttp-3.11.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:cf4efa2d01f697a7dbd0509891a286a4af0d86902fc594e20e3b1712c28c0106"}, - {file = "aiohttp-3.11.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ee6a4cdcbf54b8083dc9723cdf5f41f722c00db40ccf9ec2616e27869151129"}, - {file = "aiohttp-3.11.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6095aaf852c34f42e1bd0cf0dc32d1e4b48a90bfb5054abdbb9d64b36acadcb"}, - {file = "aiohttp-3.11.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1cf03d27885f8c5ebf3993a220cc84fc66375e1e6e812731f51aab2b2748f4a6"}, - {file = "aiohttp-3.11.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1a17f6a230f81eb53282503823f59d61dff14fb2a93847bf0399dc8e87817307"}, - {file = "aiohttp-3.11.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:481f10a1a45c5f4c4a578bbd74cff22eb64460a6549819242a87a80788461fba"}, - {file = "aiohttp-3.11.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:db37248535d1ae40735d15bdf26ad43be19e3d93ab3f3dad8507eb0f85bb8124"}, - {file = "aiohttp-3.11.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9d18a8b44ec8502a7fde91446cd9c9b95ce7c49f1eacc1fb2358b8907d4369fd"}, - {file = "aiohttp-3.11.7-cp312-cp312-win32.whl", hash = "sha256:3d1c9c15d3999107cbb9b2d76ca6172e6710a12fda22434ee8bd3f432b7b17e8"}, - {file = "aiohttp-3.11.7-cp312-cp312-win_amd64.whl", hash = "sha256:018f1b04883a12e77e7fc161934c0f298865d3a484aea536a6a2ca8d909f0ba0"}, - {file = "aiohttp-3.11.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:241a6ca732d2766836d62c58c49ca7a93d08251daef0c1e3c850df1d1ca0cbc4"}, - {file = "aiohttp-3.11.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:aa3705a8d14de39898da0fbad920b2a37b7547c3afd2a18b9b81f0223b7d0f68"}, - {file = "aiohttp-3.11.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9acfc7f652b31853eed3b92095b0acf06fd5597eeea42e939bd23a17137679d5"}, - {file = "aiohttp-3.11.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcefcf2915a2dbdbce37e2fc1622129a1918abfe3d06721ce9f6cdac9b6d2eaa"}, - {file = "aiohttp-3.11.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c1f6490dd1862af5aae6cfcf2a274bffa9a5b32a8f5acb519a7ecf5a99a88866"}, - {file = "aiohttp-3.11.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1ac5462582d6561c1c1708853a9faf612ff4e5ea5e679e99be36143d6eabd8e"}, - {file = "aiohttp-3.11.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1a6309005acc4b2bcc577ba3b9169fea52638709ffacbd071f3503264620da"}, - {file = "aiohttp-3.11.7-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5b973cce96793725ef63eb449adfb74f99c043c718acb76e0d2a447ae369962"}, - {file = "aiohttp-3.11.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ce91a24aac80de6be8512fb1c4838a9881aa713f44f4e91dd7bb3b34061b497d"}, - {file = "aiohttp-3.11.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:875f7100ce0e74af51d4139495eec4025affa1a605280f23990b6434b81df1bd"}, - {file = "aiohttp-3.11.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c171fc35d3174bbf4787381716564042a4cbc008824d8195eede3d9b938e29a8"}, - {file = "aiohttp-3.11.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ee9afa1b0d2293c46954f47f33e150798ad68b78925e3710044e0d67a9487791"}, - {file = "aiohttp-3.11.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8360c7cc620abb320e1b8d603c39095101391a82b1d0be05fb2225471c9c5c52"}, - {file = 
"aiohttp-3.11.7-cp313-cp313-win32.whl", hash = "sha256:7a9318da4b4ada9a67c1dd84d1c0834123081e746bee311a16bb449f363d965e"}, - {file = "aiohttp-3.11.7-cp313-cp313-win_amd64.whl", hash = "sha256:fc6da202068e0a268e298d7cd09b6e9f3997736cd9b060e2750963754552a0a9"}, - {file = "aiohttp-3.11.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:17829f37c0d31d89aa6b8b010475a10233774771f9b6dc2cc352ea4f8ce95d9a"}, - {file = "aiohttp-3.11.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d6177077a31b1aecfc3c9070bd2f11419dbb4a70f30f4c65b124714f525c2e48"}, - {file = "aiohttp-3.11.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:badda65ac99555791eed75e234afb94686ed2317670c68bff8a4498acdaee935"}, - {file = "aiohttp-3.11.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0de6466b9d742b4ee56fe1b2440706e225eb48c77c63152b1584864a236e7a50"}, - {file = "aiohttp-3.11.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:04b0cc74d5a882c9dacaeeccc1444f0233212b6f5be8bc90833feef1e1ce14b9"}, - {file = "aiohttp-3.11.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c7af3e50e5903d21d7b935aceed901cc2475463bc16ddd5587653548661fdb"}, - {file = "aiohttp-3.11.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c63f898f683d1379b9be5afc3dd139e20b30b0b1e0bf69a3fc3681f364cf1629"}, - {file = "aiohttp-3.11.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fdadc3f6a32d6eca45f9a900a254757fd7855dfb2d8f8dcf0e88f0fae3ff8eb1"}, - {file = "aiohttp-3.11.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d329300fb23e14ed1f8c6d688dfd867d1dcc3b1d7cd49b7f8c5b44e797ce0932"}, - {file = "aiohttp-3.11.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5578cf40440eafcb054cf859964bc120ab52ebe0e0562d2b898126d868749629"}, - {file = "aiohttp-3.11.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:7b2f8107a3c329789f3c00b2daad0e35f548d0a55cda6291579136622099a46e"}, - {file = "aiohttp-3.11.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:43dd89a6194f6ab02a3fe36b09e42e2df19c211fc2050ce37374d96f39604997"}, - {file = "aiohttp-3.11.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d2fa6fc7cc865d26ff42480ac9b52b8c9b7da30a10a6442a9cdf429de840e949"}, - {file = "aiohttp-3.11.7-cp39-cp39-win32.whl", hash = "sha256:a7d9a606355655617fee25dd7e54d3af50804d002f1fd3118dd6312d26692d70"}, - {file = "aiohttp-3.11.7-cp39-cp39-win_amd64.whl", hash = "sha256:53c921b58fdc6485d6b2603e0132bb01cd59b8f0620ffc0907f525e0ba071687"}, - {file = "aiohttp-3.11.7.tar.gz", hash = "sha256:01a8aca4af3da85cea5c90141d23f4b0eee3cbecfd33b029a45a80f28c66c668"}, +groups = ["main"] +files = [ + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:96264854fedbea933a9ca4b7e0c745728f01380691687b7365d18d9e977179c4"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9602044ff047043430452bc3a2089743fa85da829e6fc9ee0025351d66c332b6"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5691dc38750fcb96a33ceef89642f139aa315c8a193bbd42a0c33476fd4a1609"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554c918ec43f8480b47a5ca758e10e793bd7410b83701676a4782672d670da55"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a4076a2b3ba5b004b8cffca6afe18a3b2c5c9ef679b4d1e9859cf76295f8d4f"}, + {file = 
"aiohttp-3.11.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:767a97e6900edd11c762be96d82d13a1d7c4fc4b329f054e88b57cdc21fded94"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0ddc9337a0fb0e727785ad4f41163cc314376e82b31846d3835673786420ef1"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f414f37b244f2a97e79b98d48c5ff0789a0b4b4609b17d64fa81771ad780e415"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fdb239f47328581e2ec7744ab5911f97afb10752332a6dd3d98e14e429e1a9e7"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:f2c50bad73ed629cc326cc0f75aed8ecfb013f88c5af116f33df556ed47143eb"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a8d8f20c39d3fa84d1c28cdb97f3111387e48209e224408e75f29c6f8e0861d"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:106032eaf9e62fd6bc6578c8b9e6dc4f5ed9a5c1c7fb2231010a1b4304393421"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b491e42183e8fcc9901d8dcd8ae644ff785590f1727f76ca86e731c61bfe6643"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad8c745ff9460a16b710e58e06a9dec11ebc0d8f4dd82091cefb579844d69868"}, + {file = "aiohttp-3.11.18-cp310-cp310-win32.whl", hash = "sha256:8e57da93e24303a883146510a434f0faf2f1e7e659f3041abc4e3fb3f6702a9f"}, + {file = "aiohttp-3.11.18-cp310-cp310-win_amd64.whl", hash = "sha256:cc93a4121d87d9f12739fc8fab0a95f78444e571ed63e40bfc78cd5abe700ac9"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:427fdc56ccb6901ff8088544bde47084845ea81591deb16f957897f0f0ba1be9"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c828b6d23b984255b85b9b04a5b963a74278b7356a7de84fda5e3b76866597b"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c2eaa145bb36b33af1ff2860820ba0589e165be4ab63a49aebfd0981c173b66"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d518ce32179f7e2096bf4e3e8438cf445f05fedd597f252de9f54c728574756"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0700055a6e05c2f4711011a44364020d7a10fbbcd02fbf3e30e8f7e7fddc8717"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8bd1cde83e4684324e6ee19adfc25fd649d04078179890be7b29f76b501de8e4"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73b8870fe1c9a201b8c0d12c94fe781b918664766728783241a79e0468427e4f"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25557982dd36b9e32c0a3357f30804e80790ec2c4d20ac6bcc598533e04c6361"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e889c9df381a2433802991288a61e5a19ceb4f61bd14f5c9fa165655dcb1fd1"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9ea345fda05bae217b6cce2acf3682ce3b13d0d16dd47d0de7080e5e21362421"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9f26545b9940c4b46f0a9388fd04ee3ad7064c4017b5a334dd450f616396590e"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:3a621d85e85dccabd700294494d7179ed1590b6d07a35709bb9bd608c7f5dd1d"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9c23fd8d08eb9c2af3faeedc8c56e134acdaf36e2117ee059d7defa655130e5f"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9e6b0e519067caa4fd7fb72e3e8002d16a68e84e62e7291092a5433763dc0dd"}, + {file = "aiohttp-3.11.18-cp311-cp311-win32.whl", hash = "sha256:122f3e739f6607e5e4c6a2f8562a6f476192a682a52bda8b4c6d4254e1138f4d"}, + {file = "aiohttp-3.11.18-cp311-cp311-win_amd64.whl", hash = "sha256:e6f3c0a3a1e73e88af384b2e8a0b9f4fb73245afd47589df2afcab6b638fa0e6"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:63d71eceb9cad35d47d71f78edac41fcd01ff10cacaa64e473d1aec13fa02df2"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d1929da615840969929e8878d7951b31afe0bac883d84418f92e5755d7b49508"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d0aebeb2392f19b184e3fdd9e651b0e39cd0f195cdb93328bd124a1d455cd0e"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3849ead845e8444f7331c284132ab314b4dac43bfae1e3cf350906d4fff4620f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e8452ad6b2863709f8b3d615955aa0807bc093c34b8e25b3b52097fe421cb7f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b8d2b42073611c860a37f718b3d61ae8b4c2b124b2e776e2c10619d920350ec"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fbf91f6a0ac317c0a07eb328a1384941872f6761f2e6f7208b63c4cc0a7ff6"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ff5625413fec55216da5eaa011cf6b0a2ed67a565914a212a51aa3755b0009"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7f33a92a2fde08e8c6b0c61815521324fc1612f397abf96eed86b8e31618fdb4"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:11d5391946605f445ddafda5eab11caf310f90cdda1fd99865564e3164f5cff9"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3cc314245deb311364884e44242e00c18b5896e4fe6d5f942e7ad7e4cb640adb"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f421843b0f70740772228b9e8093289924359d306530bcd3926f39acbe1adda"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e220e7562467dc8d589e31c1acd13438d82c03d7f385c9cd41a3f6d1d15807c1"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ab2ef72f8605046115bc9aa8e9d14fd49086d405855f40b79ed9e5c1f9f4faea"}, + {file = "aiohttp-3.11.18-cp312-cp312-win32.whl", hash = "sha256:12a62691eb5aac58d65200c7ae94d73e8a65c331c3a86a2e9670927e94339ee8"}, + {file = "aiohttp-3.11.18-cp312-cp312-win_amd64.whl", hash = "sha256:364329f319c499128fd5cd2d1c31c44f234c58f9b96cc57f743d16ec4f3238c8"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:474215ec618974054cf5dc465497ae9708543cbfc312c65212325d4212525811"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ced70adf03920d4e67c373fd692123e34d3ac81dfa1c27e45904a628567d804"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:2d9f6c0152f8d71361905aaf9ed979259537981f47ad099c8b3d81e0319814bd"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a35197013ed929c0aed5c9096de1fc5a9d336914d73ab3f9df14741668c0616c"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:540b8a1f3a424f1af63e0af2d2853a759242a1769f9f1ab053996a392bd70118"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9e6710ebebfce2ba21cee6d91e7452d1125100f41b906fb5af3da8c78b764c1"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8af2ef3b4b652ff109f98087242e2ab974b2b2b496304063585e3d78de0b000"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28c3f975e5ae3dbcbe95b7e3dcd30e51da561a0a0f2cfbcdea30fc1308d72137"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c28875e316c7b4c3e745172d882d8a5c835b11018e33432d281211af35794a93"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:13cd38515568ae230e1ef6919e2e33da5d0f46862943fcda74e7e915096815f3"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0e2a92101efb9f4c2942252c69c63ddb26d20f46f540c239ccfa5af865197bb8"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e6d3e32b8753c8d45ac550b11a1090dd66d110d4ef805ffe60fa61495360b3b2"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ea4cf2488156e0f281f93cc2fd365025efcba3e2d217cbe3df2840f8c73db261"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d4df95ad522c53f2b9ebc07f12ccd2cb15550941e11a5bbc5ddca2ca56316d7"}, + {file = "aiohttp-3.11.18-cp313-cp313-win32.whl", hash = "sha256:cdd1bbaf1e61f0d94aced116d6e95fe25942f7a5f42382195fd9501089db5d78"}, + {file = "aiohttp-3.11.18-cp313-cp313-win_amd64.whl", hash = "sha256:bdd619c27e44382cf642223f11cfd4d795161362a5a1fc1fa3940397bc89db01"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:469ac32375d9a716da49817cd26f1916ec787fc82b151c1c832f58420e6d3533"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3cec21dd68924179258ae14af9f5418c1ebdbba60b98c667815891293902e5e0"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b426495fb9140e75719b3ae70a5e8dd3a79def0ae3c6c27e012fc59f16544a4a"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2f41203e2808616292db5d7170cccf0c9f9c982d02544443c7eb0296e8b0c7"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc0ae0a5e9939e423e065a3e5b00b24b8379f1db46046d7ab71753dfc7dd0e1"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe7cdd3f7d1df43200e1c80f1aed86bb36033bf65e3c7cf46a2b97a253ef8798"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5199be2a2f01ffdfa8c3a6f5981205242986b9e63eb8ae03fd18f736e4840721"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ccec9e72660b10f8e283e91aa0295975c7bd85c204011d9f5eb69310555cf30"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:1596ebf17e42e293cbacc7a24c3e0dc0f8f755b40aff0402cb74c1ff6baec1d3"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:eab7b040a8a873020113ba814b7db7fa935235e4cbaf8f3da17671baa1024863"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5d61df4a05476ff891cff0030329fee4088d40e4dc9b013fac01bc3c745542c2"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:46533e6792e1410f9801d09fd40cbbff3f3518d1b501d6c3c5b218f427f6ff08"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c1b90407ced992331dd6d4f1355819ea1c274cc1ee4d5b7046c6761f9ec11829"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a2fd04ae4971b914e54fe459dd7edbbd3f2ba875d69e057d5e3c8e8cac094935"}, + {file = "aiohttp-3.11.18-cp39-cp39-win32.whl", hash = "sha256:b2f317d1678002eee6fe85670039fb34a757972284614638f82b903a03feacdc"}, + {file = "aiohttp-3.11.18-cp39-cp39-win_amd64.whl", hash = "sha256:5e7007b8d1d09bce37b54111f593d173691c530b80f27c6493b928dabed9e6ef"}, + {file = "aiohttp-3.11.18.tar.gz", hash = "sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a"}, ] [package.dependencies] @@ -117,17 +125,18 @@ propcache = ">=0.2.0" yarl = ">=1.17.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns (>=3.2.0)", "brotlicffi"] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiosignal" -version = "1.3.1" +version = "1.3.2" description = "aiosignal: a list of registered asynchronous callbacks" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, ] [package.dependencies] @@ -139,6 +148,7 @@ version = "0.20.0" description = "asyncio bridge to the standard sqlite3 module" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "aiosqlite-0.20.0-py3-none-any.whl", hash = "sha256:36a1deaca0cac40ebe32aac9977a6e2bbc7f5189f23f4a54d5908986729e5bd6"}, {file = "aiosqlite-0.20.0.tar.gz", hash = "sha256:6d35c8c256637f4672f843c31021464090805bf925385ac39473fb16eaaca3d7"}, @@ -153,22 +163,23 @@ docs = ["sphinx (==7.2.6)", "sphinx-mdinclude (==0.5.3)"] [[package]] name = "alembic" -version = "1.14.0" +version = "1.15.2" description = "A database migration tool for SQLAlchemy." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"}, - {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"}, + {file = "alembic-1.15.2-py3-none-any.whl", hash = "sha256:2e76bd916d547f6900ec4bb5a90aeac1485d2c92536923d0b138c02b126edc53"}, + {file = "alembic-1.15.2.tar.gz", hash = "sha256:1c72391bbdeffccfe317eefba686cb9a3c078005478885413b95c3b26c57a8a7"}, ] [package.dependencies] Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" +SQLAlchemy = ">=1.4.0" +typing-extensions = ">=4.12" [package.extras] -tz = ["backports.zoneinfo"] +tz = ["tzdata"] [[package]] name = "annotated-types" @@ -176,6 +187,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -183,13 +195,14 @@ files = [ [[package]] name = "anyio" -version = "4.6.2.post1" +version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, - {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, ] [package.dependencies] @@ -197,30 +210,52 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] trio = ["trio (>=0.26.1)"] [[package]] name = "argcomplete" -version = "3.5.1" +version = "3.6.2" description = "Bash tab completion for argparse" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "argcomplete-3.5.1-py3-none-any.whl", hash = "sha256:1a1d148bdaa3e3b93454900163403df41448a248af01b6e849edc5ac08e6c363"}, - {file = "argcomplete-3.5.1.tar.gz", hash = "sha256:eb1ee355aa2557bd3d0145de7b06b2a45b0ce461e1e7813f5d066039ab4177b4"}, + {file = "argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591"}, + {file = 
"argcomplete-3.6.2.tar.gz", hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf"}, ] [package.extras] test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] +[[package]] +name = "arrow" +version = "1.3.0" +description = "Better dates & times for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "arrow-1.3.0-py3-none-any.whl", hash = "sha256:c728b120ebc00eb84e01882a6f5e7927a53960aa990ce7dd2b10f39005a67f80"}, + {file = "arrow-1.3.0.tar.gz", hash = "sha256:d4540617648cb5f895730f1ad8c82a65f2dad0166f57b75f3ca54759c4d67a85"}, +] + +[package.dependencies] +python-dateutil = ">=2.7.0" +types-python-dateutil = ">=2.8.10" + +[package.extras] +doc = ["doc8", "sphinx (>=7.0.0)", "sphinx-autobuild", "sphinx-autodoc-typehints", "sphinx_rtd_theme (>=1.3.0)"] +test = ["dateparser (==1.*)", "pre-commit", "pytest", "pytest-cov", "pytest-mock", "pytz (==2021.1)", "simplejson (==3.*)"] + [[package]] name = "asyncache" version = "0.3.1" description = "Helpers to use cachetools with async code." optional = false python-versions = ">=3.8,<4.0" +groups = ["main"] files = [ {file = "asyncache-0.3.1-py3-none-any.whl", hash = "sha256:ef20a1024d265090dd1e0785c961cf98b9c32cc7d9478973dcf25ac1b80011f5"}, {file = "asyncache-0.3.1.tar.gz", hash = "sha256:9a1e60a75668e794657489bdea6540ee7e3259c483517b934670db7600bf5035"}, @@ -235,6 +270,7 @@ version = "0.30.0" description = "An asyncio PostgreSQL driver" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, @@ -289,37 +325,39 @@ files = [ [package.extras] docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] -gssauth = ["gssapi", "sspilib"] -test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi", "k5test", "mypy (>=1.8.0,<1.9.0)", "sspilib", "uvloop (>=0.15.3)"] +gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] [[package]] name = "attrs" -version = "24.2.0" +version = "25.3.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, ] [package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest 
(>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] name = "authlib" -version = "1.3.2" +version = "1.6.0" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc"}, - {file = "authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2"}, + {file = "authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d"}, + {file = "authlib-1.6.0.tar.gz", hash = "sha256:4367d32031b7af175ad3a323d571dc7257b7099d55978087ceae4a0d88cd3210"}, ] [package.dependencies] @@ -327,13 +365,14 @@ cryptography = "*" [[package]] name = "authzed" -version = "1.1.0" +version = "1.21.1" description = "Client library for SpiceDB." 
optional = false -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.9" +groups = ["main"] files = [ - {file = "authzed-1.1.0-py3-none-any.whl", hash = "sha256:1c37038655c55d054b5caf918d60d680262fda4bc2787dc83576b4424e358214"}, - {file = "authzed-1.1.0.tar.gz", hash = "sha256:6e1300ff75af1840acdb3e0b2bc0dec31a8cf631c4ac6fc1ac674b9ea02d043a"}, + {file = "authzed-1.21.1-py3-none-any.whl", hash = "sha256:9a830c0e9eefc506181f0d82c9a9f73405db46d50e8ecaedd4488486a2792959"}, + {file = "authzed-1.21.1.tar.gz", hash = "sha256:c354d19af5ef1a393381d5be670dd946916742573ae2bf3ac87becdbf44f093b"}, ] [package.dependencies] @@ -341,24 +380,7 @@ googleapis-common-protos = ">=1.65.0,<2.0.0" grpc-interceptor = ">=0.15.4,<0.16.0" grpcio = ">=1.63,<2.0" protobuf = ">=5.26,<6" - -[[package]] -name = "avro-preprocessor" -version = "0.3.0" -description = "A preprocessor for Avro Schemata" -optional = false -python-versions = "*" -files = [ - {file = "avro-preprocessor-0.3.0.tar.gz", hash = "sha256:0470941009f49a02f8d6f0357a459b10ad15c6d4e8470f4055074176e654d716"}, - {file = "avro_preprocessor-0.3.0-py3-none-any.whl", hash = "sha256:da402ca763a3304c29f2237ed0f3cc5024dce2e0f60da433e9f93900c28b8400"}, -] - -[package.dependencies] -networkx = ">=2.8.7" -pygments = ">=2.13.0" -requests = ">=2.28.1" -"ruamel.yaml" = ">=0.17.21" -"ruamel.yaml.clib" = ">=0.2.6" +protovalidate = ">=0.7.1,<0.8.0" [[package]] name = "backoff" @@ -366,6 +388,7 @@ version = "2.2.1" description = "Function decoration for backoff and retry" optional = false python-versions = ">=3.7,<4.0" +groups = ["main", "dev"] files = [ {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, @@ -373,13 +396,14 @@ files = [ [[package]] name = "bandit" -version = "1.8.0" +version = "1.8.3" description = "Security oriented static analyser for python code." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "bandit-1.8.0-py3-none-any.whl", hash = "sha256:b1a61d829c0968aed625381e426aa378904b996529d048f8d908fa28f6b13e38"}, - {file = "bandit-1.8.0.tar.gz", hash = "sha256:b5bfe55a095abd9fe20099178a7c6c060f844bfd4fe4c76d28e35e4c52b9d31e"}, + {file = "bandit-1.8.3-py3-none-any.whl", hash = "sha256:28f04dc0d258e1dd0f99dee8eefa13d1cb5e3fde1a5ab0c523971f97b289bcd8"}, + {file = "bandit-1.8.3.tar.gz", hash = "sha256:f5847beb654d309422985c36644649924e0ea4425c76dec2e89110b87506193a"}, ] [package.dependencies] @@ -392,38 +416,39 @@ stevedore = ">=1.20.0" baseline = ["GitPython (>=3.1.30)"] sarif = ["jschema-to-python (>=1.2.3)", "sarif-om (>=1.0.4)"] test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)"] -toml = ["tomli (>=1.1.0)"] +toml = ["tomli (>=1.1.0) ; python_version < \"3.11\""] yaml = ["PyYAML"] [[package]] name = "black" -version = "24.10.0" +version = "25.1.0" description = "The uncompromising code formatter." 
optional = false python-versions = ">=3.9" -files = [ - {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, - {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, - {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, - {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, - {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, - {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, - {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, - {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, - {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, - {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, - {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, - {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, - {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, - {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, - {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, - {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, - {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, - {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, - {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, - {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, - {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, - {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, +groups = ["main"] +files = [ + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file 
= "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, ] [package.dependencies] @@ -439,37 +464,94 @@ d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] +[[package]] +name = "build" +version = "1.2.2.post1" +description = "A simple, correct Python build frontend" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "build-1.2.2.post1-py3-none-any.whl", hash = 
"sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, + {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "os_name == \"nt\""} +packaging = ">=19.1" +pyproject_hooks = "*" + +[package.extras] +docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0) ; python_version < \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.11\"", "setuptools (>=67.8.0) ; python_version >= \"3.12\"", "wheel (>=0.36.0)"] +typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +uv = ["uv (>=0.1.18)"] +virtualenv = ["virtualenv (>=20.0.35)"] + +[[package]] +name = "cachecontrol" +version = "0.14.2" +description = "httplib2 caching for requests" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "cachecontrol-0.14.2-py3-none-any.whl", hash = "sha256:ebad2091bf12d0d200dfc2464330db638c5deb41d546f6d7aca079e87290f3b0"}, + {file = "cachecontrol-0.14.2.tar.gz", hash = "sha256:7d47d19f866409b98ff6025b6a0fca8e4c791fb31abbd95f622093894ce903a2"}, +] + +[package.dependencies] +filelock = {version = ">=3.8.0", optional = true, markers = "extra == \"filecache\""} +msgpack = ">=0.5.2,<2.0.0" +requests = ">=2.16.0" + +[package.extras] +dev = ["CacheControl[filecache,redis]", "build", "cherrypy", "codespell[tomli]", "furo", "mypy", "pytest", "pytest-cov", "ruff", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"] +filecache = ["filelock (>=3.8.0)"] +redis = ["redis (>=2.10.5)"] + [[package]] name = "cachetools" -version = "5.5.0" +version = "5.5.2" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] [[package]] -name = "casefy" -version = "0.1.7" -description = "Utilities for string case conversion." 
+name = "cel-python" +version = "0.2.0" +description = "Pure Python implementation of Google Common Expression Language" optional = false -python-versions = ">=3.6" +python-versions = "<4.0,>=3.8" +groups = ["main"] files = [ - {file = "casefy-0.1.7-py3-none-any.whl", hash = "sha256:ab05ff1c67f2a8e62d9f8986fa9a849416d61ac5413ec57d1f827b4f36589cf6"}, - {file = "casefy-0.1.7.tar.gz", hash = "sha256:6accce985a64b9edb2a610a29ac489d78fac80e52ff8f2d137e294f2f92b8027"}, + {file = "cel_python-0.2.0-py3-none-any.whl", hash = "sha256:478ff73def7b39d51e6982f95d937a57c2b088c491c578fe5cecdbd79f476f60"}, + {file = "cel_python-0.2.0.tar.gz", hash = "sha256:75de72a5cf223ec690b236f0cc24da267219e667bd3e7f8f4f20595fcc1c0c0f"}, ] +[package.dependencies] +jmespath = ">=1.0.1,<2.0.0" +lark = ">=0.12.0,<0.13.0" +python-dateutil = ">=2.9.0.post0,<3.0.0" +pyyaml = ">=6.0.1,<7.0.0" +types-python-dateutil = ">=2.9.0.20240316,<3.0.0.0" +types-pyyaml = ">=6.0.12.20240311,<7.0.0.0" + [[package]] name = "certifi" -version = "2024.8.30" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] @@ -478,6 +560,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -557,6 +640,7 @@ version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -564,127 +648,132 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.4.0" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = 
"charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = 
"sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] + +[[package]] +name = "cleo" +version = "2.1.0" +description = "Cleo allows you to create beautiful and testable command-line interfaces." +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = 
"charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = 
"charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = 
"charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, + {file = "cleo-2.1.0-py3-none-any.whl", hash = "sha256:4a31bd4dd45695a64ee3c4758f583f134267c2bc518d8ae9a29cf237d009b07e"}, + {file = "cleo-2.1.0.tar.gz", hash = "sha256:0b2c880b5d13660a7ea651001fb4acb527696c01f15c9ee650f377aa543fd523"}, ] +[package.dependencies] +crashtest = ">=0.4.1,<0.5.0" +rapidfuzz = ">=3.0.0,<4.0.0" + [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -696,234 +785,224 @@ version = "0.4.6" description = "Cross-platform colored terminal text." 
optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {main = "platform_system == \"Windows\" or os_name == \"nt\""} [[package]] name = "coverage" -version = "7.6.8" +version = "7.8.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" -files = [ - {file = "coverage-7.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50"}, - {file = "coverage-7.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6"}, - {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638"}, - {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed"}, - {file = "coverage-7.6.8-cp310-cp310-win32.whl", hash = "sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e"}, - {file = "coverage-7.6.8-cp310-cp310-win_amd64.whl", hash = "sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a"}, - {file = "coverage-7.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4"}, - {file = "coverage-7.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1"}, - {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a"}, - {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0"}, - {file = 
"coverage-7.6.8-cp311-cp311-win32.whl", hash = "sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801"}, - {file = "coverage-7.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9"}, - {file = "coverage-7.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee"}, - {file = "coverage-7.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a"}, - {file = "coverage-7.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d"}, - {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb"}, - {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c"}, - {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443"}, - {file = "coverage-7.6.8-cp312-cp312-win32.whl", hash = "sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad"}, - {file = "coverage-7.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4"}, - {file = "coverage-7.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb"}, - {file = "coverage-7.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002"}, - {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e"}, - {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b"}, - {file = "coverage-7.6.8-cp313-cp313-win32.whl", hash = "sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146"}, - {file = "coverage-7.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28"}, - {file = "coverage-7.6.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d"}, - {file = "coverage-7.6.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf"}, - {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83"}, - {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b"}, - {file = "coverage-7.6.8-cp313-cp313t-win32.whl", hash = "sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71"}, - {file = "coverage-7.6.8-cp313-cp313t-win_amd64.whl", hash = "sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc"}, - {file = "coverage-7.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e"}, - {file = "coverage-7.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779"}, - {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc"}, - {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea"}, - {file = "coverage-7.6.8-cp39-cp39-win32.whl", hash = "sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e"}, - {file = "coverage-7.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076"}, - {file = "coverage-7.6.8-pp39.pp310-none-any.whl", hash = "sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce"}, - {file = "coverage-7.6.8.tar.gz", hash = "sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc"}, +groups = ["dev"] +files = [ + {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, + {file = 
"coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f"}, + {file = "coverage-7.8.0-cp310-cp310-win32.whl", hash = "sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f"}, + {file = "coverage-7.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9"}, + {file = "coverage-7.8.0-cp311-cp311-win32.whl", hash = "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c"}, + {file = "coverage-7.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d"}, + {file = 
"coverage-7.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe"}, + {file = "coverage-7.8.0-cp312-cp312-win32.whl", hash = "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545"}, + {file = "coverage-7.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3"}, + {file = "coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d"}, + {file = "coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883"}, + {file = "coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada"}, + {file = "coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899"}, + {file = "coverage-7.8.0-cp39-cp39-win32.whl", hash = "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f"}, + {file = "coverage-7.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3"}, + {file = "coverage-7.8.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd"}, + {file = "coverage-7.8.0-py3-none-any.whl", hash = "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7"}, + {file = "coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501"}, ] [package.extras] -toml = ["tomli"] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "crashtest" +version = "0.4.1" +description = "Manage Python errors with ease" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "crashtest-0.4.1-py3-none-any.whl", hash = "sha256:8d23eac5fa660409f57472e3851dab7ac18aba459a8d19cbbba86d3d5aecd2a5"}, + {file = "crashtest-0.4.1.tar.gz", hash = "sha256:80d7b1f316ebfbd429f648076d6275c877ba30ba48979de4191714a75266f0ce"}, +] [[package]] name = "cryptography" -version = "44.0.0" +version = "44.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" -files = [ - {file = "cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831c3c4d0774e488fdc83a1923b49b9957d33287de923d58ebd3cec47a0ae43f"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385"}, - {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"}, - {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"}, - {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"}, - {file = "cryptography-44.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:abc998e0c0eee3c8a1904221d3f67dcfa76422b23620173e28c11d3e626c21bd"}, - {file = "cryptography-44.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:660cb7312a08bc38be15b696462fa7cc7cd85c3ed9c576e81f4dc4d8b2b31591"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1923cb251c04be85eec9fda837661c67c1049063305d6be5721643c22dd4e2b7"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:404fdc66ee5f83a1388be54300ae978b2efd538018de18556dde92575e05defc"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba"}, - {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"}, - {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"}, - {file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"}, - {file = "cryptography-44.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:708ee5f1bafe76d041b53a4f95eb28cdeb8d18da17e597d46d7833ee59b97ede"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:37d76e6863da3774cd9db5b409a9ecfd2c71c981c38788d3fcfaf177f447b731"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f677e1268c4e23420c3acade68fac427fffcb8d19d7df95ed7ad17cdef8404f4"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5e7cb1e5e56ca0933b4873c0220a78b773b24d40d186b6738080b73d3d0a756"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:8b3e6eae66cf54701ee7d9c83c30ac0a1e3fa17be486033000f2a73a12ab507c"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:be4ce505894d15d5c5037167ffb7f0ae90b7be6f2a98f9a5c3442395501c32fa"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c"}, - {file = "cryptography-44.0.0.tar.gz", hash = "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02"}, +groups = ["main"] +files = [ + {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"}, + {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"}, + {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"}, + {file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"}, + {file = 
"cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"}, + {file = "cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"}, + {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"}, + {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"}, ] [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] -pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= 
\"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==44.0.0)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] -[[package]] -name = "dacite" -version = "1.8.1" -description = "Simple creation of data classes from dictionaries." -optional = false -python-versions = ">=3.6" -files = [ - {file = "dacite-1.8.1-py3-none-any.whl", hash = "sha256:cc31ad6fdea1f49962ea42db9421772afe01ac5442380d9a99fcf3d188c61afe"}, -] - -[package.extras] -dev = ["black", "coveralls", "mypy", "pre-commit", "pylint", "pytest (>=5)", "pytest-benchmark", "pytest-cov"] - -[[package]] -name = "dataclasses-avroschema" -version = "0.65.4" -description = "Generate Avro Schemas from Python classes. Serialize/Deserialize python instances with avro schemas" -optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "dataclasses_avroschema-0.65.4-py3-none-any.whl", hash = "sha256:f9a12541c73dfd79d68be4e873b0045b38fc03f31457e76102c91c0df75958d9"}, - {file = "dataclasses_avroschema-0.65.4.tar.gz", hash = "sha256:d91c63b854b397595fb90946840fe02f29c1ca8cec000f3aa79f8f757aae0528"}, -] - -[package.dependencies] -casefy = ">=0.1.7,<0.2.0" -dacite = ">=1.8.0,<2.0.0" -fastavro = ">=1.7.3,<2.0.0" -inflection = ">=0.5.1,<0.6.0" -python-dateutil = ">=2.7,<3.0" -typing-extensions = ">=4.2.0,<5.0.0" - -[package.extras] -cli = ["dc-avro (>=0.6.4)"] -faker = ["faker (>=26.0.0,<31.0.0)"] -faust = ["faust-streaming (>=0.10.11,<0.12.0)"] -pydantic = ["pydantic[email] (>=2.4.2,<3.0.0)"] - [[package]] name = "datamodel-code-generator" -version = "0.24.2" +version = "0.28.5" description = "Datamodel Code Generator" optional = false -python-versions = ">=3.7,<4.0" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "datamodel_code_generator-0.24.2-py3-none-any.whl", hash = "sha256:582c30466def12600d7165c5f624bb63a7e944eeaf8320f282518daf9ccb566c"}, - {file = "datamodel_code_generator-0.24.2.tar.gz", hash = "sha256:d278c751038c8911efc82856ec549ac1e3e13134567387a4bb5ab7ddc6543162"}, + {file = "datamodel_code_generator-0.28.5-py3-none-any.whl", hash = "sha256:f899c1da5af04b5d5b6e3edbd718c1bf3a00fc4b2fe8210cef609d93a9983e9e"}, + {file = "datamodel_code_generator-0.28.5.tar.gz", hash = "sha256:20e8b817d301d2d0bb15f436e81c97b25ad1c2ef922c99249c2444141ae15a6a"}, ] [package.dependencies] -argcomplete = ">=1.10,<4.0" +argcomplete = ">=2.10.1,<4" black = ">=19.10b0" -genson = ">=1.2.1,<2.0" -inflect = ">=4.1.0,<6.0" -isort = ">=4.3.21,<6.0" -jinja2 = ">=2.10.1,<4.0" +genson = ">=1.2.1,<2" +inflect = ">=4.1,<6" +isort = ">=4.3.21,<7" +jinja2 = ">=2.10.1,<4" packaging = "*" -pydantic = {version = ">=1.10.0,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.12\" and python_version < \"4.0\""} +pydantic = ">=1.5" pyyaml = ">=6.0.1" [package.extras] -debug = ["PySnooper (>=0.4.1,<2.0.0)"] -http = ["httpx"] -validation = ["openapi-spec-validator (>=0.2.8,<0.7.0)", "prance (>=0.18.2)"] +all = ["graphql-core (>=3.2.3)", "httpx (>=0.24.1)", "openapi-spec-validator (>=0.2.8,<0.7)", "prance (>=0.18.2)", "pysnooper (>=0.4.1,<2)", "ruff (>=0.9.10)"] +debug = ["pysnooper (>=0.4.1,<2)"] 
+graphql = ["graphql-core (>=3.2.3)"] +http = ["httpx (>=0.24.1)"] +ruff = ["ruff (>=0.9.10)"] +validation = ["openapi-spec-validator (>=0.2.8,<0.7)", "prance (>=0.18.2)"] [[package]] name = "debugpy" -version = "1.8.9" +version = "1.8.14" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" -files = [ - {file = "debugpy-1.8.9-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:cfe1e6c6ad7178265f74981edf1154ffce97b69005212fbc90ca22ddfe3d017e"}, - {file = "debugpy-1.8.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada7fb65102a4d2c9ab62e8908e9e9f12aed9d76ef44880367bc9308ebe49a0f"}, - {file = "debugpy-1.8.9-cp310-cp310-win32.whl", hash = "sha256:c36856343cbaa448171cba62a721531e10e7ffb0abff838004701454149bc037"}, - {file = "debugpy-1.8.9-cp310-cp310-win_amd64.whl", hash = "sha256:17c5e0297678442511cf00a745c9709e928ea4ca263d764e90d233208889a19e"}, - {file = "debugpy-1.8.9-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:b74a49753e21e33e7cf030883a92fa607bddc4ede1aa4145172debc637780040"}, - {file = "debugpy-1.8.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62d22dacdb0e296966d7d74a7141aaab4bec123fa43d1a35ddcb39bf9fd29d70"}, - {file = "debugpy-1.8.9-cp311-cp311-win32.whl", hash = "sha256:8138efff315cd09b8dcd14226a21afda4ca582284bf4215126d87342bba1cc66"}, - {file = "debugpy-1.8.9-cp311-cp311-win_amd64.whl", hash = "sha256:ff54ef77ad9f5c425398efb150239f6fe8e20c53ae2f68367eba7ece1e96226d"}, - {file = "debugpy-1.8.9-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:957363d9a7a6612a37458d9a15e72d03a635047f946e5fceee74b50d52a9c8e2"}, - {file = "debugpy-1.8.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e565fc54b680292b418bb809f1386f17081d1346dca9a871bf69a8ac4071afe"}, - {file = "debugpy-1.8.9-cp312-cp312-win32.whl", hash = "sha256:3e59842d6c4569c65ceb3751075ff8d7e6a6ada209ceca6308c9bde932bcef11"}, - {file = "debugpy-1.8.9-cp312-cp312-win_amd64.whl", hash = "sha256:66eeae42f3137eb428ea3a86d4a55f28da9bd5a4a3d369ba95ecc3a92c1bba53"}, - {file = "debugpy-1.8.9-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:957ecffff80d47cafa9b6545de9e016ae8c9547c98a538ee96ab5947115fb3dd"}, - {file = "debugpy-1.8.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1efbb3ff61487e2c16b3e033bc8595aea578222c08aaf3c4bf0f93fadbd662ee"}, - {file = "debugpy-1.8.9-cp313-cp313-win32.whl", hash = "sha256:7c4d65d03bee875bcb211c76c1d8f10f600c305dbd734beaed4077e902606fee"}, - {file = "debugpy-1.8.9-cp313-cp313-win_amd64.whl", hash = "sha256:e46b420dc1bea64e5bbedd678148be512442bc589b0111bd799367cde051e71a"}, - {file = "debugpy-1.8.9-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:472a3994999fe6c0756945ffa359e9e7e2d690fb55d251639d07208dbc37caea"}, - {file = "debugpy-1.8.9-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365e556a4772d7d0d151d7eb0e77ec4db03bcd95f26b67b15742b88cacff88e9"}, - {file = "debugpy-1.8.9-cp38-cp38-win32.whl", hash = "sha256:54a7e6d3014c408eb37b0b06021366ee985f1539e12fe49ca2ee0d392d9ceca5"}, - {file = "debugpy-1.8.9-cp38-cp38-win_amd64.whl", hash = "sha256:8e99c0b1cc7bf86d83fb95d5ccdc4ad0586d4432d489d1f54e4055bcc795f693"}, - {file = "debugpy-1.8.9-cp39-cp39-macosx_14_0_x86_64.whl", hash = 
"sha256:7e8b079323a56f719977fde9d8115590cb5e7a1cba2fcee0986ef8817116e7c1"}, - {file = "debugpy-1.8.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6953b335b804a41f16a192fa2e7851bdcfd92173cbb2f9f777bb934f49baab65"}, - {file = "debugpy-1.8.9-cp39-cp39-win32.whl", hash = "sha256:7e646e62d4602bb8956db88b1e72fe63172148c1e25c041e03b103a25f36673c"}, - {file = "debugpy-1.8.9-cp39-cp39-win_amd64.whl", hash = "sha256:3d9755e77a2d680ce3d2c5394a444cf42be4a592caaf246dbfbdd100ffcf7ae5"}, - {file = "debugpy-1.8.9-py2.py3-none-any.whl", hash = "sha256:cc37a6c9987ad743d9c3a14fa1b1a14b7e4e6041f9dd0c8abf8895fe7a97b899"}, - {file = "debugpy-1.8.9.zip", hash = "sha256:1339e14c7d980407248f09824d1b25ff5c5616651689f1e0f0e51bdead3ea13e"}, +groups = ["dev"] +files = [ + {file = "debugpy-1.8.14-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:93fee753097e85623cab1c0e6a68c76308cd9f13ffdf44127e6fab4fbf024339"}, + {file = "debugpy-1.8.14-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d937d93ae4fa51cdc94d3e865f535f185d5f9748efb41d0d49e33bf3365bd79"}, + {file = "debugpy-1.8.14-cp310-cp310-win32.whl", hash = "sha256:c442f20577b38cc7a9aafecffe1094f78f07fb8423c3dddb384e6b8f49fd2987"}, + {file = "debugpy-1.8.14-cp310-cp310-win_amd64.whl", hash = "sha256:f117dedda6d969c5c9483e23f573b38f4e39412845c7bc487b6f2648df30fe84"}, + {file = "debugpy-1.8.14-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:1b2ac8c13b2645e0b1eaf30e816404990fbdb168e193322be8f545e8c01644a9"}, + {file = "debugpy-1.8.14-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf431c343a99384ac7eab2f763980724834f933a271e90496944195318c619e2"}, + {file = "debugpy-1.8.14-cp311-cp311-win32.whl", hash = "sha256:c99295c76161ad8d507b413cd33422d7c542889fbb73035889420ac1fad354f2"}, + {file = "debugpy-1.8.14-cp311-cp311-win_amd64.whl", hash = "sha256:7816acea4a46d7e4e50ad8d09d963a680ecc814ae31cdef3622eb05ccacf7b01"}, + {file = "debugpy-1.8.14-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:8899c17920d089cfa23e6005ad9f22582fd86f144b23acb9feeda59e84405b84"}, + {file = "debugpy-1.8.14-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6bb5c0dcf80ad5dbc7b7d6eac484e2af34bdacdf81df09b6a3e62792b722826"}, + {file = "debugpy-1.8.14-cp312-cp312-win32.whl", hash = "sha256:281d44d248a0e1791ad0eafdbbd2912ff0de9eec48022a5bfbc332957487ed3f"}, + {file = "debugpy-1.8.14-cp312-cp312-win_amd64.whl", hash = "sha256:5aa56ef8538893e4502a7d79047fe39b1dae08d9ae257074c6464a7b290b806f"}, + {file = "debugpy-1.8.14-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:329a15d0660ee09fec6786acdb6e0443d595f64f5d096fc3e3ccf09a4259033f"}, + {file = "debugpy-1.8.14-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f920c7f9af409d90f5fd26e313e119d908b0dd2952c2393cd3247a462331f15"}, + {file = "debugpy-1.8.14-cp313-cp313-win32.whl", hash = "sha256:3784ec6e8600c66cbdd4ca2726c72d8ca781e94bce2f396cc606d458146f8f4e"}, + {file = "debugpy-1.8.14-cp313-cp313-win_amd64.whl", hash = "sha256:684eaf43c95a3ec39a96f1f5195a7ff3d4144e4a18d69bb66beeb1a6de605d6e"}, + {file = "debugpy-1.8.14-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:d5582bcbe42917bc6bbe5c12db1bffdf21f6bfc28d4554b738bf08d50dc0c8c3"}, + {file = 
"debugpy-1.8.14-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5349b7c3735b766a281873fbe32ca9cca343d4cc11ba4a743f84cb854339ff35"}, + {file = "debugpy-1.8.14-cp38-cp38-win32.whl", hash = "sha256:7118d462fe9724c887d355eef395fae68bc764fd862cdca94e70dcb9ade8a23d"}, + {file = "debugpy-1.8.14-cp38-cp38-win_amd64.whl", hash = "sha256:d235e4fa78af2de4e5609073972700523e372cf5601742449970110d565ca28c"}, + {file = "debugpy-1.8.14-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:413512d35ff52c2fb0fd2d65e69f373ffd24f0ecb1fac514c04a668599c5ce7f"}, + {file = "debugpy-1.8.14-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c9156f7524a0d70b7a7e22b2e311d8ba76a15496fb00730e46dcdeedb9e1eea"}, + {file = "debugpy-1.8.14-cp39-cp39-win32.whl", hash = "sha256:b44985f97cc3dd9d52c42eb59ee9d7ee0c4e7ecd62bca704891f997de4cef23d"}, + {file = "debugpy-1.8.14-cp39-cp39-win_amd64.whl", hash = "sha256:b1528cfee6c1b1c698eb10b6b096c598738a8238822d218173d21c3086de8123"}, + {file = "debugpy-1.8.14-py2.py3-none-any.whl", hash = "sha256:5cd9a579d553b6cb9759a7908a41988ee6280b961f24f63336835d9418216a20"}, + {file = "debugpy-1.8.14.tar.gz", hash = "sha256:7cd287184318416850aa8b60ac90105837bb1e59531898c07569d197d2ed5322"}, ] [[package]] @@ -932,6 +1011,7 @@ version = "2.0" description = "A toolset for deeply merging Python dictionaries." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "deepmerge-2.0-py3-none-any.whl", hash = "sha256:6de9ce507115cff0bed95ff0ce9ecc31088ef50cbdf09bc90a09349a318b3d00"}, {file = "deepmerge-2.0.tar.gz", hash = "sha256:5c3d86081fbebd04dd5de03626a0607b809a98fb6ccba5770b62466fe940ff20"}, @@ -946,17 +1026,31 @@ version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" +groups = ["main", "dev"] files = [ {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + [[package]] name = "dnspython" version = "2.7.0" description = "DNS toolkit" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, @@ -971,12 +1065,80 @@ idna = ["idna (>=3.7)"] trio = ["trio (>=0.23)"] wmi = ["wmi (>=1.5.1)"] +[[package]] +name = "dulwich" +version = "0.22.8" +description = "Python Git Library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "dulwich-0.22.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:546176d18b8cc0a492b0f23f07411e38686024cffa7e9d097ae20512a2e57127"}, + {file = "dulwich-0.22.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:7d2434dd72b2ae09b653c9cfe6764a03c25cfbd99fbbb7c426f0478f6fb1100f"}, + {file = "dulwich-0.22.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8318bc0921d42e3e69f03716f983a301b5ee4c8dc23c7f2c5bbb28581257a9"}, + {file = "dulwich-0.22.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7a0f96a2a87f3b4f7feae79d2ac6b94107d6b7d827ac08f2f331b88c8f597a1"}, + {file = "dulwich-0.22.8-cp310-cp310-win32.whl", hash = "sha256:432a37b25733202897b8d67cdd641688444d980167c356ef4e4dd15a17a39a24"}, + {file = "dulwich-0.22.8-cp310-cp310-win_amd64.whl", hash = "sha256:f3a15e58dac8b8a76073ddca34e014f66f3672a5540a99d49ef6a9c09ab21285"}, + {file = "dulwich-0.22.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0852edc51cff4f4f62976bdaa1d82f6ef248356c681c764c0feb699bc17d5782"}, + {file = "dulwich-0.22.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:826aae8b64ac1a12321d6b272fc13934d8f62804fda2bc6ae46f93f4380798eb"}, + {file = "dulwich-0.22.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7ae726f923057d36cdbb9f4fb7da0d0903751435934648b13f1b851f0e38ea1"}, + {file = "dulwich-0.22.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6987d753227f55cf75ba29a8dab69d1d83308ce483d7a8c6d223086f7a42e125"}, + {file = "dulwich-0.22.8-cp311-cp311-win32.whl", hash = "sha256:7757b4a2aad64c6f1920082fc1fccf4da25c3923a0ae7b242c08d06861dae6e1"}, + {file = "dulwich-0.22.8-cp311-cp311-win_amd64.whl", hash = "sha256:12b243b7e912011c7225dc67480c313ac8d2990744789b876016fb593f6f3e19"}, + {file = "dulwich-0.22.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d81697f74f50f008bb221ab5045595f8a3b87c0de2c86aa55be42ba97421f3cd"}, + {file = "dulwich-0.22.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bff1da8e2e6a607c3cb45f5c2e652739589fe891245e1d5b770330cdecbde41"}, + {file = "dulwich-0.22.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9969099e15b939d3936f8bee8459eaef7ef5a86cd6173393a17fe28ca3d38aff"}, + {file = "dulwich-0.22.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:017152c51b9a613f0698db28c67cf3e0a89392d28050dbf4f4ac3f657ea4c0dc"}, + {file = "dulwich-0.22.8-cp312-cp312-win32.whl", hash = "sha256:ee70e8bb8798b503f81b53f7a103cb869c8e89141db9005909f79ab1506e26e9"}, + {file = "dulwich-0.22.8-cp312-cp312-win_amd64.whl", hash = "sha256:dc89c6f14dcdcbfee200b0557c59ae243835e42720be143526d834d0e53ed3af"}, + {file = "dulwich-0.22.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbade3342376be1cd2409539fe1b901d2d57a531106bbae204da921ef4456a74"}, + {file = "dulwich-0.22.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71420ffb6deebc59b2ce875e63d814509f9c1dc89c76db962d547aebf15670c7"}, + {file = "dulwich-0.22.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a626adbfac44646a125618266a24133763bdc992bf8bd0702910d67e6b994443"}, + {file = "dulwich-0.22.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f1476c9c4e4ede95714d06c4831883a26680e37b040b8b6230f506e5ba39f51"}, + {file = "dulwich-0.22.8-cp313-cp313-win32.whl", hash = "sha256:b2b31913932bb5bd41658dd398b33b1a2d4d34825123ad54e40912cfdfe60003"}, + {file = "dulwich-0.22.8-cp313-cp313-win_amd64.whl", hash = 
"sha256:7a44e5a61a7989aca1e301d39cfb62ad2f8853368682f524d6e878b4115d823d"}, + {file = "dulwich-0.22.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9cd0c67fb44a38358b9fcabee948bf11044ef6ce7a129e50962f54c176d084e"}, + {file = "dulwich-0.22.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b79b94726c3f4a9e5a830c649376fd0963236e73142a4290bac6bc9fc9cb120"}, + {file = "dulwich-0.22.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16bbe483d663944972e22d64e1f191201123c3b5580fbdaac6a4f66bfaa4fc11"}, + {file = "dulwich-0.22.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e02d403af23d93dc1f96eb2408e25efd50046e38590a88c86fa4002adc9849b0"}, + {file = "dulwich-0.22.8-cp39-cp39-win32.whl", hash = "sha256:8bdd9543a77fb01be704377f5e634b71f955fec64caa4a493dc3bfb98e3a986e"}, + {file = "dulwich-0.22.8-cp39-cp39-win_amd64.whl", hash = "sha256:3b6757c6b3ba98212b854a766a4157b9cb79a06f4e1b06b46dec4bd834945b8e"}, + {file = "dulwich-0.22.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7bb18fa09daa1586c1040b3e2777d38d4212a5cdbe47d384ba66a1ac336fcc4c"}, + {file = "dulwich-0.22.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2fda8e87907ed304d4a5962aea0338366144df0df60f950b8f7f125871707f"}, + {file = "dulwich-0.22.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1748cd573a0aee4d530bc223a23ccb8bb5b319645931a37bd1cfb68933b720c1"}, + {file = "dulwich-0.22.8-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a631b2309feb9a9631eabd896612ba36532e3ffedccace57f183bb868d7afc06"}, + {file = "dulwich-0.22.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:00e7d9a3d324f9e0a1b27880eec0e8e276ff76519621b66c1a429ca9eb3f5a8d"}, + {file = "dulwich-0.22.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f8aa3de93201f9e3e40198725389aa9554a4ee3318a865f96a8e9bc9080f0b25"}, + {file = "dulwich-0.22.8-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e8da9dd8135884975f5be0563ede02179240250e11f11942801ae31ac293f37"}, + {file = "dulwich-0.22.8-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fc5ce2435fb3abdf76f1acabe48f2e4b3f7428232cadaef9daaf50ea7fa30ee"}, + {file = "dulwich-0.22.8-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:982b21cc3100d959232cadb3da0a478bd549814dd937104ea50f43694ec27153"}, + {file = "dulwich-0.22.8-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6bde2b13a05cc0ec2ecd4597a99896663544c40af1466121f4d046119b874ce3"}, + {file = "dulwich-0.22.8-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6d446cb7d272a151934ad4b48ba691f32486d5267cf2de04ee3b5e05fc865326"}, + {file = "dulwich-0.22.8-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f6338e6cf95cd76a0191b3637dc3caed1f988ae84d8e75f876d5cd75a8dd81a"}, + {file = "dulwich-0.22.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e004fc532ea262f2d5f375068101ca4792becb9d4aa663b050f5ac31fda0bb5c"}, + {file = "dulwich-0.22.8-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bfdbc6fa477dee00d04e22d43a51571cd820cfaaaa886f0f155b8e29b3e3d45"}, + {file = "dulwich-0.22.8-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:ae900c8e573f79d714c1d22b02cdadd50b64286dd7203028f0200f82089e4950"}, + {file = "dulwich-0.22.8-py3-none-any.whl", hash = "sha256:ffc7a02e62b72884de58baaa3b898b7f6427893e79b1289ffa075092efe59181"}, + {file = "dulwich-0.22.8.tar.gz", hash = "sha256:701547310415de300269331abe29cb5717aa1ea377af826bf513d0adfb1c209b"}, +] + +[package.dependencies] +urllib3 = ">=1.25" + +[package.extras] +dev = ["mypy (==1.15.0)", "ruff (==0.9.7)"] +fastimport = ["fastimport"] +https = ["urllib3 (>=1.24.1)"] +paramiko = ["paramiko"] +pgp = ["gpg"] + [[package]] name = "durationpy" version = "0.9" description = "Module for converting between datetime.timedelta and Go's Duration strings." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"}, {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, @@ -988,6 +1150,7 @@ version = "2.2.0" description = "A robust email address syntax and deliverability validation library." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, @@ -1003,6 +1166,7 @@ version = "1.0.1" description = "Simple, generic API for escaping strings." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "escapism-1.0.1-py2.py3-none-any.whl", hash = "sha256:d28f19edc3cb1ffc36fa238956ecc068695477e748f57157c6dde00a6b77f229"}, {file = "escapism-1.0.1.tar.gz", hash = "sha256:73256bdfb4f22230f0428fc6efecee61cdc4fad531b6f98b849cb9c80711e4ec"}, @@ -1014,6 +1178,7 @@ version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -1023,221 +1188,176 @@ files = [ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] -name = "factory-boy" -version = "3.3.1" -description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." 
+name = "fastjsonschema" +version = "2.21.1" +description = "Fastest Python implementation of JSON schema" optional = false -python-versions = ">=3.8" +python-versions = "*" +groups = ["main"] files = [ - {file = "factory_boy-3.3.1-py2.py3-none-any.whl", hash = "sha256:7b1113c49736e1e9995bc2a18f4dbf2c52cf0f841103517010b1d825712ce3ca"}, - {file = "factory_boy-3.3.1.tar.gz", hash = "sha256:8317aa5289cdfc45f9cae570feb07a6177316c82e34d14df3c2e1f22f26abef0"}, + {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, + {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, ] -[package.dependencies] -Faker = ">=0.7.0" - [package.extras] -dev = ["Django", "Pillow", "SQLAlchemy", "coverage", "flake8", "isort", "mongoengine", "mongomock", "mypy", "tox", "wheel (>=0.32.0)", "zest.releaser[recommended]"] -doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] -name = "faker" -version = "33.0.0" -description = "Faker is a Python package that generates fake data for you." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Faker-33.0.0-py3-none-any.whl", hash = "sha256:68e5580cb6b4226710886e595eabc13127149d6e71e9d1db65506a7fbe2c7fce"}, - {file = "faker-33.0.0.tar.gz", hash = "sha256:9b01019c1ddaf2253ca2308c0472116e993f4ad8fc9905f82fa965e0c6f932e9"}, -] - -[package.dependencies] -python-dateutil = ">=2.4" -typing-extensions = "*" - -[[package]] -name = "fakeredis" -version = "2.26.1" -description = "Python implementation of redis API, can be used for testing purposes." +name = "filelock" +version = "3.18.0" +description = "A platform independent file lock." 
optional = false -python-versions = "<4.0,>=3.7" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "fakeredis-2.26.1-py3-none-any.whl", hash = "sha256:68a5615d7ef2529094d6958677e30a6d30d544e203a5ab852985c19d7ad57e32"}, - {file = "fakeredis-2.26.1.tar.gz", hash = "sha256:69f4daafe763c8014a6dbf44a17559c46643c95447b3594b3975251a171b806d"}, + {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, + {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, ] -[package.dependencies] -redis = {version = ">=4.3", markers = "python_full_version > \"3.8.0\""} -sortedcontainers = ">=2,<3" - [package.extras] -bf = ["pyprobables (>=0.6,<0.7)"] -cf = ["pyprobables (>=0.6,<0.7)"] -json = ["jsonpath-ng (>=1.6,<2.0)"] -lua = ["lupa (>=2.1,<3.0)"] -probabilistic = ["pyprobables (>=0.6,<0.7)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] +typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] [[package]] -name = "fastavro" -version = "1.9.7" -description = "Fast read/write of AVRO files" +name = "findpython" +version = "0.6.3" +description = "A utility to find python versions on your system" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "fastavro-1.9.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc811fb4f7b5ae95f969cda910241ceacf82e53014c7c7224df6f6e0ca97f52f"}, - {file = "fastavro-1.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb8749e419a85f251bf1ac87d463311874972554d25d4a0b19f6bdc56036d7cf"}, - {file = "fastavro-1.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b2f9bafa167cb4d1c3dd17565cb5bf3d8c0759e42620280d1760f1e778e07fc"}, - {file = "fastavro-1.9.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e87d04b235b29f7774d226b120da2ca4e60b9e6fdf6747daef7f13f218b3517a"}, - {file = "fastavro-1.9.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b525c363e267ed11810aaad8fbdbd1c3bd8837d05f7360977d72a65ab8c6e1fa"}, - {file = "fastavro-1.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:6312fa99deecc319820216b5e1b1bd2d7ebb7d6f221373c74acfddaee64e8e60"}, - {file = "fastavro-1.9.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ec8499dc276c2d2ef0a68c0f1ad11782b2b956a921790a36bf4c18df2b8d4020"}, - {file = "fastavro-1.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d9d96f98052615ab465c63ba8b76ed59baf2e3341b7b169058db104cbe2aa0"}, - {file = "fastavro-1.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919f3549e07a8a8645a2146f23905955c35264ac809f6c2ac18142bc5b9b6022"}, - {file = "fastavro-1.9.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:9de1fa832a4d9016724cd6facab8034dc90d820b71a5d57c7e9830ffe90f31e4"}, - {file = "fastavro-1.9.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1d09227d1f48f13281bd5ceac958650805aef9a4ef4f95810128c1f9be1df736"}, - {file = "fastavro-1.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:2db993ae6cdc63e25eadf9f93c9e8036f9b097a3e61d19dca42536dcc5c4d8b3"}, - {file = "fastavro-1.9.7-cp312-cp312-macosx_10_9_universal2.whl", hash = 
"sha256:4e1289b731214a7315884c74b2ec058b6e84380ce9b18b8af5d387e64b18fc44"}, - {file = "fastavro-1.9.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eac69666270a76a3a1d0444f39752061195e79e146271a568777048ffbd91a27"}, - {file = "fastavro-1.9.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9be089be8c00f68e343bbc64ca6d9a13e5e5b0ba8aa52bcb231a762484fb270e"}, - {file = "fastavro-1.9.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d576eccfd60a18ffa028259500df67d338b93562c6700e10ef68bbd88e499731"}, - {file = "fastavro-1.9.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ee9bf23c157bd7dcc91ea2c700fa3bd924d9ec198bb428ff0b47fa37fe160659"}, - {file = "fastavro-1.9.7-cp312-cp312-win_amd64.whl", hash = "sha256:b6b2ccdc78f6afc18c52e403ee68c00478da12142815c1bd8a00973138a166d0"}, - {file = "fastavro-1.9.7-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:7313def3aea3dacface0a8b83f6d66e49a311149aa925c89184a06c1ef99785d"}, - {file = "fastavro-1.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:536f5644737ad21d18af97d909dba099b9e7118c237be7e4bd087c7abde7e4f0"}, - {file = "fastavro-1.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2af559f30383b79cf7d020a6b644c42ffaed3595f775fe8f3d7f80b1c43dfdc5"}, - {file = "fastavro-1.9.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:edc28ab305e3c424de5ac5eb87b48d1e07eddb6aa08ef5948fcda33cc4d995ce"}, - {file = "fastavro-1.9.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ec2e96bdabd58427fe683329b3d79f42c7b4f4ff6b3644664a345a655ac2c0a1"}, - {file = "fastavro-1.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:3b683693c8a85ede496ebebe115be5d7870c150986e34a0442a20d88d7771224"}, - {file = "fastavro-1.9.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:58f76a5c9a312fbd37b84e49d08eb23094d36e10d43bc5df5187bc04af463feb"}, - {file = "fastavro-1.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56304401d2f4f69f5b498bdd1552c13ef9a644d522d5de0dc1d789cf82f47f73"}, - {file = "fastavro-1.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fcce036c6aa06269fc6a0428050fcb6255189997f5e1a728fc461e8b9d3e26b"}, - {file = "fastavro-1.9.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:17de68aae8c2525f5631d80f2b447a53395cdc49134f51b0329a5497277fc2d2"}, - {file = "fastavro-1.9.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7c911366c625d0a997eafe0aa83ffbc6fd00d8fd4543cb39a97c6f3b8120ea87"}, - {file = "fastavro-1.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:912283ed48578a103f523817fdf0c19b1755cea9b4a6387b73c79ecb8f8f84fc"}, - {file = "fastavro-1.9.7.tar.gz", hash = "sha256:13e11c6cb28626da85290933027cd419ce3f9ab8e45410ef24ce6b89d20a1f6c"}, + {file = "findpython-0.6.3-py3-none-any.whl", hash = "sha256:a85bb589b559cdf1b87227cc233736eb7cad894b9e68021ee498850611939ebc"}, + {file = "findpython-0.6.3.tar.gz", hash = "sha256:5863ea55556d8aadc693481a14ac4f3624952719efc1c5591abb0b4a9e965c94"}, ] -[package.extras] -codecs = ["cramjam", "lz4", "zstandard"] -lz4 = ["lz4"] -snappy = ["cramjam"] -zstandard = ["zstandard"] +[package.dependencies] +packaging = ">=20" [[package]] -name = "filelock" -version = "3.16.1" -description = "A platform independent file lock." 
+name = "fqdn" +version = "1.5.1" +description = "Validates fully-qualified domain names against RFC 1123, so that they are acceptable to modern bowsers" optional = false -python-versions = ">=3.8" +python-versions = ">=2.7, !=3.0, !=3.1, !=3.2, !=3.3, !=3.4, <4" +groups = ["dev"] files = [ - {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, - {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, + {file = "fqdn-1.5.1-py3-none-any.whl", hash = "sha256:3a179af3761e4df6eb2e026ff9e1a3033d3587bf980a0b1b2e1e5d08d7358014"}, + {file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"}, ] -[package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] -typing = ["typing-extensions (>=4.12.2)"] - [[package]] name = "frozenlist" -version = "1.5.0" +version = "1.6.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5b6a66c18b5b9dd261ca98dffcb826a525334b2f29e7caa54e182255c5f6a65a"}, - {file = "frozenlist-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d1b3eb7b05ea246510b43a7e53ed1653e55c2121019a97e60cad7efb881a97bb"}, - {file = "frozenlist-1.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:15538c0cbf0e4fa11d1e3a71f823524b0c46299aed6e10ebb4c2089abd8c3bec"}, - {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e79225373c317ff1e35f210dd5f1344ff31066ba8067c307ab60254cd3a78ad5"}, - {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9272fa73ca71266702c4c3e2d4a28553ea03418e591e377a03b8e3659d94fa76"}, - {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:498524025a5b8ba81695761d78c8dd7382ac0b052f34e66939c42df860b8ff17"}, - {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:92b5278ed9d50fe610185ecd23c55d8b307d75ca18e94c0e7de328089ac5dcba"}, - {file = "frozenlist-1.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f3c8c1dacd037df16e85227bac13cca58c30da836c6f936ba1df0c05d046d8d"}, - {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f2ac49a9bedb996086057b75bf93538240538c6d9b38e57c82d51f75a73409d2"}, - {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e66cc454f97053b79c2ab09c17fbe3c825ea6b4de20baf1be28919460dd7877f"}, - {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:5a3ba5f9a0dfed20337d3e966dc359784c9f96503674c2faf015f7fe8e96798c"}, - {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6321899477db90bdeb9299ac3627a6a53c7399c8cd58d25da094007402b039ab"}, - {file = "frozenlist-1.5.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76e4753701248476e6286f2ef492af900ea67d9706a0155335a40ea21bf3b2f5"}, - {file = "frozenlist-1.5.0-cp310-cp310-win32.whl", hash = 
"sha256:977701c081c0241d0955c9586ffdd9ce44f7a7795df39b9151cd9a6fd0ce4cfb"}, - {file = "frozenlist-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:189f03b53e64144f90990d29a27ec4f7997d91ed3d01b51fa39d2dbe77540fd4"}, - {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fd74520371c3c4175142d02a976aee0b4cb4a7cc912a60586ffd8d5929979b30"}, - {file = "frozenlist-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2f3f7a0fbc219fb4455264cae4d9f01ad41ae6ee8524500f381de64ffaa077d5"}, - {file = "frozenlist-1.5.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f47c9c9028f55a04ac254346e92977bf0f166c483c74b4232bee19a6697e4778"}, - {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0996c66760924da6e88922756d99b47512a71cfd45215f3570bf1e0b694c206a"}, - {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2fe128eb4edeabe11896cb6af88fca5346059f6c8d807e3b910069f39157869"}, - {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a8ea951bbb6cacd492e3948b8da8c502a3f814f5d20935aae74b5df2b19cf3d"}, - {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de537c11e4aa01d37db0d403b57bd6f0546e71a82347a97c6a9f0dcc532b3a45"}, - {file = "frozenlist-1.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c2623347b933fcb9095841f1cc5d4ff0b278addd743e0e966cb3d460278840d"}, - {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cee6798eaf8b1416ef6909b06f7dc04b60755206bddc599f52232606e18179d3"}, - {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f5f9da7f5dbc00a604fe74aa02ae7c98bcede8a3b8b9666f9f86fc13993bc71a"}, - {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:90646abbc7a5d5c7c19461d2e3eeb76eb0b204919e6ece342feb6032c9325ae9"}, - {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:bdac3c7d9b705d253b2ce370fde941836a5f8b3c5c2b8fd70940a3ea3af7f4f2"}, - {file = "frozenlist-1.5.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:03d33c2ddbc1816237a67f66336616416e2bbb6beb306e5f890f2eb22b959cdf"}, - {file = "frozenlist-1.5.0-cp311-cp311-win32.whl", hash = "sha256:237f6b23ee0f44066219dae14c70ae38a63f0440ce6750f868ee08775073f942"}, - {file = "frozenlist-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:0cc974cc93d32c42e7b0f6cf242a6bd941c57c61b618e78b6c0a96cb72788c1d"}, - {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:31115ba75889723431aa9a4e77d5f398f5cf976eea3bdf61749731f62d4a4a21"}, - {file = "frozenlist-1.5.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7437601c4d89d070eac8323f121fcf25f88674627505334654fd027b091db09d"}, - {file = "frozenlist-1.5.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7948140d9f8ece1745be806f2bfdf390127cf1a763b925c4a805c603df5e697e"}, - {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:feeb64bc9bcc6b45c6311c9e9b99406660a9c05ca8a5b30d14a78555088b0b3a"}, - {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:683173d371daad49cffb8309779e886e59c2f369430ad28fe715f66d08d4ab1a"}, - {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7d57d8f702221405a9d9b40f9da8ac2e4a1a8b5285aac6100f3393675f0a85ee"}, - {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c72000fbcc35b129cb09956836c7d7abf78ab5416595e4857d1cae8d6251a6"}, - {file = "frozenlist-1.5.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:000a77d6034fbad9b6bb880f7ec073027908f1b40254b5d6f26210d2dab1240e"}, - {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:5d7f5a50342475962eb18b740f3beecc685a15b52c91f7d975257e13e029eca9"}, - {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:87f724d055eb4785d9be84e9ebf0f24e392ddfad00b3fe036e43f489fafc9039"}, - {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:6e9080bb2fb195a046e5177f10d9d82b8a204c0736a97a153c2466127de87784"}, - {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b93d7aaa36c966fa42efcaf716e6b3900438632a626fb09c049f6a2f09fc631"}, - {file = "frozenlist-1.5.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:52ef692a4bc60a6dd57f507429636c2af8b6046db8b31b18dac02cbc8f507f7f"}, - {file = "frozenlist-1.5.0-cp312-cp312-win32.whl", hash = "sha256:29d94c256679247b33a3dc96cce0f93cbc69c23bf75ff715919332fdbb6a32b8"}, - {file = "frozenlist-1.5.0-cp312-cp312-win_amd64.whl", hash = "sha256:8969190d709e7c48ea386db202d708eb94bdb29207a1f269bab1196ce0dcca1f"}, - {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a1a048f9215c90973402e26c01d1cff8a209e1f1b53f72b95c13db61b00f953"}, - {file = "frozenlist-1.5.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dd47a5181ce5fcb463b5d9e17ecfdb02b678cca31280639255ce9d0e5aa67af0"}, - {file = "frozenlist-1.5.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1431d60b36d15cda188ea222033eec8e0eab488f39a272461f2e6d9e1a8e63c2"}, - {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6482a5851f5d72767fbd0e507e80737f9c8646ae7fd303def99bfe813f76cf7f"}, - {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:44c49271a937625619e862baacbd037a7ef86dd1ee215afc298a417ff3270608"}, - {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:12f78f98c2f1c2429d42e6a485f433722b0061d5c0b0139efa64f396efb5886b"}, - {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce3aa154c452d2467487765e3adc730a8c153af77ad84096bc19ce19a2400840"}, - {file = "frozenlist-1.5.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b7dc0c4338e6b8b091e8faf0db3168a37101943e687f373dce00959583f7439"}, - {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:45e0896250900b5aa25180f9aec243e84e92ac84bd4a74d9ad4138ef3f5c97de"}, - {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:561eb1c9579d495fddb6da8959fd2a1fca2c6d060d4113f5844b433fc02f2641"}, - {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:df6e2f325bfee1f49f81aaac97d2aa757c7646534a06f8f577ce184afe2f0a9e"}, - {file = "frozenlist-1.5.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:140228863501b44b809fb39ec56b5d4071f4d0aa6d216c19cbb08b8c5a7eadb9"}, - {file = 
"frozenlist-1.5.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7707a25d6a77f5d27ea7dc7d1fc608aa0a478193823f88511ef5e6b8a48f9d03"}, - {file = "frozenlist-1.5.0-cp313-cp313-win32.whl", hash = "sha256:31a9ac2b38ab9b5a8933b693db4939764ad3f299fcaa931a3e605bc3460e693c"}, - {file = "frozenlist-1.5.0-cp313-cp313-win_amd64.whl", hash = "sha256:11aabdd62b8b9c4b84081a3c246506d1cddd2dd93ff0ad53ede5defec7886b28"}, - {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:dd94994fc91a6177bfaafd7d9fd951bc8689b0a98168aa26b5f543868548d3ca"}, - {file = "frozenlist-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0da8bbec082bf6bf18345b180958775363588678f64998c2b7609e34719b10"}, - {file = "frozenlist-1.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73f2e31ea8dd7df61a359b731716018c2be196e5bb3b74ddba107f694fbd7604"}, - {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828afae9f17e6de596825cf4228ff28fbdf6065974e5ac1410cecc22f699d2b3"}, - {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1577515d35ed5649d52ab4319db757bb881ce3b2b796d7283e6634d99ace307"}, - {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2150cc6305a2c2ab33299453e2968611dacb970d2283a14955923062c8d00b10"}, - {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a72b7a6e3cd2725eff67cd64c8f13335ee18fc3c7befc05aed043d24c7b9ccb9"}, - {file = "frozenlist-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c16d2fa63e0800723139137d667e1056bee1a1cf7965153d2d104b62855e9b99"}, - {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:17dcc32fc7bda7ce5875435003220a457bcfa34ab7924a49a1c19f55b6ee185c"}, - {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:97160e245ea33d8609cd2b8fd997c850b56db147a304a262abc2b3be021a9171"}, - {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:f1e6540b7fa044eee0bb5111ada694cf3dc15f2b0347ca125ee9ca984d5e9e6e"}, - {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:91d6c171862df0a6c61479d9724f22efb6109111017c87567cfeb7b5d1449fdf"}, - {file = "frozenlist-1.5.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c1fac3e2ace2eb1052e9f7c7db480818371134410e1f5c55d65e8f3ac6d1407e"}, - {file = "frozenlist-1.5.0-cp38-cp38-win32.whl", hash = "sha256:b97f7b575ab4a8af9b7bc1d2ef7f29d3afee2226bd03ca3875c16451ad5a7723"}, - {file = "frozenlist-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:374ca2dabdccad8e2a76d40b1d037f5bd16824933bf7bcea3e59c891fd4a0923"}, - {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9bbcdfaf4af7ce002694a4e10a0159d5a8d20056a12b05b45cea944a4953f972"}, - {file = "frozenlist-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1893f948bf6681733aaccf36c5232c231e3b5166d607c5fa77773611df6dc336"}, - {file = "frozenlist-1.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2b5e23253bb709ef57a8e95e6ae48daa9ac5f265637529e4ce6b003a37b2621f"}, - {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f253985bb515ecd89629db13cb58d702035ecd8cfbca7d7a7e29a0e6d39af5f"}, - {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:04a5c6babd5e8fb7d3c871dc8b321166b80e41b637c31a995ed844a6139942b6"}, - {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9fe0f1c29ba24ba6ff6abf688cb0b7cf1efab6b6aa6adc55441773c252f7411"}, - {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:226d72559fa19babe2ccd920273e767c96a49b9d3d38badd7c91a0fdeda8ea08"}, - {file = "frozenlist-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15b731db116ab3aedec558573c1a5eec78822b32292fe4f2f0345b7f697745c2"}, - {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:366d8f93e3edfe5a918c874702f78faac300209a4d5bf38352b2c1bdc07a766d"}, - {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1b96af8c582b94d381a1c1f51ffaedeb77c821c690ea5f01da3d70a487dd0a9b"}, - {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:c03eff4a41bd4e38415cbed054bbaff4a075b093e2394b6915dca34a40d1e38b"}, - {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:50cf5e7ee9b98f22bdecbabf3800ae78ddcc26e4a435515fc72d97903e8488e0"}, - {file = "frozenlist-1.5.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1e76bfbc72353269c44e0bc2cfe171900fbf7f722ad74c9a7b638052afe6a00c"}, - {file = "frozenlist-1.5.0-cp39-cp39-win32.whl", hash = "sha256:666534d15ba8f0fda3f53969117383d5dc021266b3c1a42c9ec4855e4b58b9d3"}, - {file = "frozenlist-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:5c28f4b5dbef8a0d8aad0d4de24d1e9e981728628afaf4ea0792f5d0939372f0"}, - {file = "frozenlist-1.5.0-py3-none-any.whl", hash = "sha256:d994863bba198a4a518b467bb971c56e1db3f180a25c6cf7bb1949c267f748c3"}, - {file = "frozenlist-1.5.0.tar.gz", hash = "sha256:81d5af29e61b9c8348e876d442253723928dce6433e0e76cd925cd83f1b4b817"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e6e558ea1e47fd6fa8ac9ccdad403e5dd5ecc6ed8dda94343056fa4277d5c65e"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4b3cd7334a4bbc0c472164f3744562cb72d05002cc6fcf58adb104630bbc352"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9799257237d0479736e2b4c01ff26b5c7f7694ac9692a426cb717f3dc02fff9b"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a7bb0fe1f7a70fb5c6f497dc32619db7d2cdd53164af30ade2f34673f8b1fc"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:36d2fc099229f1e4237f563b2a3e0ff7ccebc3999f729067ce4e64a97a7f2869"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f27a9f9a86dcf00708be82359db8de86b80d029814e6693259befe82bb58a106"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75ecee69073312951244f11b8627e3700ec2bfe07ed24e3a685a5979f0412d24"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2c7d5aa19714b1b01a0f515d078a629e445e667b9da869a3cd0e6fe7dec78bd"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69bbd454f0fb23b51cadc9bdba616c9678e4114b6f9fa372d462ff2ed9323ec8"}, + {file = 
"frozenlist-1.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7daa508e75613809c7a57136dec4871a21bca3080b3a8fc347c50b187df4f00c"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:89ffdb799154fd4d7b85c56d5fa9d9ad48946619e0eb95755723fffa11022d75"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:920b6bd77d209931e4c263223381d63f76828bec574440f29eb497cf3394c249"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d3ceb265249fb401702fce3792e6b44c1166b9319737d21495d3611028d95769"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:52021b528f1571f98a7d4258c58aa8d4b1a96d4f01d00d51f1089f2e0323cb02"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0f2ca7810b809ed0f1917293050163c7654cefc57a49f337d5cd9de717b8fad3"}, + {file = "frozenlist-1.6.0-cp310-cp310-win32.whl", hash = "sha256:0e6f8653acb82e15e5443dba415fb62a8732b68fe09936bb6d388c725b57f812"}, + {file = "frozenlist-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a39819a5a3e84304cd286e3dc62a549fe60985415851b3337b6f5cc91907f1"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae8337990e7a45683548ffb2fee1af2f1ed08169284cd829cdd9a7fa7470530d"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c952f69dd524558694818a461855f35d36cc7f5c0adddce37e962c85d06eac0"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f5fef13136c4e2dee91bfb9a44e236fff78fc2cd9f838eddfc470c3d7d90afe"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:716bbba09611b4663ecbb7cd022f640759af8259e12a6ca939c0a6acd49eedba"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7b8c4dc422c1a3ffc550b465090e53b0bf4839047f3e436a34172ac67c45d595"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b11534872256e1666116f6587a1592ef395a98b54476addb5e8d352925cb5d4a"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c6eceb88aaf7221f75be6ab498dc622a151f5f88d536661af3ffc486245a626"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62c828a5b195570eb4b37369fcbbd58e96c905768d53a44d13044355647838ff"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c6bd2c6399920c9622362ce95a7d74e7f9af9bfec05fff91b8ce4b9647845a"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49ba23817781e22fcbd45fd9ff2b9b8cdb7b16a42a4851ab8025cae7b22e96d0"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:431ef6937ae0f853143e2ca67d6da76c083e8b1fe3df0e96f3802fd37626e606"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9d124b38b3c299ca68433597ee26b7819209cb8a3a9ea761dfe9db3a04bba584"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:118e97556306402e2b010da1ef21ea70cb6d6122e580da64c056b96f524fbd6a"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb3b309f1d4086b5533cf7bbcf3f956f0ae6469664522f1bde4feed26fba60f1"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", 
hash = "sha256:54dece0d21dce4fdb188a1ffc555926adf1d1c516e493c2914d7c370e454bc9e"}, + {file = "frozenlist-1.6.0-cp311-cp311-win32.whl", hash = "sha256:654e4ba1d0b2154ca2f096bed27461cf6160bc7f504a7f9a9ef447c293caf860"}, + {file = "frozenlist-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e911391bffdb806001002c1f860787542f45916c3baf764264a52765d5a5603"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770"}, + {file = "frozenlist-1.6.0-cp312-cp312-win32.whl", hash = "sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc"}, + {file = "frozenlist-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4"}, 
+ {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e"}, + {file = "frozenlist-1.6.0-cp313-cp313-win32.whl", hash = "sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4"}, + {file = "frozenlist-1.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0"}, + {file = 
"frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win32.whl", hash = "sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:536a1236065c29980c15c7229fbb830dedf809708c10e159b8136534233545f0"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ed5e3a4462ff25ca84fb09e0fada8ea267df98a450340ead4c91b44857267d70"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e19c0fc9f4f030fcae43b4cdec9e8ab83ffe30ec10c79a4a43a04d1af6c5e1ad"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c608f833897501dac548585312d73a7dca028bf3b8688f0d712b7acfaf7fb3"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0dbae96c225d584f834b8d3cc688825911960f003a85cb0fd20b6e5512468c42"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:625170a91dd7261a1d1c2a0c1a353c9e55d21cd67d0852185a5fef86587e6f5f"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1db8b2fc7ee8a940b547a14c10e56560ad3ea6499dc6875c354e2335812f739d"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4da6fc43048b648275a220e3a61c33b7fff65d11bdd6dcb9d9c145ff708b804c"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef8e7e8f2f3820c5f175d70fdd199b79e417acf6c72c5d0aa8f63c9f721646f"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa733d123cc78245e9bb15f29b44ed9e5780dc6867cfc4e544717b91f980af3b"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ba7f8d97152b61f22d7f59491a781ba9b177dd9f318486c5fbc52cde2db12189"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:56a0b8dd6d0d3d971c91f1df75e824986667ccce91e20dca2023683814344791"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5c9e89bf19ca148efcc9e3c44fd4c09d5af85c8a7dd3dbd0da1cb83425ef4983"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1330f0a4376587face7637dfd245380a57fe21ae8f9d360c1c2ef8746c4195fa"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2187248203b59625566cac53572ec8c2647a140ee2738b4e36772930377a533c"}, + {file = "frozenlist-1.6.0-cp39-cp39-win32.whl", hash = "sha256:2b8cf4cfea847d6c12af06091561a89740f1f67f331c3fa8623391905e878530"}, + {file = "frozenlist-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:1255d5d64328c5a0d066ecb0f02034d086537925f1f04b50b1ae60d37afbf572"}, + {file = "frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191"}, + {file = "frozenlist-1.6.0.tar.gz", hash = "sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68"}, ] [[package]] @@ -1246,6 +1366,7 @@ version = "1.3.0" description = "GenSON is a powerful, user-friendly JSON Schema generator." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7"}, {file = "genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37"}, @@ -1253,13 +1374,14 @@ files = [ [[package]] name = "google-auth" -version = "2.36.0" +version = "2.39.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "google_auth-2.36.0-py2.py3-none-any.whl", hash = "sha256:51a15d47028b66fd36e5c64a82d2d57480075bccc7da37cde257fc94177a61fb"}, - {file = "google_auth-2.36.0.tar.gz", hash = "sha256:545e9618f2df0bcbb7dcbc45a546485b1212624716975a1ea5ae8149ce769ab1"}, + {file = "google_auth-2.39.0-py2.py3-none-any.whl", hash = "sha256:0150b6711e97fb9f52fe599f55648950cc4540015565d8fbb31be2ad6e1548a2"}, + {file = "google_auth-2.39.0.tar.gz", hash = "sha256:73222d43cdc35a3aeacbfdcaf73142a97839f10de930550d89ebfe1d0a00cde7"}, ] [package.dependencies] @@ -1268,120 +1390,109 @@ pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] enterprise-cert = ["cryptography", "pyopenssl"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] +requests = ["requests (>=2.20.0,<3.0.0)"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +urllib3 = ["packaging", "urllib3"] [[package]] name = "googleapis-common-protos" -version = "1.66.0" +version = "1.70.0" description = "Common protobufs used in Google APIs" optional = 
false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "googleapis_common_protos-1.66.0-py2.py3-none-any.whl", hash = "sha256:d7abcd75fabb2e0ec9f74466401f6c119a0b498e27370e9be4c94cb7e382b8ed"}, - {file = "googleapis_common_protos-1.66.0.tar.gz", hash = "sha256:c3e7b33d15fdca5374cc0a7346dd92ffa847425cc4ea941d970f13680052ec8c"}, + {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, + {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, ] [package.dependencies] -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" [package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] +grpc = ["grpcio (>=1.44.0,<2.0.0)"] [[package]] name = "graphql-core" -version = "3.2.5" +version = "3.2.6" description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." optional = false python-versions = "<4,>=3.6" +groups = ["dev"] files = [ - {file = "graphql_core-3.2.5-py3-none-any.whl", hash = "sha256:2f150d5096448aa4f8ab26268567bbfeef823769893b39c1a2e1409590939c8a"}, - {file = "graphql_core-3.2.5.tar.gz", hash = "sha256:e671b90ed653c808715645e3998b7ab67d382d55467b7e2978549111bbabf8d5"}, + {file = "graphql_core-3.2.6-py3-none-any.whl", hash = "sha256:78b016718c161a6fb20a7d97bbf107f331cd1afe53e45566c59f776ed7f0b45f"}, + {file = "graphql_core-3.2.6.tar.gz", hash = "sha256:c08eec22f9e40f0bd61d805907e3b3b1b9a320bc606e23dc145eebca07c8fbab"}, ] [[package]] name = "greenlet" -version = "3.1.1" +version = "3.2.1" description = "Lightweight in-process concurrent programming" optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, - {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, - {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, 
- {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, - {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, - {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, - {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, - {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, - {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, - {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, - {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, - {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, - {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, - {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, - {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, - {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, - {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, - {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" +files = [ + {file = "greenlet-3.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:777c1281aa7c786738683e302db0f55eb4b0077c20f1dc53db8852ffaea0a6b0"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:3059c6f286b53ea4711745146ffe5a5c5ff801f62f6c56949446e0f6461f8157"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e1a40a17e2c7348f5eee5d8e1b4fa6a937f0587eba89411885a36a8e1fc29bd2"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5193135b3a8d0017cb438de0d49e92bf2f6c1c770331d24aa7500866f4db4017"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:639a94d001fe874675b553f28a9d44faed90f9864dc57ba0afef3f8d76a18b04"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8fe303381e7e909e42fb23e191fc69659910909fdcd056b92f6473f80ef18543"}, + {file = "greenlet-3.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:72c9b668454e816b5ece25daac1a42c94d1c116d5401399a11b77ce8d883110c"}, + {file = "greenlet-3.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6079ae990bbf944cf66bea64a09dcb56085815630955109ffa98984810d71565"}, + {file = "greenlet-3.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:e63cd2035f49376a23611fbb1643f78f8246e9d4dfd607534ec81b175ce582c2"}, + {file = "greenlet-3.2.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:aa30066fd6862e1153eaae9b51b449a6356dcdb505169647f69e6ce315b9468b"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b0f3a0a67786facf3b907a25db80efe74310f9d63cc30869e49c79ee3fcef7e"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64a4d0052de53ab3ad83ba86de5ada6aeea8f099b4e6c9ccce70fb29bc02c6a2"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:852ef432919830022f71a040ff7ba3f25ceb9fe8f3ab784befd747856ee58530"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4818116e75a0dd52cdcf40ca4b419e8ce5cb6669630cb4f13a6c384307c9543f"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9afa05fe6557bce1642d8131f87ae9462e2a8e8c46f7ed7929360616088a3975"}, + {file = "greenlet-3.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5c12f0d17a88664757e81a6e3fc7c2452568cf460a2f8fb44f90536b2614000b"}, + {file = "greenlet-3.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dbb4e1aa2000852937dd8f4357fb73e3911da426df8ca9b8df5db231922da474"}, + {file = "greenlet-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:cb5ee928ce5fedf9a4b0ccdc547f7887136c4af6109d8f2fe8e00f90c0db47f5"}, + {file = "greenlet-3.2.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:0ba2811509a30e5f943be048895a983a8daf0b9aa0ac0ead526dfb5d987d80ea"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4245246e72352b150a1588d43ddc8ab5e306bef924c26571aafafa5d1aaae4e8"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7abc0545d8e880779f0c7ce665a1afc3f72f0ca0d5815e2b006cafc4c1cc5840"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6dcc6d604a6575c6225ac0da39df9335cc0c6ac50725063fa90f104f3dbdb2c9"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2273586879affca2d1f414709bb1f61f0770adcabf9eda8ef48fd90b36f15d12"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:ff38c869ed30fff07f1452d9a204ece1ec6d3c0870e0ba6e478ce7c1515acf22"}, + {file = "greenlet-3.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e934591a7a4084fa10ee5ef50eb9d2ac8c4075d5c9cf91128116b5dca49d43b1"}, + {file = "greenlet-3.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:063bcf7f8ee28eb91e7f7a8148c65a43b73fbdc0064ab693e024b5a940070145"}, + {file = "greenlet-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7132e024ebeeeabbe661cf8878aac5d2e643975c4feae833142592ec2f03263d"}, + {file = "greenlet-3.2.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:e1967882f0c42eaf42282a87579685c8673c51153b845fde1ee81be720ae27ac"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e77ae69032a95640a5fe8c857ec7bee569a0997e809570f4c92048691ce4b437"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3227c6ec1149d4520bc99edac3b9bc8358d0034825f3ca7572165cb502d8f29a"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ddda0197c5b46eedb5628d33dad034c455ae77708c7bf192686e760e26d6a0c"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de62b542e5dcf0b6116c310dec17b82bb06ef2ceb696156ff7bf74a7a498d982"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c07a0c01010df42f1f058b3973decc69c4d82e036a951c3deaf89ab114054c07"}, + {file = "greenlet-3.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2530bfb0abcd451ea81068e6d0a1aac6dabf3f4c23c8bd8e2a8f579c2dd60d95"}, + {file = "greenlet-3.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1c472adfca310f849903295c351d297559462067f618944ce2650a1878b84123"}, + {file = "greenlet-3.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:24a496479bc8bd01c39aa6516a43c717b4cee7196573c47b1f8e1011f7c12495"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:175d583f7d5ee57845591fc30d852b75b144eb44b05f38b67966ed6df05c8526"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ecc9d33ca9428e4536ea53e79d781792cee114d2fa2695b173092bdbd8cd6d5"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f56382ac4df3860ebed8ed838f268f03ddf4e459b954415534130062b16bc32"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc45a7189c91c0f89aaf9d69da428ce8301b0fd66c914a499199cfb0c28420fc"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51a2f49da08cff79ee42eb22f1658a2aed60c72792f0a0a95f5f0ca6d101b1fb"}, + {file = "greenlet-3.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:0c68bbc639359493420282d2f34fa114e992a8724481d700da0b10d10a7611b8"}, + {file = "greenlet-3.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:e775176b5c203a1fa4be19f91da00fd3bff536868b77b237da3f4daa5971ae5d"}, + {file = "greenlet-3.2.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:d6668caf15f181c1b82fb6406f3911696975cc4c37d782e19cb7ba499e556189"}, + {file = "greenlet-3.2.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:17964c246d4f6e1327edd95e2008988a8995ae3a7732be2f9fc1efed1f1cdf8c"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:04b4ec7f65f0e4a1500ac475c9343f6cc022b2363ebfb6e94f416085e40dea15"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b38d53cf268da963869aa25a6e4cc84c1c69afc1ae3391738b2603d110749d01"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a7490f74e8aabc5f29256765a99577ffde979920a2db1f3676d265a3adba41"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4339b202ac20a89ccd5bde0663b4d00dc62dd25cb3fb14f7f3034dec1b0d9ece"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a750f1046994b9e038b45ae237d68153c29a3a783075211fb1414a180c8324b"}, + {file = "greenlet-3.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:374ffebaa5fbd10919cd599e5cf8ee18bae70c11f9d61e73db79826c8c93d6f9"}, + {file = "greenlet-3.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b89e5d44f55372efc6072f59ced5ed1efb7b44213dab5ad7e0caba0232c6545"}, + {file = "greenlet-3.2.1-cp39-cp39-win32.whl", hash = "sha256:b7503d6b8bbdac6bbacf5a8c094f18eab7553481a1830975799042f26c9e101b"}, + {file = "greenlet-3.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:e98328b8b8f160925d6b1c5b1879d8e64f6bd8cf11472b7127d579da575b77d9"}, + {file = "greenlet-3.2.1.tar.gz", hash = "sha256:9f4dd4b4946b14bb3bf038f81e1d2e535b7d94f1b2a59fdba1293cd9c1a0a4d7"}, ] [package.extras] @@ -1394,6 +1505,7 @@ version = "0.15.4" description = "Simplifies gRPC interceptors" optional = false python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ {file = "grpc-interceptor-0.15.4.tar.gz", hash = "sha256:1f45c0bcb58b6f332f37c637632247c9b02bc6af0fdceb7ba7ce8d2ebbfb0926"}, {file = "grpc_interceptor-0.15.4-py3-none-any.whl", hash = "sha256:0035f33228693ed3767ee49d937bac424318db173fef4d2d0170b3215f254d9d"}, @@ -1407,70 +1519,67 @@ testing = ["protobuf (>=4.21.9)"] [[package]] name = "grpcio" -version = "1.68.0" +version = "1.71.0" description = "HTTP/2-based RPC framework" optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.68.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:619b5d0f29f4f5351440e9343224c3e19912c21aeda44e0c49d0d147a8d01544"}, - {file = "grpcio-1.68.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:a59f5822f9459bed098ffbceb2713abbf7c6fd13f2b9243461da5c338d0cd6c3"}, - {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:c03d89df516128febc5a7e760d675b478ba25802447624edf7aa13b1e7b11e2a"}, - {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44bcbebb24363d587472089b89e2ea0ab2e2b4df0e4856ba4c0b087c82412121"}, - {file = "grpcio-1.68.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79f81b7fbfb136247b70465bd836fa1733043fdee539cd6031cb499e9608a110"}, - {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:88fb2925789cfe6daa20900260ef0a1d0a61283dfb2d2fffe6194396a354c618"}, - {file = "grpcio-1.68.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:99f06232b5c9138593ae6f2e355054318717d32a9c09cdc5a2885540835067a1"}, - {file = "grpcio-1.68.0-cp310-cp310-win32.whl", hash = "sha256:a6213d2f7a22c3c30a479fb5e249b6b7e648e17f364598ff64d08a5136fe488b"}, - {file = "grpcio-1.68.0-cp310-cp310-win_amd64.whl", hash = "sha256:15327ab81131ef9b94cb9f45b5bd98803a179c7c61205c8c0ac9aff9d6c4e82a"}, - {file = "grpcio-1.68.0-cp311-cp311-linux_armv7l.whl", hash = 
"sha256:3b2b559beb2d433129441783e5f42e3be40a9e1a89ec906efabf26591c5cd415"}, - {file = "grpcio-1.68.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e46541de8425a4d6829ac6c5d9b16c03c292105fe9ebf78cb1c31e8d242f9155"}, - {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:c1245651f3c9ea92a2db4f95d37b7597db6b246d5892bca6ee8c0e90d76fb73c"}, - {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1931c7aa85be0fa6cea6af388e576f3bf6baee9e5d481c586980c774debcb4"}, - {file = "grpcio-1.68.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b0ff09c81e3aded7a183bc6473639b46b6caa9c1901d6f5e2cba24b95e59e30"}, - {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8c73f9fbbaee1a132487e31585aa83987ddf626426d703ebcb9a528cf231c9b1"}, - {file = "grpcio-1.68.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6b2f98165ea2790ea159393a2246b56f580d24d7da0d0342c18a085299c40a75"}, - {file = "grpcio-1.68.0-cp311-cp311-win32.whl", hash = "sha256:e1e7ed311afb351ff0d0e583a66fcb39675be112d61e7cfd6c8269884a98afbc"}, - {file = "grpcio-1.68.0-cp311-cp311-win_amd64.whl", hash = "sha256:e0d2f68eaa0a755edd9a47d40e50dba6df2bceda66960dee1218da81a2834d27"}, - {file = "grpcio-1.68.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8af6137cc4ae8e421690d276e7627cfc726d4293f6607acf9ea7260bd8fc3d7d"}, - {file = "grpcio-1.68.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4028b8e9a3bff6f377698587d642e24bd221810c06579a18420a17688e421af7"}, - {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:f60fa2adf281fd73ae3a50677572521edca34ba373a45b457b5ebe87c2d01e1d"}, - {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e18589e747c1e70b60fab6767ff99b2d0c359ea1db8a2cb524477f93cdbedf5b"}, - {file = "grpcio-1.68.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0d30f3fee9372796f54d3100b31ee70972eaadcc87314be369360248a3dcffe"}, - {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:7e0a3e72c0e9a1acab77bef14a73a416630b7fd2cbd893c0a873edc47c42c8cd"}, - {file = "grpcio-1.68.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a831dcc343440969aaa812004685ed322cdb526cd197112d0db303b0da1e8659"}, - {file = "grpcio-1.68.0-cp312-cp312-win32.whl", hash = "sha256:5a180328e92b9a0050958ced34dddcb86fec5a8b332f5a229e353dafc16cd332"}, - {file = "grpcio-1.68.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bddd04a790b69f7a7385f6a112f46ea0b34c4746f361ebafe9ca0be567c78e9"}, - {file = "grpcio-1.68.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:fc05759ffbd7875e0ff2bd877be1438dfe97c9312bbc558c8284a9afa1d0f40e"}, - {file = "grpcio-1.68.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:15fa1fe25d365a13bc6d52fcac0e3ee1f9baebdde2c9b3b2425f8a4979fccea1"}, - {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:32a9cb4686eb2e89d97022ecb9e1606d132f85c444354c17a7dbde4a455e4a3b"}, - {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dba037ff8d284c8e7ea9a510c8ae0f5b016004f13c3648f72411c464b67ff2fb"}, - {file = "grpcio-1.68.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0efbbd849867e0e569af09e165363ade75cf84f5229b2698d53cf22c7a4f9e21"}, - {file = "grpcio-1.68.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:4e300e6978df0b65cc2d100c54e097c10dfc7018b9bd890bbbf08022d47f766d"}, - {file = 
"grpcio-1.68.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:6f9c7ad1a23e1047f827385f4713b5b8c6c7d325705be1dd3e31fb00dcb2f665"}, - {file = "grpcio-1.68.0-cp313-cp313-win32.whl", hash = "sha256:3ac7f10850fd0487fcce169c3c55509101c3bde2a3b454869639df2176b60a03"}, - {file = "grpcio-1.68.0-cp313-cp313-win_amd64.whl", hash = "sha256:afbf45a62ba85a720491bfe9b2642f8761ff348006f5ef67e4622621f116b04a"}, - {file = "grpcio-1.68.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:f8f695d9576ce836eab27ba7401c60acaf9ef6cf2f70dfe5462055ba3df02cc3"}, - {file = "grpcio-1.68.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9fe1b141cda52f2ca73e17d2d3c6a9f3f3a0c255c216b50ce616e9dca7e3441d"}, - {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:4df81d78fd1646bf94ced4fb4cd0a7fe2e91608089c522ef17bc7db26e64effd"}, - {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46a2d74d4dd8993151c6cd585594c082abe74112c8e4175ddda4106f2ceb022f"}, - {file = "grpcio-1.68.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a17278d977746472698460c63abf333e1d806bd41f2224f90dbe9460101c9796"}, - {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:15377bce516b1c861c35e18eaa1c280692bf563264836cece693c0f169b48829"}, - {file = "grpcio-1.68.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc5f0a4f5904b8c25729a0498886b797feb817d1fd3812554ffa39551112c161"}, - {file = "grpcio-1.68.0-cp38-cp38-win32.whl", hash = "sha256:def1a60a111d24376e4b753db39705adbe9483ef4ca4761f825639d884d5da78"}, - {file = "grpcio-1.68.0-cp38-cp38-win_amd64.whl", hash = "sha256:55d3b52fd41ec5772a953612db4e70ae741a6d6ed640c4c89a64f017a1ac02b5"}, - {file = "grpcio-1.68.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:0d230852ba97654453d290e98d6aa61cb48fa5fafb474fb4c4298d8721809354"}, - {file = "grpcio-1.68.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:50992f214264e207e07222703c17d9cfdcc2c46ed5a1ea86843d440148ebbe10"}, - {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:14331e5c27ed3545360464a139ed279aa09db088f6e9502e95ad4bfa852bb116"}, - {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f84890b205692ea813653ece4ac9afa2139eae136e419231b0eec7c39fdbe4c2"}, - {file = "grpcio-1.68.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0cf343c6f4f6aa44863e13ec9ddfe299e0be68f87d68e777328bff785897b05"}, - {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:fd2c2d47969daa0e27eadaf15c13b5e92605c5e5953d23c06d0b5239a2f176d3"}, - {file = "grpcio-1.68.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:18668e36e7f4045820f069997834e94e8275910b1f03e078a6020bd464cb2363"}, - {file = "grpcio-1.68.0-cp39-cp39-win32.whl", hash = "sha256:2af76ab7c427aaa26aa9187c3e3c42f38d3771f91a20f99657d992afada2294a"}, - {file = "grpcio-1.68.0-cp39-cp39-win_amd64.whl", hash = "sha256:e694b5928b7b33ca2d3b4d5f9bf8b5888906f181daff6b406f4938f3a997a490"}, - {file = "grpcio-1.68.0.tar.gz", hash = "sha256:7e7483d39b4a4fddb9906671e9ea21aaad4f031cdfc349fec76bdfa1e404543a"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, + {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, + {file = 
"grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"}, + {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"}, + {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"}, + {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"}, + {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"}, + {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"}, + {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"}, + {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"}, + {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"}, + {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"}, + {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"}, + {file = "grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"}, + {file = "grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"}, + {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"}, + {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"}, + {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"}, + {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"}, + {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"}, + {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"}, + {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.68.0)"] +protobuf = ["grpcio-tools (>=1.71.0)"] [[package]] name = "h11" @@ -1478,17 +1587,37 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, ] +[[package]] +name = "harfile" +version = "0.3.0" +description = "Writer for HTTP Archive (HAR) files" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "harfile-0.3.0-py3-none-any.whl", hash = "sha256:ac11177e06c88c9553c8c73c16ab20428a176d1d2ebe00b41ce527ff0bdc47e6"}, + {file = "harfile-0.3.0.tar.gz", hash = "sha256:23be8037e1296bb4787a15543a37835ed91f408c8296988f9ba022a44accad9e"}, +] + +[package.extras] +bench = ["pytest-codspeed (==2.2.1)"] +cov = ["coverage-enable-subprocess", "coverage[toml] (>=7)"] +dev = ["coverage (>=7)", "coverage-enable-subprocess", "coverage[toml] (>=7)", "hypothesis (>=6)", "hypothesis-jsonschema (>=0.23.1)", "jsonschema (>=4.18.0)", "pytest (>=6.2.0,<8)", "pytest-codspeed (==2.2.1)"] +tests = ["coverage (>=7)", "hypothesis (>=6)", "hypothesis-jsonschema (>=0.23.1)", "jsonschema (>=4.18.0)", "pytest (>=6.2.0,<8)"] + [[package]] name = "html5tagger" version = "1.3.0" description = "Pythonic HTML generation/templating (no template files)" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "html5tagger-1.3.0-py3-none-any.whl", hash = "sha256:ce14313515edffec8ed8a36c5890d023922641171b4e6e5774ad1a74998f5351"}, {file = "html5tagger-1.3.0.tar.gz", hash = "sha256:84fa3dfb49e5c83b79bbd856ab7b1de8e2311c3bb46a8be925f119e3880a8da9"}, @@ -1496,13 +1625,14 @@ files = [ [[package]] name = "httpcore" -version = "1.0.7" +version = "1.0.8" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, + {file = "httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be"}, + {file = "httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad"}, ] [package.dependencies] @@ -1521,6 +1651,7 @@ version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, @@ -1572,13 +1703,14 @@ test = ["Cython (>=0.29.24)"] [[package]] name = "httpx" -version = "0.28.0" +version = "0.28.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc"}, - {file = "httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] @@ -1588,7 +1720,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -1596,13 +1728,14 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "httpx-ws" -version = "0.6.2" +version = "0.7.2" description = "WebSockets support for HTTPX" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "httpx_ws-0.6.2-py3-none-any.whl", hash = "sha256:24f87427acb757ada200aeab016cc429fa0bc71b0730429c37634867194e305c"}, - {file = "httpx_ws-0.6.2.tar.gz", hash = "sha256:b07446b9067a30f1012fa9851fdfd14207012cd657c485565884f90553d0854c"}, + {file = "httpx_ws-0.7.2-py3-none-any.whl", hash = "sha256:dd7bf9dbaa96dcd5cef1af3a7e1130cfac068bebecce25a74145022f5a8427a3"}, + {file = "httpx_ws-0.7.2.tar.gz", hash = "sha256:93edea6c8fc313464fc287bff7d2ad20e6196b7754c76f946f73b4af79886d4e"}, ] [package.dependencies] @@ -1613,13 +1746,14 @@ wsproto = "*" [[package]] name = "hypothesis" -version = "6.119.4" +version = "6.131.8" description = "A library for property-based testing" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "hypothesis-6.119.4-py3-none-any.whl", hash = "sha256:333958da7855048850c3d2b6a929d44a3c89ca9eafcfddcacc3570140915eba5"}, - {file = "hypothesis-6.119.4.tar.gz", hash = "sha256:1a7d12709c0e96c1d85aca76d1594b34b5958623e00511592eba674acd4f3392"}, + {file = "hypothesis-6.131.8-py3-none-any.whl", hash = "sha256:fdae34221ed072c6e631ff472fba98806f16d93a483a6ab270ba96ecfd767dd4"}, + {file = "hypothesis-6.131.8.tar.gz", hash = "sha256:68dab267df9e6e06e404c2ac5d5746438526d4687895e5a47ae9bade4a8ca4f3"}, ] [package.dependencies] @@ -1627,10 +1761,10 @@ attrs = ">=22.2.0" sortedcontainers = ">=2.1.0,<3.0.0" [package.extras] -all = ["black (>=19.10b0)", "click (>=7.0)", "crosshair-tool (>=0.0.77)", "django (>=4.2)", "dpcontracts (>=0.4)", "hypothesis-crosshair (>=0.0.18)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.19.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2024.2)"] +all = ["black (>=19.10b0)", "click (>=7.0)", "crosshair-tool (>=0.0.86)", "django (>=4.2)", "dpcontracts (>=0.4)", "hypothesis-crosshair (>=0.0.22)", "lark (>=0.10.1)", "libcst (>=0.3.16)", "numpy (>=1.19.3)", "pandas (>=1.1)", "pytest (>=4.6)", "python-dateutil (>=1.4)", "pytz (>=2014.1)", "redis (>=3.0.0)", "rich (>=9.0.0)", "tzdata (>=2025.2) ; sys_platform == \"win32\" or sys_platform == \"emscripten\"", "watchdog (>=4.0.0)"] cli = ["black (>=19.10b0)", "click (>=7.0)", "rich (>=9.0.0)"] codemods = ["libcst (>=0.3.16)"] -crosshair = ["crosshair-tool (>=0.0.77)", "hypothesis-crosshair (>=0.0.18)"] 
+crosshair = ["crosshair-tool (>=0.0.86)", "hypothesis-crosshair (>=0.0.22)"] dateutil = ["python-dateutil (>=1.4)"] django = ["django (>=4.2)"] dpcontracts = ["dpcontracts (>=0.4)"] @@ -1641,7 +1775,8 @@ pandas = ["pandas (>=1.1)"] pytest = ["pytest (>=4.6)"] pytz = ["pytz (>=2014.1)"] redis = ["redis (>=3.0.0)"] -zoneinfo = ["tzdata (>=2024.2)"] +watchdog = ["watchdog (>=4.0.0)"] +zoneinfo = ["tzdata (>=2025.2) ; sys_platform == \"win32\" or sys_platform == \"emscripten\""] [[package]] name = "hypothesis-graphql" @@ -1649,6 +1784,7 @@ version = "0.11.1" description = "Hypothesis strategies for GraphQL queries" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "hypothesis_graphql-0.11.1-py3-none-any.whl", hash = "sha256:a6968f703bcdc31fbe1b26be69185aa2c824eb3b478057a66aa85967c81cadca"}, {file = "hypothesis_graphql-0.11.1.tar.gz", hash = "sha256:bd49ab6804a3f488ecab2e39c20dba6dfc2101525c6742f5831cfa9eff95285a"}, @@ -1669,6 +1805,7 @@ version = "0.23.1" description = "Generate test data from JSON schemata with Hypothesis" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "hypothesis-jsonschema-0.23.1.tar.gz", hash = "sha256:f4ac032024342a4149a10253984f5a5736b82b3fe2afb0888f3834a31153f215"}, {file = "hypothesis_jsonschema-0.23.1-py3-none-any.whl", hash = "sha256:a4d74d9516dd2784fbbae82e009f62486c9104ac6f4e3397091d98a1d5ee94a2"}, @@ -1680,13 +1817,14 @@ jsonschema = ">=4.18.0" [[package]] name = "identify" -version = "2.6.2" +version = "2.6.10" description = "File identification library for Python" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "identify-2.6.2-py2.py3-none-any.whl", hash = "sha256:c097384259f49e372f4ea00a19719d95ae27dd5ff0fd77ad630aa891306b82f3"}, - {file = "identify-2.6.2.tar.gz", hash = "sha256:fab5c716c24d7a789775228823797296a2994b075fb6080ac83a102772a98cbd"}, + {file = "identify-2.6.10-py2.py3-none-any.whl", hash = "sha256:5f34248f54136beed1a7ba6a6b5c4b6cf21ff495aac7c359e1ef831ae3b8ab25"}, + {file = "identify-2.6.10.tar.gz", hash = "sha256:45e92fd704f3da71cc3880036633f48b4b7265fd4de2b57627cb157216eb7eb8"}, ] [package.extras] @@ -1698,6 +1836,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main", "dev"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -1712,6 +1851,7 @@ version = "5.6.2" description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "inflect-5.6.2-py3-none-any.whl", hash = "sha256:b45d91a4a28a4e617ff1821117439b06eaa86e2a4573154af0149e9be6687238"}, {file = "inflect-5.6.2.tar.gz", hash = "sha256:aadc7ed73928f5e014129794bbac03058cca35d0a973a5fc4eb45c7fa26005f9"}, @@ -1719,53 +1859,148 @@ files = [ [package.extras] docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] +testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler 
(>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\""] [[package]] -name = "inflection" -version = "0.5.1" -description = "A port of Ruby on Rails inflector to Python" +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, - {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, ] [[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" +name = "installer" +version = "0.7.0" +description = "A library for installing Python wheels." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, + {file = "installer-0.7.0-py3-none-any.whl", hash = "sha256:05d1933f0a5ba7d8d6296bb6d5018e7c94fa473ceb10cf198a92ccea19c27b53"}, + {file = "installer-0.7.0.tar.gz", hash = "sha256:a26d3e3116289bb08216e0d0f7d925fcef0b0194eedfa0c944bcaaa106c4b631"}, ] +[[package]] +name = "isoduration" +version = "20.11.0" +description = "Operations with ISO 8601 durations" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "isoduration-20.11.0-py3-none-any.whl", hash = "sha256:b2904c2a4228c3d44f409c8ae8e2370eb21a26f7ac2ec5446df141dde3452042"}, + {file = "isoduration-20.11.0.tar.gz", hash = "sha256:ac2f9015137935279eac671f94f89eb00584f940f5dc49462a0c4ee692ba1bd9"}, +] + +[package.dependencies] +arrow = ">=0.15.0" + [[package]] name = "isort" -version = "5.13.2" +version = "6.0.1" description = "A Python utility / library to sort Python imports." 
optional = false -python-versions = ">=3.8.0" +python-versions = ">=3.9.0" +groups = ["main"] +files = [ + {file = "isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615"}, + {file = "isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450"}, +] + +[package.extras] +colors = ["colorama"] +plugins = ["setuptools"] + +[[package]] +name = "jaraco-classes" +version = "3.4.0" +description = "Utility functions for Python class constructs" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"}, + {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"}, +] + +[package.dependencies] +more-itertools = "*" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] + +[[package]] +name = "jaraco-context" +version = "6.0.1" +description = "Useful decorators and context managers" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4"}, + {file = "jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3"}, +] + +[package.extras] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] + +[[package]] +name = "jaraco-functools" +version = "4.1.0" +description = "Functools like those found in stdlib" +optional = false +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, + {file = "jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649"}, + {file = "jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d"}, ] +[package.dependencies] +more-itertools = "*" + [package.extras] -colors = ["colorama (>=0.4.6)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.classes", "pytest (>=6,!=8.1.*)"] +type = ["pytest-mypy"] + +[[package]] +name = "jeepney" +version = "0.9.0" +description = "Low-level, pure Python DBus protocol wrapper." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +markers = "sys_platform == \"linux\"" +files = [ + {file = "jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683"}, + {file = "jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732"}, +] + +[package.extras] +test = ["async-timeout ; python_version < \"3.11\"", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] +trio = ["trio"] [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -1774,12 +2009,37 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "jsonpointer" +version = "3.0.0" +description = "Identify specific nodes in a JSON document (RFC 6901)" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "jsonpointer-3.0.0-py2.py3-none-any.whl", hash = "sha256:13e088adc14fca8b6aa8177c044e12701e6ad4b28ff10e65f2267a90109c9942"}, + {file = "jsonpointer-3.0.0.tar.gz", hash = "sha256:2b2d729f2091522d61c3b31f82e11870f60b68f43fbc705cb76bf4b832af59ef"}, +] + [[package]] name = "jsonschema" version = "4.23.0" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, @@ -1787,9 +2047,17 @@ files = [ [package.dependencies] attrs = ">=22.2.0" +fqdn = {version = "*", optional = true, markers = "extra == \"format\""} +idna = {version = "*", optional = true, markers = "extra == \"format\""} +isoduration = {version = "*", optional = true, markers = "extra == \"format\""} +jsonpointer = {version = ">1.13", optional = true, markers = "extra == \"format\""} jsonschema-specifications = ">=2023.03.6" referencing = ">=0.28.4" +rfc3339-validator = {version = "*", optional = true, markers = "extra == \"format\""} +rfc3987 = {version = "*", optional = true, markers = "extra == \"format\""} rpds-py = ">=0.7.1" +uri-template = {version = "*", optional = true, markers = "extra == \"format\""} +webcolors = {version = ">=1.11", optional = true, markers = "extra == \"format\""} [package.extras] format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", 
"rfc3987", "uri-template", "webcolors (>=1.11)"] @@ -1797,13 +2065,14 @@ format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339- [[package]] name = "jsonschema-specifications" -version = "2024.10.1" +version = "2025.4.1" description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, - {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, + {file = "jsonschema_specifications-2025.4.1-py3-none-any.whl", hash = "sha256:4653bffbd6584f7de83a67e0d620ef16900b390ddc7939d56684d6c81e33f1af"}, + {file = "jsonschema_specifications-2025.4.1.tar.gz", hash = "sha256:630159c9f4dbea161a6a2205c3011cc4f18ff381b189fff48bb39b9bf26ae608"}, ] [package.dependencies] @@ -1815,6 +2084,7 @@ version = "1.9" description = "Creates JUnit XML test result documents that can be read by tools such as Jenkins" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "junit-xml-1.9.tar.gz", hash = "sha256:de16a051990d4e25a3982b2dd9e89d671067548718866416faec14d9de56db9f"}, {file = "junit_xml-1.9-py2.py3-none-any.whl", hash = "sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732"}, @@ -1823,15 +2093,45 @@ files = [ [package.dependencies] six = "*" +[[package]] +name = "keyring" +version = "25.6.0" +description = "Store and access your passwords safely." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd"}, + {file = "keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66"}, +] + +[package.dependencies] +"jaraco.classes" = "*" +"jaraco.context" = "*" +"jaraco.functools" = "*" +jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} +pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} +SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +completion = ["shtab (>=1.1.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["pyfakefs", "pytest (>=6,!=8.1.*)"] +type = ["pygobject-stubs", "pytest-mypy", "shtab", "types-pywin32"] + [[package]] name = "kr8s" -version = "0.18.1" +version = "0.20.7" description = "A Kubernetes API library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "kr8s-0.18.1-py3-none-any.whl", hash = "sha256:192d659c70c7650e7641c3c69a656ac16e51672118468eef8224ea60009932c4"}, - {file = "kr8s-0.18.1.tar.gz", hash = "sha256:73c864c108e2f5159faab8dba9833011d586918f4520dfc64594df7b7907493f"}, + {file = "kr8s-0.20.7-py3-none-any.whl", hash = "sha256:e489b97ff513c167f427f479ad5420c78adffd1a6ce5033b079109374200c0c6"}, + {file = "kr8s-0.20.7.tar.gz", hash = "sha256:ac45e966beea0f6f92f635b3e61e64b8e27962b4825d77b814a663e819a8ec16"}, ] [package.dependencies] @@ -1839,7 +2139,7 @@ anyio = ">=3.7.0" asyncache = ">=0.3.1" cryptography = ">=35" httpx = ">=0.24.1" -httpx-ws = 
">=0.5.2" +httpx-ws = ">=0.7.0" python-box = ">=7.0.1" python-jsonpath = ">=0.7.1" pyyaml = ">=6.0" @@ -1855,6 +2155,7 @@ version = "31.0.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, @@ -1878,13 +2179,14 @@ adal = ["adal (>=1.0.2)"] [[package]] name = "kubernetes-asyncio" -version = "31.1.0" +version = "32.3.0" description = "Kubernetes asynchronous python client" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "kubernetes_asyncio-31.1.0-py3-none-any.whl", hash = "sha256:76898fea5dee601b209fefeae4ecee2fb20bfe3ebf872b5ff37c96230fbda6cc"}, - {file = "kubernetes_asyncio-31.1.0.tar.gz", hash = "sha256:00128a96eb0284de0cbee53bd2fe044593f2e1547c48d09901cddf9258adfd88"}, + {file = "kubernetes_asyncio-32.3.0-py3-none-any.whl", hash = "sha256:3a0769d4bf39c638e474c76cd22f4aa81903db5ebd14573c1e3b3b7ebbf86fbc"}, + {file = "kubernetes_asyncio-32.3.0.tar.gz", hash = "sha256:3efdc39776f4e1c892ce08b74364e67be6c1d6870cba01ab27bb296fdc6fc485"}, ] [package.dependencies] @@ -1895,15 +2197,33 @@ pyyaml = ">=3.12" six = ">=1.9.0" urllib3 = ">=1.24.2" +[[package]] +name = "lark" +version = "0.12.0" +description = "a modern parsing library" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "lark-0.12.0-py2.py3-none-any.whl", hash = "sha256:ed1d891cbcf5151ead1c1d14663bf542443e579e63a76ae175b01b899bd854ca"}, + {file = "lark-0.12.0.tar.gz", hash = "sha256:7da76fcfddadabbbbfd949bbae221efd33938451d90b1fefbbc423c3cccf48ef"}, +] + +[package.extras] +atomic-cache = ["atomicwrites"] +nearley = ["js2py"] +regex = ["regex"] + [[package]] name = "mako" -version = "1.3.6" +version = "1.3.10" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "Mako-1.3.6-py3-none-any.whl", hash = "sha256:a91198468092a2f1a0de86ca92690fb0cfc43ca90ee17e15d93662b4c04b241a"}, - {file = "mako-1.3.6.tar.gz", hash = "sha256:9ec3a1583713479fae654f83ed9fa8c9a4c16b7bb0daba0e6bbebff50c0d983d"}, + {file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"}, + {file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"}, ] [package.dependencies] @@ -1914,12 +2234,28 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] +[[package]] +name = "markdown-code-runner" +version = "2.2.0" +description = "Automatically execute code blocks within a Markdown file and update the output in-place" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "markdown_code_runner-2.2.0-py3-none-any.whl", hash = "sha256:d8812c48ad3fd4a3f3725dfcd5a1b7e5baf7216855eeea8a92c7fd9120717ac6"}, + {file = "markdown_code_runner-2.2.0.tar.gz", hash = "sha256:3c495998a437bc7d7a4b1a5ce518bce10cf5ba0fa69c569fee1e32c5238603c4"}, +] + +[package.extras] +test = ["coverage", "pre-commit", "pytest", "pytest-cov"] + [[package]] name = "markdown-it-py" version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -1944,6 +2280,7 @@ version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, @@ -2010,13 +2347,14 @@ files = [ [[package]] name = "marshmallow" -version = "3.23.1" +version = "3.26.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"}, - {file = "marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468"}, + {file = "marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c"}, + {file = "marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6"}, ] [package.dependencies] @@ -2024,7 +2362,7 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.14)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"] tests = ["pytest", "simplejson"] [[package]] @@ -2033,6 +2371,7 @@ version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -2040,207 +2379,303 @@ files = [ [[package]] name = "mirakuru" -version = "2.5.3" +version = "2.6.0" description = "Process executor (not only) for tests." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "mirakuru-2.5.3-py3-none-any.whl", hash = "sha256:2fab68356fb98fb5358ea3ab65f5e511f34b5a0b16cfd0a0935ef15a3393f025"}, - {file = "mirakuru-2.5.3.tar.gz", hash = "sha256:39b33f8fcdf13764a6cfe936e0feeead3902a161fec438df3be7cce98f7933c6"}, + {file = "mirakuru-2.6.0-py3-none-any.whl", hash = "sha256:0ff7080997e63289dc309d0237e137ca2cfa863b3d26b3d5e8fd4e1c2b2ef659"}, + {file = "mirakuru-2.6.0.tar.gz", hash = "sha256:3256fcf81ef090a30be97a8ce50ff0c178292d7e542866c5fedc5ae6801e3a17"}, ] [package.dependencies] psutil = {version = ">=4.0.0", markers = "sys_platform != \"cygwin\""} +[[package]] +name = "monotonic" +version = "1.6" +description = "An implementation of time.monotonic() for Python 2 & < 3.3" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, + {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, +] + +[[package]] +name = "more-itertools" +version = "10.7.0" +description = "More routines for operating on iterables, beyond itertools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e"}, + {file = "more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3"}, +] + +[[package]] +name = "msgpack" +version = "1.1.0" +description = "MessagePack serializer" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, + {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, + {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, + {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, + {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, + {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, + {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, + {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, + {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, + {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, + {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, + {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, + {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, + {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, + {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, + {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, + {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, + {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, + {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, + {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, + {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, + {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, + {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, + {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, + {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, + {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, + {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, + {file = 
"msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, + {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, + {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, + {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, + {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, +] + [[package]] name = "multidict" -version = "6.1.0" +version = "6.4.3" description = "multidict implementation" optional = false -python-versions = ">=3.8" -files = [ - {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, - {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, - {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, - {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, - {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, - {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, - {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, - {file = 
"multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, - {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, - {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, - {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, - {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, - {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, - {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, - {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, - {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, - {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, - {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, - {file = 
"multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, - {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, - {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, - {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, - {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, - {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, - {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, - {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, - {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, - {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, - {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, - {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, - {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, - {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, - {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, - {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, - {file = "multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, - {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, - {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, - {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, - {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, - {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, - {file = 
"multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, - {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, - {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, - {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, - {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, - {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5"}, + {file = "multidict-6.4.3-cp310-cp310-win32.whl", hash = "sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e"}, + {file = "multidict-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7"}, + {file = "multidict-6.4.3-cp311-cp311-win32.whl", hash = "sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378"}, + {file = "multidict-6.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", 
hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a"}, + {file = "multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124"}, + {file = "multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8"}, + {file = "multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3"}, + {file = "multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4"}, + {file = "multidict-6.4.3-cp313-cp313t-win32.whl", hash = "sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5"}, + {file = "multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df"}, + {file = "multidict-6.4.3-cp39-cp39-win32.whl", hash = "sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f"}, + {file = "multidict-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897"}, + {file = "multidict-6.4.3-py3-none-any.whl", hash = 
"sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9"}, + {file = "multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec"}, ] [[package]] name = "mypy" -version = "1.13.0" +version = "1.15.0" description = "Optional static typing for Python" optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, - {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, - {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, - {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, - {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, - {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, - {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, - {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, - {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, - {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, - {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, - {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, - {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, - {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, - {file = 
"mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, - {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, - {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, - {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, - {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, - {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, - {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, - {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, - {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = 
"mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, ] [package.dependencies] -mypy-extensions = ">=1.0.0" -typing-extensions = ">=4.6.0" +mypy_extensions = ">=1.0.0" +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] faster-cache = ["orjson"] install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] [[package]] -name = "networkx" -version = "3.4.2" -description = "Python package for creating and manipulating graphs and networks" +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." optional = false -python-versions = ">=3.10" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f"}, - {file = "networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] -[package.extras] -default = ["matplotlib (>=3.7)", "numpy (>=1.24)", "pandas (>=2.0)", "scipy (>=1.10,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.15)", "sphinx (>=7.3)", "sphinx-gallery (>=0.16)", "texext (>=0.6.7)"] -example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=1.9)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] -extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] - [[package]] name = "nodeenv" version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, @@ -2252,6 +2687,7 @@ version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, @@ -2264,13 +2700,26 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "24.2" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = 
"packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "parsy" +version = "2.1" +description = "Easy-to-use parser combinators, for parsing in pure Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "parsy-2.1-py3-none-any.whl", hash = "sha256:8f18e7b11985e7802e7e3ecbd8291c6ca243d29820b1186e4c84605db4efffa0"}, + {file = "parsy-2.1.tar.gz", hash = "sha256:fd5dd18d7b0b61f8275ee88665f430a20c02cf5a82d88557f35330530186d7ac"}, ] [[package]] @@ -2279,6 +2728,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -2286,30 +2736,71 @@ files = [ [[package]] name = "pbr" -version = "6.1.0" +version = "6.1.1" description = "Python Build Reasonableness" optional = false python-versions = ">=2.6" +groups = ["dev"] +files = [ + {file = "pbr-6.1.1-py2.py3-none-any.whl", hash = "sha256:38d4daea5d9fa63b3f626131b9d34947fd0c8be9b05a29276870580050a25a76"}, + {file = "pbr-6.1.1.tar.gz", hash = "sha256:93ea72ce6989eb2eed99d0f75721474f69ad88128afdef5ac377eb797c4bf76b"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "pbs-installer" +version = "2025.4.9" +description = "Installer for Python Build Standalone" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pbs_installer-2025.4.9-py3-none-any.whl", hash = "sha256:af110b398248584422f46760ce1e3793622fe3fbcde47aacd22e35baf8c3db1d"}, + {file = "pbs_installer-2025.4.9.tar.gz", hash = "sha256:15755bc94769a544af5dda155f973c70caf76f0e70b21f3c8a8ed506f102f88f"}, +] + +[package.dependencies] +httpx = {version = ">=0.27.0,<1", optional = true, markers = "extra == \"download\""} +zstandard = {version = ">=0.21.0", optional = true, markers = "extra == \"install\""} + +[package.extras] +all = ["pbs-installer[download,install]"] +download = ["httpx (>=0.27.0,<1)"] +install = ["zstandard (>=0.21.0)"] + +[[package]] +name = "pkginfo" +version = "1.12.1.2" +description = "Query metadata from sdists / bdists / installed packages." +optional = false +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "pbr-6.1.0-py2.py3-none-any.whl", hash = "sha256:a776ae228892d8013649c0aeccbb3d5f99ee15e005a4cbb7e61d55a067b28a2a"}, - {file = "pbr-6.1.0.tar.gz", hash = "sha256:788183e382e3d1d7707db08978239965e8b9e4e5ed42669bf4758186734d5f24"}, + {file = "pkginfo-1.12.1.2-py3-none-any.whl", hash = "sha256:c783ac885519cab2c34927ccfa6bf64b5a704d7c69afaea583dd9b7afe969343"}, + {file = "pkginfo-1.12.1.2.tar.gz", hash = "sha256:5cd957824ac36f140260964eba3c6be6442a8359b8c48f4adf90210f33a04b7b"}, ] +[package.extras] +testing = ["pytest", "pytest-cov", "wheel"] + [[package]] name = "platformdirs" -version = "4.3.6" +version = "4.3.7" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, + {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, + {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] [[package]] name = "pluggy" @@ -2317,6 +2808,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -2326,26 +2818,101 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "poetry" +version = "2.1.2" +description = "Python dependency management and packaging made easy." +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "poetry-2.1.2-py3-none-any.whl", hash = "sha256:df7dfe7e5f9cd50ed3b8d1a013afcc379645f66d7e9aa43728689e34fb016216"}, + {file = "poetry-2.1.2.tar.gz", hash = "sha256:6a0694645ee24ba93cb94254db66e47971344562ddd5578e82bf35e572bc546d"}, +] + +[package.dependencies] +build = ">=1.2.1,<2.0.0" +cachecontrol = {version = ">=0.14.0,<0.15.0", extras = ["filecache"]} +cleo = ">=2.1.0,<3.0.0" +dulwich = ">=0.22.6,<0.23.0" +fastjsonschema = ">=2.18.0,<3.0.0" +findpython = ">=0.6.2,<0.7.0" +installer = ">=0.7.0,<0.8.0" +keyring = ">=25.1.0,<26.0.0" +packaging = ">=24.0" +pbs-installer = {version = ">=2025.1.6,<2026.0.0", extras = ["download", "install"]} +pkginfo = ">=1.12,<2.0" +platformdirs = ">=3.0.0,<5" +poetry-core = "2.1.2" +pyproject-hooks = ">=1.0.0,<2.0.0" +requests = ">=2.26,<3.0" +requests-toolbelt = ">=1.0.0,<2.0.0" +shellingham = ">=1.5,<2.0" +tomlkit = ">=0.11.4,<1.0.0" +trove-classifiers = ">=2022.5.19" +virtualenv = ">=20.26.6,<21.0.0" +xattr = {version = ">=1.0.0,<2.0.0", markers = "sys_platform == \"darwin\""} + +[[package]] +name = "poetry-core" +version = "2.1.2" +description = "Poetry PEP 517 Build Backend" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "poetry_core-2.1.2-py3-none-any.whl", hash = "sha256:ecb1e8f7d4f071a21cd0feb8c19bd1aec80de6fb0e82aa9d809a591e544431b4"}, + {file = "poetry_core-2.1.2.tar.gz", hash = "sha256:f9dbbbd0ebf9755476a1d57f04b30e9aecf71ca9dc2fcd4b17aba92c0002aa04"}, +] + [[package]] name = "port-for" version = "0.7.4" description = "Utility that helps with local TCP ports management. It can find an unused TCP localhost port and remember the association." 
optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ {file = "port_for-0.7.4-py3-none-any.whl", hash = "sha256:08404aa072651a53dcefe8d7a598ee8a1dca320d9ac44ac464da16ccf2a02c4a"}, {file = "port_for-0.7.4.tar.gz", hash = "sha256:fc7713e7b22f89442f335ce12536653656e8f35146739eccaeff43d28436028d"}, ] +[[package]] +name = "posthog" +version = "3.25.0" +description = "Integrate PostHog into any python application." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "posthog-3.25.0-py2.py3-none-any.whl", hash = "sha256:85db78c13d1ecb11aed06fad53759c4e8fb3633442c2f3d0336bc0ce8a585d30"}, + {file = "posthog-3.25.0.tar.gz", hash = "sha256:9168f3e7a0a5571b6b1065c41b3c171fbc68bfe72c3ac0bfd6e3d2fcdb7df2ca"}, +] + +[package.dependencies] +backoff = ">=1.10.0" +distro = ">=1.5.0" +monotonic = ">=1.5" +python-dateutil = ">2.1" +requests = ">=2.7,<3.0" +six = ">=1.5" + +[package.extras] +dev = ["black", "django-stubs", "flake8", "flake8-print", "isort", "lxml", "mypy", "mypy-baseline", "pre-commit", "pydantic", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six"] +langchain = ["langchain (>=0.2.0)"] +sentry = ["django", "sentry-sdk"] +test = ["anthropic", "coverage", "django", "flake8", "freezegun (==1.5.1)", "langchain-anthropic (>=0.2.0)", "langchain-community (>=0.2.0)", "langchain-openai (>=0.2.0)", "langgraph", "mock (>=2.0.0)", "openai", "parameterized (>=0.8.1)", "pydantic", "pylint", "pytest", "pytest-asyncio", "pytest-timeout"] + [[package]] name = "pre-commit" -version = "4.0.1" +version = "4.2.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878"}, - {file = "pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2"}, + {file = "pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd"}, + {file = "pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146"}, ] [package.dependencies] @@ -2361,6 +2928,7 @@ version = "0.7.1" description = "Python client for the Prometheus monitoring system." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "prometheus_client-0.7.1.tar.gz", hash = "sha256:71cd24a2b3eb335cb800c7159f423df1bd4dcd5171b234be15e3f31ec9f622da"}, ] @@ -2374,6 +2942,7 @@ version = "3.0.0" description = "Exposes Prometheus monitoring metrics of Sanic apps." 
optional = false python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ {file = "prometheus-sanic-3.0.0.tar.gz", hash = "sha256:06cfe8f9c843a1324fa801b9092f26470a63196b9e08fad0c0f12b49ddbf6c3c"}, {file = "prometheus_sanic-3.0.0-py3-none-any.whl", hash = "sha256:499110bf2a86f921b229083e0bcea4d489420abf6737e0d838cd234394fd91aa"}, @@ -2385,256 +2954,271 @@ sanic = ">=22.0.0" [[package]] name = "propcache" -version = "0.2.0" +version = "0.3.1" description = "Accelerated property cache" optional = false -python-versions = ">=3.8" -files = [ - {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:c5869b8fd70b81835a6f187c5fdbe67917a04d7e52b6e7cc4e5fe39d55c39d58"}, - {file = "propcache-0.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:952e0d9d07609d9c5be361f33b0d6d650cd2bae393aabb11d9b719364521984b"}, - {file = "propcache-0.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:33ac8f098df0585c0b53009f039dfd913b38c1d2edafed0cedcc0c32a05aa110"}, - {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:97e48e8875e6c13909c800fa344cd54cc4b2b0db1d5f911f840458a500fde2c2"}, - {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388f3217649d6d59292b722d940d4d2e1e6a7003259eb835724092a1cca0203a"}, - {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f571aea50ba5623c308aa146eb650eebf7dbe0fd8c5d946e28343cb3b5aad577"}, - {file = "propcache-0.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3dfafb44f7bb35c0c06eda6b2ab4bfd58f02729e7c4045e179f9a861b07c9850"}, - {file = "propcache-0.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3ebe9a75be7ab0b7da2464a77bb27febcb4fab46a34f9288f39d74833db7f61"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d2f0d0f976985f85dfb5f3d685697ef769faa6b71993b46b295cdbbd6be8cc37"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:a3dc1a4b165283bd865e8f8cb5f0c64c05001e0718ed06250d8cac9bec115b48"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e0f07b42d2a50c7dd2d8675d50f7343d998c64008f1da5fef888396b7f84630"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e63e3e1e0271f374ed489ff5ee73d4b6e7c60710e1f76af5f0e1a6117cd26394"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:56bb5c98f058a41bb58eead194b4db8c05b088c93d94d5161728515bd52b052b"}, - {file = "propcache-0.2.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7665f04d0c7f26ff8bb534e1c65068409bf4687aa2534faf7104d7182debb336"}, - {file = "propcache-0.2.0-cp310-cp310-win32.whl", hash = "sha256:7cf18abf9764746b9c8704774d8b06714bcb0a63641518a3a89c7f85cc02c2ad"}, - {file = "propcache-0.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:cfac69017ef97db2438efb854edf24f5a29fd09a536ff3a992b75990720cdc99"}, - {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:63f13bf09cc3336eb04a837490b8f332e0db41da66995c9fd1ba04552e516354"}, - {file = "propcache-0.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608cce1da6f2672a56b24a015b42db4ac612ee709f3d29f27a00c943d9e851de"}, - {file = "propcache-0.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:466c219deee4536fbc83c08d09115249db301550625c7fef1c5563a584c9bc87"}, - {file = 
"propcache-0.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc2db02409338bf36590aa985a461b2c96fce91f8e7e0f14c50c5fcc4f229016"}, - {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6ed8db0a556343d566a5c124ee483ae113acc9a557a807d439bcecc44e7dfbb"}, - {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:91997d9cb4a325b60d4e3f20967f8eb08dfcb32b22554d5ef78e6fd1dda743a2"}, - {file = "propcache-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c7dde9e533c0a49d802b4f3f218fa9ad0a1ce21f2c2eb80d5216565202acab4"}, - {file = "propcache-0.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffcad6c564fe6b9b8916c1aefbb37a362deebf9394bd2974e9d84232e3e08504"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:97a58a28bcf63284e8b4d7b460cbee1edaab24634e82059c7b8c09e65284f178"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:945db8ee295d3af9dbdbb698cce9bbc5c59b5c3fe328bbc4387f59a8a35f998d"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:39e104da444a34830751715f45ef9fc537475ba21b7f1f5b0f4d71a3b60d7fe2"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c5ecca8f9bab618340c8e848d340baf68bcd8ad90a8ecd7a4524a81c1764b3db"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c436130cc779806bdf5d5fae0d848713105472b8566b75ff70048c47d3961c5b"}, - {file = "propcache-0.2.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:191db28dc6dcd29d1a3e063c3be0b40688ed76434622c53a284e5427565bbd9b"}, - {file = "propcache-0.2.0-cp311-cp311-win32.whl", hash = "sha256:5f2564ec89058ee7c7989a7b719115bdfe2a2fb8e7a4543b8d1c0cc4cf6478c1"}, - {file = "propcache-0.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:6e2e54267980349b723cff366d1e29b138b9a60fa376664a157a342689553f71"}, - {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ee7606193fb267be4b2e3b32714f2d58cad27217638db98a60f9efb5efeccc2"}, - {file = "propcache-0.2.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:91ee8fc02ca52e24bcb77b234f22afc03288e1dafbb1f88fe24db308910c4ac7"}, - {file = "propcache-0.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2e900bad2a8456d00a113cad8c13343f3b1f327534e3589acc2219729237a2e8"}, - {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f52a68c21363c45297aca15561812d542f8fc683c85201df0bebe209e349f793"}, - {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e41d67757ff4fbc8ef2af99b338bfb955010444b92929e9e55a6d4dcc3c4f09"}, - {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a64e32f8bd94c105cc27f42d3b658902b5bcc947ece3c8fe7bc1b05982f60e89"}, - {file = "propcache-0.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55346705687dbd7ef0d77883ab4f6fabc48232f587925bdaf95219bae072491e"}, - {file = "propcache-0.2.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:00181262b17e517df2cd85656fcd6b4e70946fe62cd625b9d74ac9977b64d8d9"}, - {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6994984550eaf25dd7fc7bd1b700ff45c894149341725bb4edc67f0ffa94efa4"}, - {file = 
"propcache-0.2.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:56295eb1e5f3aecd516d91b00cfd8bf3a13991de5a479df9e27dd569ea23959c"}, - {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:439e76255daa0f8151d3cb325f6dd4a3e93043e6403e6491813bcaaaa8733887"}, - {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:f6475a1b2ecb310c98c28d271a30df74f9dd436ee46d09236a6b750a7599ce57"}, - {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3444cdba6628accf384e349014084b1cacd866fbb88433cd9d279d90a54e0b23"}, - {file = "propcache-0.2.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:4a9d9b4d0a9b38d1c391bb4ad24aa65f306c6f01b512e10a8a34a2dc5675d348"}, - {file = "propcache-0.2.0-cp312-cp312-win32.whl", hash = "sha256:69d3a98eebae99a420d4b28756c8ce6ea5a29291baf2dc9ff9414b42676f61d5"}, - {file = "propcache-0.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:ad9c9b99b05f163109466638bd30ada1722abb01bbb85c739c50b6dc11f92dc3"}, - {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ecddc221a077a8132cf7c747d5352a15ed763b674c0448d811f408bf803d9ad7"}, - {file = "propcache-0.2.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0e53cb83fdd61cbd67202735e6a6687a7b491c8742dfc39c9e01e80354956763"}, - {file = "propcache-0.2.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92fe151145a990c22cbccf9ae15cae8ae9eddabfc949a219c9f667877e40853d"}, - {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6a21ef516d36909931a2967621eecb256018aeb11fc48656e3257e73e2e247a"}, - {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f88a4095e913f98988f5b338c1d4d5d07dbb0b6bad19892fd447484e483ba6b"}, - {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5a5b3bb545ead161be780ee85a2b54fdf7092815995661947812dde94a40f6fb"}, - {file = "propcache-0.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67aeb72e0f482709991aa91345a831d0b707d16b0257e8ef88a2ad246a7280bf"}, - {file = "propcache-0.2.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c997f8c44ec9b9b0bcbf2d422cc00a1d9b9c681f56efa6ca149a941e5560da2"}, - {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a66df3d4992bc1d725b9aa803e8c5a66c010c65c741ad901e260ece77f58d2f"}, - {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:3ebbcf2a07621f29638799828b8d8668c421bfb94c6cb04269130d8de4fb7136"}, - {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1235c01ddaa80da8235741e80815ce381c5267f96cc49b1477fdcf8c047ef325"}, - {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3947483a381259c06921612550867b37d22e1df6d6d7e8361264b6d037595f44"}, - {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d5bed7f9805cc29c780f3aee05de3262ee7ce1f47083cfe9f77471e9d6777e83"}, - {file = "propcache-0.2.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4a91d44379f45f5e540971d41e4626dacd7f01004826a18cb048e7da7e96544"}, - {file = "propcache-0.2.0-cp313-cp313-win32.whl", hash = "sha256:f902804113e032e2cdf8c71015651c97af6418363bea8d78dc0911d56c335032"}, - {file = "propcache-0.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:8f188cfcc64fb1266f4684206c9de0e80f54622c3f22a910cbd200478aeae61e"}, - {file = 
"propcache-0.2.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:53d1bd3f979ed529f0805dd35ddaca330f80a9a6d90bc0121d2ff398f8ed8861"}, - {file = "propcache-0.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:83928404adf8fb3d26793665633ea79b7361efa0287dfbd372a7e74311d51ee6"}, - {file = "propcache-0.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77a86c261679ea5f3896ec060be9dc8e365788248cc1e049632a1be682442063"}, - {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:218db2a3c297a3768c11a34812e63b3ac1c3234c3a086def9c0fee50d35add1f"}, - {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7735e82e3498c27bcb2d17cb65d62c14f1100b71723b68362872bca7d0913d90"}, - {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20a617c776f520c3875cf4511e0d1db847a076d720714ae35ffe0df3e440be68"}, - {file = "propcache-0.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67b69535c870670c9f9b14a75d28baa32221d06f6b6fa6f77a0a13c5a7b0a5b9"}, - {file = "propcache-0.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4569158070180c3855e9c0791c56be3ceeb192defa2cdf6a3f39e54319e56b89"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:db47514ffdbd91ccdc7e6f8407aac4ee94cc871b15b577c1c324236b013ddd04"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:2a60ad3e2553a74168d275a0ef35e8c0a965448ffbc3b300ab3a5bb9956c2162"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:662dd62358bdeaca0aee5761de8727cfd6861432e3bb828dc2a693aa0471a563"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:25a1f88b471b3bc911d18b935ecb7115dff3a192b6fef46f0bfaf71ff4f12418"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:f60f0ac7005b9f5a6091009b09a419ace1610e163fa5deaba5ce3484341840e7"}, - {file = "propcache-0.2.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:74acd6e291f885678631b7ebc85d2d4aec458dd849b8c841b57ef04047833bed"}, - {file = "propcache-0.2.0-cp38-cp38-win32.whl", hash = "sha256:d9b6ddac6408194e934002a69bcaadbc88c10b5f38fb9307779d1c629181815d"}, - {file = "propcache-0.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:676135dcf3262c9c5081cc8f19ad55c8a64e3f7282a21266d05544450bffc3a5"}, - {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:25c8d773a62ce0451b020c7b29a35cfbc05de8b291163a7a0f3b7904f27253e6"}, - {file = "propcache-0.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:375a12d7556d462dc64d70475a9ee5982465fbb3d2b364f16b86ba9135793638"}, - {file = "propcache-0.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1ec43d76b9677637a89d6ab86e1fef70d739217fefa208c65352ecf0282be957"}, - {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f45eec587dafd4b2d41ac189c2156461ebd0c1082d2fe7013571598abb8505d1"}, - {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc092ba439d91df90aea38168e11f75c655880c12782facf5cf9c00f3d42b562"}, - {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa1076244f54bb76e65e22cb6910365779d5c3d71d1f18b275f1dfc7b0d71b4d"}, - {file = "propcache-0.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:682a7c79a2fbf40f5dbb1eb6bfe2cd865376deeac65acf9beb607505dced9e12"}, - {file = "propcache-0.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e40876731f99b6f3c897b66b803c9e1c07a989b366c6b5b475fafd1f7ba3fb8"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:363ea8cd3c5cb6679f1c2f5f1f9669587361c062e4899fce56758efa928728f8"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:140fbf08ab3588b3468932974a9331aff43c0ab8a2ec2c608b6d7d1756dbb6cb"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e70fac33e8b4ac63dfc4c956fd7d85a0b1139adcfc0d964ce288b7c527537fea"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b33d7a286c0dc1a15f5fc864cc48ae92a846df287ceac2dd499926c3801054a6"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f6d5749fdd33d90e34c2efb174c7e236829147a2713334d708746e94c4bde40d"}, - {file = "propcache-0.2.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22aa8f2272d81d9317ff5756bb108021a056805ce63dd3630e27d042c8092798"}, - {file = "propcache-0.2.0-cp39-cp39-win32.whl", hash = "sha256:73e4b40ea0eda421b115248d7e79b59214411109a5bc47d0d48e4c73e3b8fcf9"}, - {file = "propcache-0.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:9517d5e9e0731957468c29dbfd0f976736a0e55afaea843726e887f36fe017df"}, - {file = "propcache-0.2.0-py3-none-any.whl", hash = "sha256:2ccc28197af5313706511fab3a8b66dcd6da067a1331372c82ea1cb74285e036"}, - {file = "propcache-0.2.0.tar.gz", hash = "sha256:df81779732feb9d01e5d513fad0122efb3d53bbc75f61b2a4f29a020bc985e70"}, +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd"}, + {file = 
"propcache-0.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136"}, + {file = "propcache-0.3.1-cp310-cp310-win32.whl", hash = "sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42"}, + {file = "propcache-0.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9"}, + {file = "propcache-0.3.1-cp311-cp311-win32.whl", hash = "sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005"}, + {file = "propcache-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976"}, + {file = 
"propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7"}, + {file = "propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b"}, + {file = "propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654"}, + {file = 
"propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef"}, + {file = "propcache-0.3.1-cp313-cp313-win32.whl", hash = "sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24"}, + {file = "propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a"}, + {file = "propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d"}, + {file = 
"propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe"}, + {file = "propcache-0.3.1-cp39-cp39-win32.whl", hash = "sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64"}, + {file = "propcache-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566"}, + {file = "propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40"}, + {file = "propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf"}, ] [[package]] name = "protobuf" -version = "5.29.0" +version = "5.29.4" description = "" optional = false python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"}, + {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"}, + {file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = 
"sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"}, + {file = "protobuf-5.29.4-cp38-cp38-win32.whl", hash = "sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de"}, + {file = "protobuf-5.29.4-cp38-cp38-win_amd64.whl", hash = "sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68"}, + {file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"}, + {file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"}, + {file = "protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"}, + {file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"}, +] + +[[package]] +name = "protovalidate" +version = "0.7.1" +description = "Protocol Buffer Validation for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "protobuf-5.29.0-cp310-abi3-win32.whl", hash = "sha256:ea7fb379b257911c8c020688d455e8f74efd2f734b72dc1ea4b4d7e9fd1326f2"}, - {file = "protobuf-5.29.0-cp310-abi3-win_amd64.whl", hash = "sha256:34a90cf30c908f47f40ebea7811f743d360e202b6f10d40c02529ebd84afc069"}, - {file = "protobuf-5.29.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c931c61d0cc143a2e756b1e7f8197a508de5365efd40f83c907a9febf36e6b43"}, - {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:85286a47caf63b34fa92fdc1fd98b649a8895db595cfa746c5286eeae890a0b1"}, - {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:0d10091d6d03537c3f902279fcf11e95372bdd36a79556311da0487455791b20"}, - {file = "protobuf-5.29.0-cp38-cp38-win32.whl", hash = "sha256:0cd67a1e5c2d88930aa767f702773b2d054e29957432d7c6a18f8be02a07719a"}, - {file = "protobuf-5.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:e467f81fdd12ded9655cea3e9b83dc319d93b394ce810b556fb0f421d8613e86"}, - {file = "protobuf-5.29.0-cp39-cp39-win32.whl", hash = "sha256:17d128eebbd5d8aee80300aed7a43a48a25170af3337f6f1333d1fac2c6839ac"}, - {file = "protobuf-5.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:6c3009e22717c6cc9e6594bb11ef9f15f669b19957ad4087214d69e08a213368"}, - {file = "protobuf-5.29.0-py3-none-any.whl", hash = "sha256:88c4af76a73183e21061881360240c0cdd3c39d263b4e8fb570aaf83348d608f"}, - {file = "protobuf-5.29.0.tar.gz", hash = "sha256:445a0c02483869ed8513a585d80020d012c6dc60075f96fa0563a724987b1001"}, + {file = "protovalidate-0.7.1-py3-none-any.whl", hash = "sha256:6788b1baa10c2e9453c3a3eef5f87a3e9c871bc9a7110b506aefd764269c8b3e"}, + {file = "protovalidate-0.7.1.tar.gz", hash = "sha256:12bd7c126fc000c5cbee5bf0f4cd01e0ba0e353f585b0aaa68df03e788939412"}, ] +[package.dependencies] +cel-python = "*" +protobuf = "*" + [[package]] name = "psutil" -version = "6.1.0" -description = "Cross-platform lib for process and system monitoring in Python." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" -files = [ - {file = "psutil-6.1.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ff34df86226c0227c52f38b919213157588a678d049688eded74c76c8ba4a5d0"}, - {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c0e0c00aa18ca2d3b2b991643b799a15fc8f0563d2ebb6040f64ce8dc027b942"}, - {file = "psutil-6.1.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:000d1d1ebd634b4efb383f4034437384e44a6d455260aaee2eca1e9c1b55f047"}, - {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:5cd2bcdc75b452ba2e10f0e8ecc0b57b827dd5d7aaffbc6821b2a9a242823a76"}, - {file = "psutil-6.1.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:045f00a43c737f960d273a83973b2511430d61f283a44c96bf13a6e829ba8fdc"}, - {file = "psutil-6.1.0-cp27-none-win32.whl", hash = "sha256:9118f27452b70bb1d9ab3198c1f626c2499384935aaf55388211ad982611407e"}, - {file = "psutil-6.1.0-cp27-none-win_amd64.whl", hash = "sha256:a8506f6119cff7015678e2bce904a4da21025cc70ad283a53b099e7620061d85"}, - {file = "psutil-6.1.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:6e2dcd475ce8b80522e51d923d10c7871e45f20918e027ab682f94f1c6351688"}, - {file = "psutil-6.1.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0895b8414afafc526712c498bd9de2b063deaac4021a3b3c34566283464aff8e"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9dcbfce5d89f1d1f2546a2090f4fcf87c7f669d1d90aacb7d7582addece9fb38"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:498c6979f9c6637ebc3a73b3f87f9eb1ec24e1ce53a7c5173b8508981614a90b"}, - {file = "psutil-6.1.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d905186d647b16755a800e7263d43df08b790d709d575105d419f8b6ef65423a"}, - {file = "psutil-6.1.0-cp36-cp36m-win32.whl", hash = "sha256:6d3fbbc8d23fcdcb500d2c9f94e07b1342df8ed71b948a2649b5cb060a7c94ca"}, - {file = "psutil-6.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1209036fbd0421afde505a4879dee3b2fd7b1e14fee81c0069807adcbbcca747"}, - {file = "psutil-6.1.0-cp37-abi3-win32.whl", hash = "sha256:1ad45a1f5d0b608253b11508f80940985d1d0c8f6111b5cb637533a0e6ddc13e"}, - {file = "psutil-6.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:a8fb3752b491d246034fa4d279ff076501588ce8cbcdbb62c32fd7a377d996be"}, - {file = "psutil-6.1.0.tar.gz", hash = "sha256:353815f59a7f64cdaca1c0307ee13558a0512f6db064e92fe833784f08539c7a"}, +version = "7.0.0" +description = "Cross-platform lib for process and system monitoring in Python. NOTE: the syntax of this script MUST be kept compatible with Python 2.7." 
+optional = false +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "psutil-7.0.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:101d71dc322e3cffd7cea0650b09b3d08b8e7c4109dd6809fe452dfd00e58b25"}, + {file = "psutil-7.0.0-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:39db632f6bb862eeccf56660871433e111b6ea58f2caea825571951d4b6aa3da"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1fcee592b4c6f146991ca55919ea3d1f8926497a713ed7faaf8225e174581e91"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b1388a4f6875d7e2aff5c4ca1cc16c545ed41dd8bb596cefea80111db353a34"}, + {file = "psutil-7.0.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f098451abc2828f7dc6b58d44b532b22f2088f4999a937557b603ce72b1993"}, + {file = "psutil-7.0.0-cp36-cp36m-win32.whl", hash = "sha256:84df4eb63e16849689f76b1ffcb36db7b8de703d1bc1fe41773db487621b6c17"}, + {file = "psutil-7.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:1e744154a6580bc968a0195fd25e80432d3afec619daf145b9e5ba16cc1d688e"}, + {file = "psutil-7.0.0-cp37-abi3-win32.whl", hash = "sha256:ba3fcef7523064a6c9da440fc4d6bd07da93ac726b5733c29027d7dc95b39d99"}, + {file = "psutil-7.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:4cf3d4eb1aa9b348dec30105c55cd9b7d4629285735a102beb4441e38db90553"}, + {file = "psutil-7.0.0.tar.gz", hash = "sha256:7be9c3eba38beccb6495ea33afd982a44074b78f28c434a1f51cc07fd315c456"}, ] [package.extras] -dev = ["black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "wheel"] +dev = ["abi3audit", "black (==24.10.0)", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "psycopg" -version = "3.2.3" +version = "3.2.6" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "psycopg-3.2.3-py3-none-any.whl", hash = "sha256:644d3973fe26908c73d4be746074f6e5224b03c1101d302d9a53bf565ad64907"}, - {file = "psycopg-3.2.3.tar.gz", hash = "sha256:a5764f67c27bec8bfac85764d23c534af2c27b893550377e37ce59c12aac47a2"}, + {file = "psycopg-3.2.6-py3-none-any.whl", hash = "sha256:f3ff5488525890abb0566c429146add66b329e20d6d4835662b920cbbf90ac58"}, + {file = "psycopg-3.2.6.tar.gz", hash = "sha256:16fa094efa2698f260f2af74f3710f781e4a6f226efe9d1fd0c37f384639ed8a"}, ] [package.dependencies] -psycopg-binary = {version = "3.2.3", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} -typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} +psycopg-binary = {version = "3.2.6", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.2.3)"] -c = ["psycopg-c (==3.2.3)"] -dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.11)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +binary = ["psycopg-binary (==3.2.6) ; 
implementation_name != \"pypy\""] +c = ["psycopg-c (==3.2.6) ; implementation_name != \"pypy\""] +dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] -test = ["anyio (>=4.0)", "mypy (>=1.11)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] +test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] [[package]] name = "psycopg-binary" -version = "3.2.3" +version = "3.2.6" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.8" -files = [ - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:965455eac8547f32b3181d5ec9ad8b9be500c10fe06193543efaaebe3e4ce70c"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:71adcc8bc80a65b776510bc39992edf942ace35b153ed7a9c6c573a6849ce308"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f73adc05452fb85e7a12ed3f69c81540a8875960739082e6ea5e28c373a30774"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8630943143c6d6ca9aefc88bbe5e76c90553f4e1a3b2dc339e67dc34aa86f7e"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bffb61e198a91f712cc3d7f2d176a697cb05b284b2ad150fb8edb308eba9002"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4fa2240c9fceddaa815a58f29212826fafe43ce80ff666d38c4a03fb036955"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:192a5f8496e6e1243fdd9ac20e117e667c0712f148c5f9343483b84435854c78"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64dc6e9ec64f592f19dc01a784e87267a64a743d34f68488924251253da3c818"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:79498df398970abcee3d326edd1d4655de7d77aa9aecd578154f8af35ce7bbd2"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:949551752930d5e478817e0b49956350d866b26578ced0042a61967e3fcccdea"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:80a2337e2dfb26950894c8301358961430a0304f7bfe729d34cc036474e9c9b1"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6d8f2144e0d5808c2e2aed40fbebe13869cd00c2ae745aca4b3b16a435edb056"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94253be2b57ef2fea7ffe08996067aabf56a1eb9648342c9e3bad9e10c46e045"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fda0162b0dbfa5eaed6cdc708179fa27e148cb8490c7d62e5cf30713909658ea"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c0419cdad8c70eaeb3116bb28e7b42d546f91baf5179d7556f230d40942dc78"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74fbf5dd3ef09beafd3557631e282f00f8af4e7a78fbfce8ab06d9cd5a789aae"}, 
- {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d784f614e4d53050cbe8abf2ae9d1aaacf8ed31ce57b42ce3bf2a48a66c3a5c"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4e76ce2475ed4885fe13b8254058be710ec0de74ebd8ef8224cf44a9a3358e5f"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5938b257b04c851c2d1e6cb2f8c18318f06017f35be9a5fe761ee1e2e344dfb7"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:257c4aea6f70a9aef39b2a77d0658a41bf05c243e2bf41895eb02220ac6306f3"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:06b5cc915e57621eebf2393f4173793ed7e3387295f07fed93ed3fb6a6ccf585"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:09baa041856b35598d335b1a74e19a49da8500acedf78164600694c0ba8ce21b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:48f8ca6ee8939bab760225b2ab82934d54330eec10afe4394a92d3f2a0c37dd6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5361ea13c241d4f0ec3f95e0bf976c15e2e451e9cc7ef2e5ccfc9d170b197a40"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb987f14af7da7c24f803111dbc7392f5070fd350146af3345103f76ea82e339"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0463a11b1cace5a6aeffaf167920707b912b8986a9c7920341c75e3686277920"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b7be9a6c06518967b641fb15032b1ed682fd3b0443f64078899c61034a0bca6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64a607e630d9f4b2797f641884e52b9f8e239d35943f51bef817a384ec1678fe"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fa33ead69ed133210d96af0c63448b1385df48b9c0247eda735c5896b9e6dbbf"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1f8b0d0e99d8e19923e6e07379fa00570be5182c201a8c0b5aaa9a4d4a4ea20b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:709447bd7203b0b2debab1acec23123eb80b386f6c29e7604a5d4326a11e5bd6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5e37d5027e297a627da3551a1e962316d0f88ee4ada74c768f6c9234e26346d9"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:261f0031ee6074765096a19b27ed0f75498a8338c3dcd7f4f0d831e38adf12d1"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:41fdec0182efac66b27478ac15ef54c9ebcecf0e26ed467eb7d6f262a913318b"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:07d019a786eb020c0f984691aa1b994cb79430061065a694cf6f94056c603d26"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c57615791a337378fe5381143259a6c432cdcbb1d3e6428bfb7ce59fff3fb5c"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8eb9a4e394926b93ad919cad1b0a918e9b4c846609e8c1cfb6b743683f64da0"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5905729668ef1418bd36fbe876322dcb0f90b46811bba96d505af89e6fbdce2f"}, - {file = 
"psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd65774ed7d65101b314808b6893e1a75b7664f680c3ef18d2e5c84d570fa393"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:700679c02f9348a0d0a2adcd33a0275717cd0d0aee9d4482b47d935023629505"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:96334bb64d054e36fed346c50c4190bad9d7c586376204f50bede21a913bf942"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9099e443d4cc24ac6872e6a05f93205ba1a231b1a8917317b07c9ef2b955f1f4"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1985ab05e9abebfbdf3163a16ebb37fbc5d49aff2bf5b3d7375ff0920bbb54cd"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:e90352d7b610b4693fad0feea48549d4315d10f1eba5605421c92bb834e90170"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:69320f05de8cdf4077ecd7fefdec223890eea232af0d58f2530cbda2871244a0"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4926ea5c46da30bec4a85907aa3f7e4ea6313145b2aa9469fdb861798daf1502"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c64c4cd0d50d5b2288ab1bcb26c7126c772bbdebdfadcd77225a77df01c4a57e"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05a1bdce30356e70a05428928717765f4a9229999421013f41338d9680d03a63"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad357e426b0ea5c3043b8ec905546fa44b734bf11d33b3da3959f6e4447d350"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:967b47a0fd237aa17c2748fdb7425015c394a6fb57cdad1562e46a6eb070f96d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:71db8896b942770ed7ab4efa59b22eee5203be2dfdee3c5258d60e57605d688c"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2773f850a778575dd7158a6dd072f7925b67f3ba305e2003538e8831fec77a1d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aeddf7b3b3f6e24ccf7d0edfe2d94094ea76b40e831c16eff5230e040ce3b76b"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:824c867a38521d61d62b60aca7db7ca013a2b479e428a0db47d25d8ca5067410"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:9994f7db390c17fc2bd4c09dca722fd792ff8a49bb3bdace0c50a83f22f1767d"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1303bf8347d6be7ad26d1362af2c38b3a90b8293e8d56244296488ee8591058e"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:842da42a63ecb32612bb7f5b9e9f8617eab9bc23bd58679a441f4150fcc51c96"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bb342a01c76f38a12432848e6013c57eb630103e7556cf79b705b53814c3949"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd40af959173ea0d087b6b232b855cfeaa6738f47cb2a0fd10a7f4fa8b74293f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9b60b465773a52c7d4705b0a751f7f1cdccf81dd12aee3b921b31a6e76b07b0e"}, - {file = 
"psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fc6d87a1c44df8d493ef44988a3ded751e284e02cdf785f746c2d357e99782a6"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f0b018e37608c3bfc6039a1dc4eb461e89334465a19916be0153c757a78ea426"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a29f5294b0b6360bfda69653697eff70aaf2908f58d1073b0acd6f6ab5b5a4f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:e56b1fd529e5dde2d1452a7d72907b37ed1b4f07fdced5d8fb1e963acfff6749"}, +groups = ["main"] +markers = "implementation_name != \"pypy\"" +files = [ + {file = "psycopg_binary-3.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1b639acb3e24243c23f75700bf6e3af7b76da92523ec7c3196a13aaf0b578453"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1b5c359173726b38d7acbb9f73270f269591d8031d099c1a70dd3f3d22b0e8a8"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3434efe7c00f505f4c1e531519dac6c701df738ba7a1328eac81118d80019132"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bca8d9643191b13193940bbf84d51ac5a747e965c230177258fb02b8043fb7a"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55fa40f11d37e6e5149a282a5fd7e0734ce55c623673bfba638480914fd1414c"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0690ac1061c655b1bcbe9284d07bf5276bc9c0d788a6c74aaf3b042e64984b83"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e9a4a9967ff650d2821d5fad6bec7b15f4c2072603e9fa3f89a39f351ade1fd3"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d6f2894cc7aee8a15fe591e8536911d9c015cb404432cf7bdac2797e54cb2ba8"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:05560c81312d7c2bee95a9860cd25198677f2320fb4a3527bc04e8cae7fcfb64"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4269cd23a485d6dd6eb6b10841c94551a53091cf0b1b6d5247a6a341f53f0d95"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:7942f35a6f314608720116bcd9de240110ceadffd2ac5c34f68f74a31e52e46a"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7afe181f6b3eb714362e9b6a2dc2a589bff60471a1d8639fd231a4e426e01523"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34bb0fceba0773dc0bfb53224bb2c0b19dc97ea0a997a223615484cf02cae55c"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54120122d2779dcd307f49e1f921d757fe5dacdced27deab37f277eef0c52a5b"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:816aa556f63b2303e66ba6c8888a8b3f3e6e4e47049ec7a4d62c84ac60b091ca"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19a0ba351eda9a59babf8c7c9d89c7bbc5b26bf096bc349b096bd0dd2482088"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e197e01290ef818a092c877025fc28096adbb6d0743e313491a21aab31bd96"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:274794b4b29ef426e09086404446b61a146f5e756da71366c5a6d57abec31f7d"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:69845bdc0db519e1dfc27932cd3d5b1ecb3f72950af52a1987508ab0b52b3b55"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:66c3bed2caf0d1cabcb9365064de183b5209a7cbeaa131e79e68f350c9c963c2"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e3ae3201fe85c7f901349a2cf52f02ceca4cb97a5e2e2ac8b8a1c9a6eb747bed"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:58f443b4df2adb59937c96775fadf4967f93d952fbcc82394446985faec11041"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f27a46ff0497e882e8c0286e8833c785b4d1a80f23e1bf606f4c90e5f9f3ce75"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b30ee4821ded7de48b8048b14952512588e7c5477b0a5965221e1798afba61a1"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e57edf3b1f5427f39660225b01f8e7b97f5cfab132092f014bf1638bc85d81d2"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c5172ce3e4ae7a4fd450070210f801e2ce6bc0f11d1208d29268deb0cda34de"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcfab3804c43571a6615e559cdc4c4115785d258a4dd71a721be033f5f5f378d"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fa1c920cce16f1205f37b20c685c58b9656b170b8b4c93629100d342d0d118e"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e118d818101c1608c6b5ba52a6c977614d8f05aa89467501172ba4d10588e11"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:763319a8bfeca77d31512da71f5a33459b9568a7621c481c3828c62f9c38f351"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2fbc05819560389dbece046966bc88e0f2ea77673497e274c4293b8b4c1d0703"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a57f99bb953b4bd6f32d0a9844664e7f6ca5ead9ba40e96635be3cd30794813"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:5de6809e19a465dcb9c269675bded46a135f2d600cd99f0735afbb21ddad2af4"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54af3fbf871baa2eb19df96fd7dc0cbd88e628a692063c3d1ab5cdd00aa04322"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ad5da1e4636776c21eaeacdec42f25fa4612631a12f25cd9ab34ddf2c346ffb9"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7956b9ea56f79cd86eddcfbfc65ae2af1e4fe7932fa400755005d903c709370"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e2efb763188008cf2914820dcb9fb23c10fe2be0d2c97ef0fac7cec28e281d8"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b3aab3451679f1e7932270e950259ed48c3b79390022d3f660491c0e65e4838"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:849a370ac4e125f55f2ad37f928e588291a67ccf91fa33d0b1e042bb3ee1f986"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:566d4ace928419d91f1eb3227fc9ef7b41cf0ad22e93dd2c3368d693cf144408"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f1981f13b10de2f11cfa2f99a8738b35b3f0a0f3075861446894a8d3042430c0"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:36f598300b55b3c983ae8df06473ad27333d2fd9f3e2cfdb913b3a5aaa3a8bcf"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0f4699fa5fe1fffb0d6b2d14b31fd8c29b7ea7375f89d5989f002aaf21728b21"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:afe697b8b0071f497c5d4c0f41df9e038391534f5614f7fb3a8c1ca32d66e860"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da5554553b8d9fb7ab6bb1a37cc53f20ada9024916c60f40c09ab1a675323f2f"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b7e3ccc43c395edba8039c9e407b01ed1844304c7f2f4aa99d34d04ed067c83"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d55405efc8a96aa0ecb2d5d6af552d35c744f160b133fa690814a68d9a952c8"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:58d5cfb1687b69b3484a034d1aa6e5c11f0c1d46757e978ed59fab59ce83fd37"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3761c4107dab218c32ce4b10b1ae5ed686d41b882bfcb05f5bebc2be9488442f"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:45f1526e12cb480586c74670f46563d3090fc2a93e859ccf71efae61f04cef4b"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b4d4fd4415d5219785fb082e28d84be4fbd90c3bff3d861877db0aa6b0edd70b"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:eb8a1e6b8130fee0b48107739e09553d50c6f031d0b3fcc33f885bb64fa01105"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7adf1460c05f7366f0fe9cf2d24e46abca9eb621705322bbd0c3f3e3a5edb2b4"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:28505f52ceef60554b5ab3289bf5aed2e7e57fa8e9a59a979d82db944e256a6c"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:260c43c329e668606388cee78ec0dab083a25c2c6e6f9cf74a130fd5a27b0f87"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9870e51fad4684dbdec057fa757d65e61cb2acb16236836e9360044c2a1ec880"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030e9c3082a931e972b029b3cef085784a3bf7f8e18367ae50d5b809aa6e1d87"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60c9ed291fbd5e777c2c630dcfd10b7a87d68512b0757d5e7406d9c4895a82a"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e0f4a17a9c376c195e403b4826c18f325bd28f425231d36d1036258bf893e23"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac46da609624b16d961f604b3cbc3233ef43211ef1456a188f8c427109c9c3e1"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e77949b8e7014b85cee0bf6e9e041bcae7719b2693ebf59236368fb0b2a08814"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:532322d9ef6e7d178a4f344970b017110633bcc3dc1c3403efcef55aad612517"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:880c5fd76dcb50bdcc8f87359e5a6c7eb416697cc9aa02854c91223bd999c045"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3c0cddc7458b8416d77cd8829d0192466502f31d1fb853d58613cf13ac64f41c"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:ea158665676f42b19585dfe948071d3c5f28276f84a97522fb2e82c1d9194563"}, ] [[package]] @@ -2643,6 +3227,7 @@ version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, @@ -2650,39 +3235,18 @@ files = [ [[package]] name = "pyasn1-modules" -version = "0.4.1" +version = "0.4.2" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "pyasn1_modules-0.4.1-py3-none-any.whl", hash = "sha256:49bfa96b45a292b711e986f222502c1c9a5e1f4e568fc30e2574a6c7d07838fd"}, - {file = "pyasn1_modules-0.4.1.tar.gz", hash = "sha256:c28e2dbf9c06ad61c71a075c7e0f9fd0f1b0bb2d2ad4377f240d33ac2ab60a7c"}, + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.7.0" - -[[package]] -name = "pyavro-gen" -version = "0.3.3" -description = "A typed class generator for Avro Schemata" -optional = false -python-versions = "*" -files = [ - {file = "pyavro-gen-0.3.3.tar.gz", hash = "sha256:0e2b71c7c3c147326f555ecffcb6b2d5af4f1760b42a85f53a4fe85879f30a69"}, - {file = "pyavro_gen-0.3.3-py3-none-any.whl", hash = "sha256:452f6acb178bf7d7d9eb3c78d1978bfeecefdb3fa2937a4baf3542ae28b6dc49"}, -] - -[package.dependencies] -avro-preprocessor = ">=0.1.12" -dataclasses-avroschema = ">=0.37.1" -factory-boy = ">=3.2.1" -faker = ">=15.1.1" -isort = ">=5.10.1" -networkx = ">=2.8.7" -pygments = ">=2.13.0" -pytz = ">=2022.5" -undictify = ">=0.11.3" +pyasn1 = ">=0.6.1,<0.7.0" [[package]] name = "pycparser" @@ -2690,6 +3254,7 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -2697,132 +3262,134 @@ files = [ [[package]] name = "pydantic" -version = "2.10.2" +version = "2.11.3" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"}, - {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, + {file = "pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"}, + {file = 
"pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3"}, ] [package.dependencies] annotated-types = ">=0.6.0" email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} -pydantic-core = "2.27.1" +pydantic-core = "2.33.1" typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" -version = "2.27.1" +version = "2.33.1" description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, - {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, - {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, - {file = 
"pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, - {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, - {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, - {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, - {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, - {file = 
"pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, - {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, - {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, - {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, - {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, - {file = 
"pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, - {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, - {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, - {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, - {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, - {file = 
"pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, - {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3077cfdb6125cc8dab61b155fdd714663e401f0e6883f9632118ec12cf42df26"}, + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ffab8b2908d152e74862d276cf5017c81a2f3719f14e8e3e8d6b83fda863927"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5183e4f6a2d468787243ebcd70cf4098c247e60d73fb7d68d5bc1e1beaa0c4db"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:398a38d323f37714023be1e0285765f0a27243a8b1506b7b7de87b647b517e48"}, + {file = 
"pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d3776f0001b43acebfa86f8c64019c043b55cc5a6a2e313d728b5c95b46969"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c566dd9c5f63d22226409553531f89de0cac55397f2ab8d97d6f06cfce6d947e"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d5f3acc81452c56895e90643a625302bd6be351e7010664151cc55b7b97f89"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3a07fadec2a13274a8d861d3d37c61e97a816beae717efccaa4b36dfcaadcde"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f99aeda58dce827f76963ee87a0ebe75e648c72ff9ba1174a253f6744f518f65"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:902dbc832141aa0ec374f4310f1e4e7febeebc3256f00dc359a9ac3f264a45dc"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fe44d56aa0b00d66640aa84a3cbe80b7a3ccdc6f0b1ca71090696a6d4777c091"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win32.whl", hash = "sha256:ed3eb16d51257c763539bde21e011092f127a2202692afaeaccb50db55a31383"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win_amd64.whl", hash = "sha256:694ad99a7f6718c1a498dc170ca430687a39894a60327f548e02a9c7ee4b6504"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win32.whl", hash = "sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83"}, + {file = 
"pydantic_core-2.33.1-cp311-cp311-win_arm64.whl", hash = "sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5ab77f45d33d264de66e1884fca158bc920cb5e27fd0764a72f72f5756ae8bdb"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7aaba1b4b03aaea7bb59e1b5856d734be011d3e6d98f5bcaa98cb30f375f2ad"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fb66263e9ba8fea2aa85e1e5578980d127fb37d7f2e292773e7bc3a38fb0c7b"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f2648b9262607a7fb41d782cc263b48032ff7a03a835581abbf7a3bec62bcf5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:723c5630c4259400818b4ad096735a829074601805d07f8cafc366d95786d331"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d100e3ae783d2167782391e0c1c7a20a31f55f8015f3293647544df3f9c67824"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177d50460bc976a0369920b6c744d927b0ecb8606fb56858ff542560251b19e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3edde68d1a1f9af1273b2fe798997b33f90308fb6d44d8550c89fc6a3647cf6"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a62c3c3ef6a7e2c45f7853b10b5bc4ddefd6ee3cd31024754a1a5842da7d598d"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:c91dbb0ab683fa0cd64a6e81907c8ff41d6497c346890e26b23de7ee55353f96"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f466e8bf0a62dc43e068c12166281c2eca72121dd2adc1040f3aa1e21ef8599"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win32.whl", hash = "sha256:ab0277cedb698749caada82e5d099dc9fed3f906a30d4c382d1a21725777a1e5"}, + {file = 
"pydantic_core-2.33.1-cp39-cp39-win_amd64.whl", hash = "sha256:5773da0ee2d17136b1f1c6fbde543398d452a6ad2a7b54ea1033e2daa739b8d2"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c834f54f8f4640fd7e4b193f80eb25a0602bba9e19b3cd2fc7ffe8199f5ae02"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:049e0de24cf23766f12cc5cc71d8abc07d4a9deb9061b334b62093dedc7cb068"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a28239037b3d6f16916a4c831a5a0eadf856bdd6d2e92c10a0da3a59eadcf3e"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d3da303ab5f378a268fa7d45f37d7d85c3ec19769f28d2cc0c61826a8de21fe"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25626fb37b3c543818c14821afe0fd3830bc327a43953bc88db924b68c5723f1"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3ab2d36e20fbfcce8f02d73c33a8a7362980cff717926bbae030b93ae46b56c7"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:2f9284e11c751b003fd4215ad92d325d92c9cb19ee6729ebd87e3250072cdcde"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:048c01eee07d37cbd066fc512b9d8b5ea88ceeb4e629ab94b3e56965ad655add"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5ccd429694cf26af7997595d627dd2637e7932214486f55b8a357edaac9dae8c"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7edbc454a29fc6aeae1e1eecba4f07b63b8d76e76a748532233c4c167b4cb9ea"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ad05b683963f69a1d5d2c2bdab1274a31221ca737dbbceaa32bcb67359453cdd"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:df6a94bf9452c6da9b5d76ed229a5683d0306ccb91cca8e1eea883189780d568"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7965c13b3967909a09ecc91f21d09cfc4576bf78140b988904e94f130f188396"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3f1fdb790440a34f6ecf7679e1863b825cb5ffde858a9197f851168ed08371e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5277aec8d879f8d05168fdd17ae811dd313b8ff894aeeaf7cd34ad28b4d77e33"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8ab581d3530611897d863d1a649fb0644b860286b4718db919bfd51ece41f10b"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0483847fa9ad5e3412265c1bd72aad35235512d9ce9d27d81a56d935ef489672"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:de9e06abe3cc5ec6a2d5f75bc99b0bdca4f5c719a5b34026f8c57efbdecd2ee3"}, + {file = "pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df"}, ] [package.dependencies] @@ -2830,13 +3397,14 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygments" -version = "2.18.0" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] @@ -2848,6 +3416,7 @@ version = "2.10.1" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, @@ -2862,12 +3431,25 @@ dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pyte docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] +[[package]] +name = "pyproject-hooks" +version = "1.2.0" +description = "Wrappers to call pyproject.toml-based build backend hooks." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, + {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, +] + [[package]] name = "pyrate-limiter" version = "3.7.0" description = "Python Rate-Limiter using Leaky-Bucket Algorithm" optional = false python-versions = "<4.0,>=3.8" +groups = ["dev"] files = [ {file = "pyrate_limiter-3.7.0-py3-none-any.whl", hash = "sha256:cdbfc8f537d07e2bda76f5191b38aee972b26e1af020d880e3c1ef9d528227ac"}, {file = "pyrate_limiter-3.7.0.tar.gz", hash = "sha256:dc1e6d2c80b559f3333cb44bd822bd558f5a47946dc50cce4263a9c1c5fd8067"}, @@ -2879,13 +3461,14 @@ docs = ["furo (>=2022.3.4,<2023.0.0)", "myst-parser (>=0.17)", "sphinx (>=4.3.0, [[package]] name = "pytest" -version = "8.3.4" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] @@ -2903,6 +3486,7 @@ version = "0.21.2" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest_asyncio-0.21.2-py3-none-any.whl", hash = "sha256:ab664c88bb7998f711d8039cacd4884da6430886ae8bbd4eded552ed2004f16b"}, {file = "pytest_asyncio-0.21.2.tar.gz", hash = "sha256:d67738fc232b94b326b9d060750beb16e0074210b98dd8b58a5239fa2a154f45"}, @@ -2917,13 +3501,14 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-cov" -version = "6.0.0" +version = "6.1.1" description = "Pytest plugin for measuring coverage." optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, - {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, + {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"}, + {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"}, ] [package.dependencies] @@ -2939,6 +3524,7 @@ version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, @@ -2956,6 +3542,7 @@ version = "6.1.1" description = "Postgresql fixtures and fixture factories for Pytest." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest_postgresql-6.1.1-py3-none-any.whl", hash = "sha256:bd4c0970d25685ac3d34d42263fcbfbf134bf02d22519fce7e1ccf4122d8b99a"}, {file = "pytest_postgresql-6.1.1.tar.gz", hash = "sha256:f996637367e6aecebba1349da52eea95340bdb434c90e4b79739e62c656056e2"}, @@ -2970,17 +3557,19 @@ setuptools = "*" [[package]] name = "pytest-subtests" -version = "0.7.0" +version = "0.14.1" description = "unittest subTest() support and subtests fixture" optional = false -python-versions = ">=3.6" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "pytest-subtests-0.7.0.tar.gz", hash = "sha256:95c44c77e3fbede9848bb88ca90b384815fcba8090ef9a9f55659ab163b1681c"}, - {file = "pytest_subtests-0.7.0-py3-none-any.whl", hash = "sha256:2e3691caedea0c464fe96ffffd14bf872df1406b88d1930971dafe1966095bad"}, + {file = "pytest_subtests-0.14.1-py3-none-any.whl", hash = "sha256:e92a780d98b43118c28a16044ad9b841727bd7cb6a417073b38fd2d7ccdf052d"}, + {file = "pytest_subtests-0.14.1.tar.gz", hash = "sha256:350c00adc36c3aff676a66135c81aed9e2182e15f6c3ec8721366918bbbf7580"}, ] [package.dependencies] -pytest = ">=7.0" +attrs = ">=19.2.0" +pytest = ">=7.4" [[package]] name = "pytest-xdist" @@ -2988,6 +3577,7 @@ version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, @@ -3005,28 +3595,29 @@ testing = ["filelock"] [[package]] name = "python-box" -version = "7.2.0" +version = "7.3.2" description = "Advanced Python dictionaries with dot notation access" optional = false -python-versions = ">=3.8" -files = [ - {file = "python_box-7.2.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:6bdeec791e25258351388b3029a3ec5da302bb9ed3be175493c43cdc6c47f5e3"}, - {file = "python_box-7.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c449f7b3756a71479fa9c61a86e344ac00ed782a66d7662590f0afa294249d18"}, - {file = "python_box-7.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:6b0d61f182d394106d963232854e495b51edc178faa5316a797be1178212d7e0"}, - {file = "python_box-7.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e2d752de8c1204255bf7b0c814c59ef48293c187a7e9fdcd2fefa28024b72032"}, - {file = "python_box-7.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a6c35ea356a386077935958a5debcd5b229b9a1b3b26287a52dfe1a7e65d99"}, - {file = "python_box-7.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:32ed58ec4d9e5475efe69f9c7d773dfea90a6a01979e776da93fd2b0a5d04429"}, - {file = "python_box-7.2.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2a2d664c6a27f7515469b6f1e461935a2038ee130b7d194b4b4db4e85d363618"}, - {file = "python_box-7.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a5a7365db1aaf600d3e8a2747fcf6833beb5d45439a54318548f02e302e3ec"}, - {file = "python_box-7.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:739f827056ea148cbea3122d4617c994e829b420b1331183d968b175304e3a4f"}, - {file = "python_box-7.2.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:2617ef3c3d199f55f63c908f540a4dc14ced9b18533a879e6171c94a6a436f23"}, - {file = 
"python_box-7.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd866bed03087b1d8340014da8c3aaae19135767580641df1b4ae6fff6ac0aa"}, - {file = "python_box-7.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:9681f059e7e92bdf20782cd9ea6e533d4711fc7b8c57a462922a025d46add4d0"}, - {file = "python_box-7.2.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:6b59b1e2741c9ceecdf5a5bd9b90502c24650e609cd824d434fed3b6f302b7bb"}, - {file = "python_box-7.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23fae825d809ae7520fdeac88bb52be55a3b63992120a00e381783669edf589"}, - {file = "python_box-7.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:573b1abdcb7bd745fa404444f060ee62fc35a74f067181e55dcb43cfe92f2827"}, - {file = "python_box-7.2.0-py3-none-any.whl", hash = "sha256:a3c90832dd772cb0197fdb5bc06123b6e1b846899a1b53d9c39450d27a584829"}, - {file = "python_box-7.2.0.tar.gz", hash = "sha256:551af20bdab3a60a2a21e3435120453c4ca32f7393787c3a5036e1d9fc6a0ede"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_box-7.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d136163294fd61a1554db7dd203f2e3035064798d30c17d67d948f0de5c572de"}, + {file = "python_box-7.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d72e96547d8e2c2c333909826e9fae338d9a7e4cde07d5c6058cdd468432c0"}, + {file = "python_box-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:3aa52e3b5cc50c80bb7ef4be3e41e81d095310f619454a7ffd61eef3209a6225"}, + {file = "python_box-7.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:32163b1cb151883de0da62b0cd3572610dc72ccf0762f2447baf1d2562e25bea"}, + {file = "python_box-7.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:064cb59b41e25aaf7dbd39efe53151a5f6797cc1cb3c68610f0f21a9d406d67e"}, + {file = "python_box-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:488f0fba9a6416c3334b602366dcd92825adb0811e07e03753dfcf0ed79cd6f7"}, + {file = "python_box-7.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:39009a2da5c20133718b24891a206592adbe09169856aedc450ad1600fc2e511"}, + {file = "python_box-7.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2a72e2f6fb97c7e472ff3272da207ecc615aa222e52e98352391428527c469"}, + {file = "python_box-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9eead914b9fb7d98a1473f5027dcfe27d26b3a10ffa33b9ba22cf948a23cd280"}, + {file = "python_box-7.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1dfc3b9b073f3d7cad1fa90de98eaaa684a494d0574bbc0666f74fa8307fd6b6"}, + {file = "python_box-7.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca4685a7f764b5a71b6e08535ce2a96b7964bb63d8cb4df10f6bb7147b6c54b"}, + {file = "python_box-7.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e143295f74d47a9ab24562ead2375c9be10629599b57f2e86717d3fff60f82a9"}, + {file = "python_box-7.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f3118ab3076b645c76133b8fac51deee30237cecdcafc3af664c4b9000f04db9"}, + {file = "python_box-7.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a760074ba12ccc247796f43b6c61f686ada4b8349ab59e2a6303b27f3ae082"}, + {file = "python_box-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:ea436e7ff5f87bd728472f1e31a9e6e95572c81028c44a8e00097e0968955638"}, + {file = "python_box-7.3.2-py3-none-any.whl", hash = "sha256:fd7d74d5a848623f93b5221fd9fb00b8c00ff0e130fa87f396277aa188659c92"}, + {file = "python_box-7.3.2.tar.gz", hash = 
"sha256:028b9917129e67f311932d93347b8a4f1b500d7a5a2870ee3c035f4e7b19403b"}, ] [package.extras] @@ -3035,7 +3626,7 @@ msgpack = ["msgpack"] pyyaml = ["PyYAML"] ruamel-yaml = ["ruamel.yaml (>=0.17)"] toml = ["toml"] -tomli = ["tomli", "tomli-w"] +tomli = ["tomli ; python_version < \"3.11\"", "tomli-w"] yaml = ["ruamel.yaml (>=0.17)"] [[package]] @@ -3044,6 +3635,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -3054,13 +3646,14 @@ six = ">=1.5" [[package]] name = "python-gitlab" -version = "5.1.0" -description = "A python wrapper for the GitLab API" +version = "5.6.0" +description = "The python wrapper for the GitLab REST and GraphQL APIs." optional = false python-versions = ">=3.9.0" +groups = ["main"] files = [ - {file = "python_gitlab-5.1.0-py3-none-any.whl", hash = "sha256:c30cf547392ce66daaaf020839cfb6c15a91b26e2e7054d1b3f1b92e8dd65e7d"}, - {file = "python_gitlab-5.1.0.tar.gz", hash = "sha256:d5a10dae8328f32fb9214bd3f9dc199b4930cd496f81c9be42a0f8ff338aeb35"}, + {file = "python_gitlab-5.6.0-py3-none-any.whl", hash = "sha256:68980cd70929fc7f8f06d8a7b09bd046a6b79e1995c19d61249f046005099100"}, + {file = "python_gitlab-5.6.0.tar.gz", hash = "sha256:bc531e8ba3e5641b60409445d4919ace68a2c18cb0ec6d48fbced6616b954166"}, ] [package.dependencies] @@ -3074,13 +3667,14 @@ yaml = ["PyYaml (>=6.0.1)"] [[package]] name = "python-jsonpath" -version = "1.2.0" +version = "1.3.0" description = "JSONPath, JSON Pointer and JSON Patch for Python." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "python_jsonpath-1.2.0-py3-none-any.whl", hash = "sha256:3172c7b87098fced1ed84bd3492bd1a19ef1ad41d4f5b8a3e9a147c750ac08b3"}, - {file = "python_jsonpath-1.2.0.tar.gz", hash = "sha256:a29a84ec3ac38e5dcaa62ac2a215de72c4eb60cb1303e10700da980cf7873775"}, + {file = "python_jsonpath-1.3.0-py3-none-any.whl", hash = "sha256:ce586ec5bd934ce97bc2f06600b00437d9684138b77273ced5b70694a8ef3a76"}, + {file = "python_jsonpath-1.3.0.tar.gz", hash = "sha256:ea5eb4d9b1296c8c19cc53538eb0f20fc54128f84571559ee63539e57875fefe"}, ] [[package]] @@ -3089,6 +3683,7 @@ version = "3.0.0" description = "Universally unique lexicographically sortable identifier" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, @@ -3098,14 +3693,16 @@ files = [ pydantic = ["pydantic (>=2.0)"] [[package]] -name = "pytz" -version = "2024.2" -description = "World timezone definitions, modern and historical" +name = "pywin32-ctypes" +version = "0.2.3" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" optional = false -python-versions = "*" +python-versions = ">=3.6" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, + {file = "pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"}, + {file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"}, ] [[package]] @@ -3114,6 +3711,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -3171,29 +3769,122 @@ files = [ ] [[package]] -name = "redis" -version = "5.2.0" -description = "Python client for Redis database and key-value store" +name = "rapidfuzz" +version = "3.13.0" +description = "rapid fuzzy string matching" optional = false -python-versions = ">=3.8" -files = [ - {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"}, - {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "rapidfuzz-3.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aafc42a1dc5e1beeba52cd83baa41372228d6d8266f6d803c16dbabbcc156255"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:85c9a131a44a95f9cac2eb6e65531db014e09d89c4f18c7b1fa54979cb9ff1f3"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d7cec4242d30dd521ef91c0df872e14449d1dffc2a6990ede33943b0dae56c3"}, + {file = 
"rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e297c09972698c95649e89121e3550cee761ca3640cd005e24aaa2619175464e"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef0f5f03f61b0e5a57b1df7beafd83df993fd5811a09871bad6038d08e526d0d"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8cf5f7cd6e4d5eb272baf6a54e182b2c237548d048e2882258336533f3f02b7"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9256218ac8f1a957806ec2fb9a6ddfc6c32ea937c0429e88cf16362a20ed8602"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1bdd2e6d0c5f9706ef7595773a81ca2b40f3b33fd7f9840b726fb00c6c4eb2e"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5280be8fd7e2bee5822e254fe0a5763aa0ad57054b85a32a3d9970e9b09bbcbf"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd742c03885db1fce798a1cd87a20f47f144ccf26d75d52feb6f2bae3d57af05"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5435fcac94c9ecf0504bf88a8a60c55482c32e18e108d6079a0089c47f3f8cf6"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:93a755266856599be4ab6346273f192acde3102d7aa0735e2f48b456397a041f"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-win32.whl", hash = "sha256:3abe6a4e8eb4cfc4cda04dd650a2dc6d2934cbdeda5def7e6fd1c20f6e7d2a0b"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:e8ddb58961401da7d6f55f185512c0d6bd24f529a637078d41dd8ffa5a49c107"}, + {file = "rapidfuzz-3.13.0-cp310-cp310-win_arm64.whl", hash = "sha256:c523620d14ebd03a8d473c89e05fa1ae152821920c3ff78b839218ff69e19ca3"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d395a5cad0c09c7f096433e5fd4224d83b53298d53499945a9b0e5a971a84f3a"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7b3eda607a019169f7187328a8d1648fb9a90265087f6903d7ee3a8eee01805"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98e0bfa602e1942d542de077baf15d658bd9d5dcfe9b762aff791724c1c38b70"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bef86df6d59667d9655905b02770a0c776d2853971c0773767d5ef8077acd624"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fedd316c165beed6307bf754dee54d3faca2c47e1f3bcbd67595001dfa11e969"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5158da7f2ec02a930be13bac53bb5903527c073c90ee37804090614cab83c29e"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b6f913ee4618ddb6d6f3e387b76e8ec2fc5efee313a128809fbd44e65c2bbb2"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d25fdbce6459ccbbbf23b4b044f56fbd1158b97ac50994eaae2a1c0baae78301"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25343ccc589a4579fbde832e6a1e27258bfdd7f2eb0f28cb836d6694ab8591fc"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a9ad1f37894e3ffb76bbab76256e8a8b789657183870be11aa64e306bb5228fd"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:5dc71ef23845bb6b62d194c39a97bb30ff171389c9812d83030c1199f319098c"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b7f4c65facdb94f44be759bbd9b6dda1fa54d0d6169cdf1a209a5ab97d311a75"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-win32.whl", hash = "sha256:b5104b62711565e0ff6deab2a8f5dbf1fbe333c5155abe26d2cfd6f1849b6c87"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:9093cdeb926deb32a4887ebe6910f57fbcdbc9fbfa52252c10b56ef2efb0289f"}, + {file = "rapidfuzz-3.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:f70f646751b6aa9d05be1fb40372f006cc89d6aad54e9d79ae97bd1f5fce5203"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a1a6a906ba62f2556372282b1ef37b26bca67e3d2ea957277cfcefc6275cca7"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fd0975e015b05c79a97f38883a11236f5a24cca83aa992bd2558ceaa5652b26"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d4e13593d298c50c4f94ce453f757b4b398af3fa0fd2fde693c3e51195b7f69"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed6f416bda1c9133000009d84d9409823eb2358df0950231cc936e4bf784eb97"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dc82b6ed01acb536b94a43996a94471a218f4d89f3fdd9185ab496de4b2a981"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9d824de871daa6e443b39ff495a884931970d567eb0dfa213d234337343835f"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d18228a2390375cf45726ce1af9d36ff3dc1f11dce9775eae1f1b13ac6ec50f"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5fe634c9482ec5d4a6692afb8c45d370ae86755e5f57aa6c50bfe4ca2bdd87"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:694eb531889f71022b2be86f625a4209c4049e74be9ca836919b9e395d5e33b3"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:11b47b40650e06147dee5e51a9c9ad73bb7b86968b6f7d30e503b9f8dd1292db"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:98b8107ff14f5af0243f27d236bcc6e1ef8e7e3b3c25df114e91e3a99572da73"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b836f486dba0aceb2551e838ff3f514a38ee72b015364f739e526d720fdb823a"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-win32.whl", hash = "sha256:4671ee300d1818d7bdfd8fa0608580d7778ba701817216f0c17fb29e6b972514"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e2065f68fb1d0bf65adc289c1bdc45ba7e464e406b319d67bb54441a1b9da9e"}, + {file = "rapidfuzz-3.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:65cc97c2fc2c2fe23586599686f3b1ceeedeca8e598cfcc1b7e56dc8ca7e2aa7"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:09e908064d3684c541d312bd4c7b05acb99a2c764f6231bd507d4b4b65226c23"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:57c390336cb50d5d3bfb0cfe1467478a15733703af61f6dffb14b1cd312a6fae"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0da54aa8547b3c2c188db3d1c7eb4d1bb6dd80baa8cdaeaec3d1da3346ec9caa"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:df8e8c21e67afb9d7fbe18f42c6111fe155e801ab103c81109a61312927cc611"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:461fd13250a2adf8e90ca9a0e1e166515cbcaa5e9c3b1f37545cbbeff9e77f6b"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2b3dd5d206a12deca16870acc0d6e5036abeb70e3cad6549c294eff15591527"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1343d745fbf4688e412d8f398c6e6d6f269db99a54456873f232ba2e7aeb4939"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b1b065f370d54551dcc785c6f9eeb5bd517ae14c983d2784c064b3aa525896df"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:11b125d8edd67e767b2295eac6eb9afe0b1cdc82ea3d4b9257da4b8e06077798"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c33f9c841630b2bb7e69a3fb5c84a854075bb812c47620978bddc591f764da3d"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ae4574cb66cf1e85d32bb7e9ec45af5409c5b3970b7ceb8dea90168024127566"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e05752418b24bbd411841b256344c26f57da1148c5509e34ea39c7eb5099ab72"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-win32.whl", hash = "sha256:0e1d08cb884805a543f2de1f6744069495ef527e279e05370dd7c83416af83f8"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9a7c6232be5f809cd39da30ee5d24e6cadd919831e6020ec6c2391f4c3bc9264"}, + {file = "rapidfuzz-3.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:3f32f15bacd1838c929b35c84b43618481e1b3d7a61b5ed2db0291b70ae88b53"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cc64da907114d7a18b5e589057e3acaf2fec723d31c49e13fedf043592a3f6a7"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4d9d7f84c8e992a8dbe5a3fdbea73d733da39bf464e62c912ac3ceba9c0cff93"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a79a2f07786a2070669b4b8e45bd96a01c788e7a3c218f531f3947878e0f956"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f338e71c45b69a482de8b11bf4a029993230760120c8c6e7c9b71760b6825a1"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adb40ca8ddfcd4edd07b0713a860be32bdf632687f656963bcbce84cea04b8d8"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48719f7dcf62dfb181063b60ee2d0a39d327fa8ad81b05e3e510680c44e1c078"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9327a4577f65fc3fb712e79f78233815b8a1c94433d0c2c9f6bc5953018b3565"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:200030dfc0a1d5d6ac18e993c5097c870c97c41574e67f227300a1fb74457b1d"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:cc269e74cad6043cb8a46d0ce580031ab642b5930562c2bb79aa7fbf9c858d26"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:e62779c6371bd2b21dbd1fdce89eaec2d93fd98179d36f61130b489f62294a92"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f4797f821dc5d7c2b6fc818b89f8a3f37bcc900dd9e4369e6ebf1e525efce5db"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash 
= "sha256:d21f188f6fe4fbf422e647ae9d5a68671d00218e187f91859c963d0738ccd88c"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-win32.whl", hash = "sha256:45dd4628dd9c21acc5c97627dad0bb791764feea81436fb6e0a06eef4c6dceaa"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:624a108122039af89ddda1a2b7ab2a11abe60c1521956f142f5d11bcd42ef138"}, + {file = "rapidfuzz-3.13.0-cp39-cp39-win_arm64.whl", hash = "sha256:435071fd07a085ecbf4d28702a66fd2e676a03369ee497cc38bcb69a46bc77e2"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe5790a36d33a5d0a6a1f802aa42ecae282bf29ac6f7506d8e12510847b82a45"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cdb33ee9f8a8e4742c6b268fa6bd739024f34651a06b26913381b1413ebe7590"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c99b76b93f7b495eee7dcb0d6a38fb3ce91e72e99d9f78faa5664a881cb2b7d"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6af42f2ede8b596a6aaf6d49fdee3066ca578f4856b85ab5c1e2145de367a12d"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c0efa73afbc5b265aca0d8a467ae2a3f40d6854cbe1481cb442a62b7bf23c99"}, + {file = "rapidfuzz-3.13.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7ac21489de962a4e2fc1e8f0b0da4aa1adc6ab9512fd845563fecb4b4c52093a"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1ba007f4d35a45ee68656b2eb83b8715e11d0f90e5b9f02d615a8a321ff00c27"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d7a217310429b43be95b3b8ad7f8fc41aba341109dc91e978cd7c703f928c58f"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:558bf526bcd777de32b7885790a95a9548ffdcce68f704a81207be4a286c1095"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:202a87760f5145140d56153b193a797ae9338f7939eb16652dd7ff96f8faf64c"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcccc08f671646ccb1e413c773bb92e7bba789e3a1796fd49d23c12539fe2e4"}, + {file = "rapidfuzz-3.13.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f219f1e3c3194d7a7de222f54450ce12bc907862ff9a8962d83061c1f923c86"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ccbd0e7ea1a216315f63ffdc7cd09c55f57851afc8fe59a74184cb7316c0598b"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a50856f49a4016ef56edd10caabdaf3608993f9faf1e05c3c7f4beeac46bd12a"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fd05336db4d0b8348d7eaaf6fa3c517b11a56abaa5e89470ce1714e73e4aca7"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:573ad267eb9b3f6e9b04febce5de55d8538a87c56c64bf8fd2599a48dc9d8b77"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:30fd1451f87ccb6c2f9d18f6caa483116bbb57b5a55d04d3ddbd7b86f5b14998"}, + {file = "rapidfuzz-3.13.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a6dd36d4916cf57ddb05286ed40b09d034ca5d4bca85c17be0cb6a21290597d9"}, + {file = "rapidfuzz-3.13.0.tar.gz", hash = 
"sha256:d2eaf3839e52cbcc0accbe9817a67b4b0fcf70aaeb229cfddc1c28061f9ce5d8"}, ] [package.extras] -hiredis = ["hiredis (>=3.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] +all = ["numpy"] [[package]] name = "referencing" -version = "0.35.1" +version = "0.36.2" description = "JSON Referencing + Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, + {file = "referencing-0.36.2-py3-none-any.whl", hash = "sha256:e8699adbbf8b5c7de96d8ffa0eb5c158b3beafce084968e2ea8bb08c6794dcd0"}, + {file = "referencing-0.36.2.tar.gz", hash = "sha256:df2e89862cd09deabbdba16944cc3f10feb6b3e6f18e902f7cc25609a34775aa"}, ] [package.dependencies] @@ -3206,6 +3897,7 @@ version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -3227,6 +3919,7 @@ version = "2.0.0" description = "OAuthlib authentication support for Requests." optional = false python-versions = ">=3.4" +groups = ["main"] files = [ {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, @@ -3245,6 +3938,7 @@ version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -3253,15 +3947,43 @@ files = [ [package.dependencies] requests = ">=2.0.1,<3.0.0" +[[package]] +name = "rfc3339-validator" +version = "0.1.4" +description = "A pure python RFC3339 validator" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["dev"] +files = [ + {file = "rfc3339_validator-0.1.4-py2.py3-none-any.whl", hash = "sha256:24f6ec1eda14ef823da9e36ec7113124b39c04d50a4d3d3a3c2859577e7791fa"}, + {file = "rfc3339_validator-0.1.4.tar.gz", hash = "sha256:138a2abdf93304ad60530167e51d2dfb9549521a836871b88d7f4695d0022f6b"}, +] + +[package.dependencies] +six = "*" + +[[package]] +name = "rfc3987" +version = "1.3.8" +description = "Parsing and validation of URIs (RFC 3986) and IRIs (RFC 3987)" +optional = false +python-versions = "*" +groups = ["dev"] +files = [ + {file = "rfc3987-1.3.8-py2.py3-none-any.whl", hash = "sha256:10702b1e51e5658843460b189b185c0366d2cf4cff716f13111b0ea9fd2dce53"}, + {file = "rfc3987-1.3.8.tar.gz", hash = "sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733"}, +] + [[package]] name = "rich" -version = "13.9.4" +version = "14.0.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" 
optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ - {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, - {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, + {file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, + {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, ] [package.dependencies] @@ -3273,112 +3995,138 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "rpds-py" -version = "0.21.0" +version = "0.24.0" description = "Python bindings to Rust's persistent data structures (rpds)" optional = false python-versions = ">=3.9" -files = [ - {file = "rpds_py-0.21.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a017f813f24b9df929674d0332a374d40d7f0162b326562daae8066b502d0590"}, - {file = "rpds_py-0.21.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:20cc1ed0bcc86d8e1a7e968cce15be45178fd16e2ff656a243145e0b439bd250"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad116dda078d0bc4886cb7840e19811562acdc7a8e296ea6ec37e70326c1b41c"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:808f1ac7cf3b44f81c9475475ceb221f982ef548e44e024ad5f9e7060649540e"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de552f4a1916e520f2703ec474d2b4d3f86d41f353e7680b597512ffe7eac5d0"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:efec946f331349dfc4ae9d0e034c263ddde19414fe5128580f512619abed05f1"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b80b4690bbff51a034bfde9c9f6bf9357f0a8c61f548942b80f7b66356508bf5"}, - {file = "rpds_py-0.21.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:085ed25baac88953d4283e5b5bd094b155075bb40d07c29c4f073e10623f9f2e"}, - {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8efac2a1273eed2354397a51216ae1e198ecbce9036fba4e7610b308b6153"}, - {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:95a5bad1ac8a5c77b4e658671642e4af3707f095d2b78a1fdd08af0dfb647624"}, - {file = "rpds_py-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3e53861b29a13d5b70116ea4230b5f0f3547b2c222c5daa090eb7c9c82d7f664"}, - {file = "rpds_py-0.21.0-cp310-none-win32.whl", hash = "sha256:ea3a6ac4d74820c98fcc9da4a57847ad2cc36475a8bd9683f32ab6d47a2bd682"}, - {file = "rpds_py-0.21.0-cp310-none-win_amd64.whl", hash = "sha256:b8f107395f2f1d151181880b69a2869c69e87ec079c49c0016ab96860b6acbe5"}, - {file = "rpds_py-0.21.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5555db3e618a77034954b9dc547eae94166391a98eb867905ec8fcbce1308d95"}, - {file = "rpds_py-0.21.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:97ef67d9bbc3e15584c2f3c74bcf064af36336c10d2e21a2131e123ce0f924c9"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ab2c2a26d2f69cdf833174f4d9d86118edc781ad9a8fa13970b527bf8236027"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4e8921a259f54bfbc755c5bbd60c82bb2339ae0324163f32868f63f0ebb873d9"}, - {file = 
"rpds_py-0.21.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a7ff941004d74d55a47f916afc38494bd1cfd4b53c482b77c03147c91ac0ac3"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5145282a7cd2ac16ea0dc46b82167754d5e103a05614b724457cffe614f25bd8"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de609a6f1b682f70bb7163da745ee815d8f230d97276db049ab447767466a09d"}, - {file = "rpds_py-0.21.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40c91c6e34cf016fa8e6b59d75e3dbe354830777fcfd74c58b279dceb7975b75"}, - {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d2132377f9deef0c4db89e65e8bb28644ff75a18df5293e132a8d67748397b9f"}, - {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:0a9e0759e7be10109645a9fddaaad0619d58c9bf30a3f248a2ea57a7c417173a"}, - {file = "rpds_py-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9e20da3957bdf7824afdd4b6eeb29510e83e026473e04952dca565170cd1ecc8"}, - {file = "rpds_py-0.21.0-cp311-none-win32.whl", hash = "sha256:f71009b0d5e94c0e86533c0b27ed7cacc1239cb51c178fd239c3cfefefb0400a"}, - {file = "rpds_py-0.21.0-cp311-none-win_amd64.whl", hash = "sha256:e168afe6bf6ab7ab46c8c375606298784ecbe3ba31c0980b7dcbb9631dcba97e"}, - {file = "rpds_py-0.21.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:30b912c965b2aa76ba5168fd610087bad7fcde47f0a8367ee8f1876086ee6d1d"}, - {file = "rpds_py-0.21.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca9989d5d9b1b300bc18e1801c67b9f6d2c66b8fd9621b36072ed1df2c977f72"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f54e7106f0001244a5f4cf810ba8d3f9c542e2730821b16e969d6887b664266"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fed5dfefdf384d6fe975cc026886aece4f292feaf69d0eeb716cfd3c5a4dd8be"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:590ef88db231c9c1eece44dcfefd7515d8bf0d986d64d0caf06a81998a9e8cab"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f983e4c2f603c95dde63df633eec42955508eefd8d0f0e6d236d31a044c882d7"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b229ce052ddf1a01c67d68166c19cb004fb3612424921b81c46e7ea7ccf7c3bf"}, - {file = "rpds_py-0.21.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ebf64e281a06c904a7636781d2e973d1f0926a5b8b480ac658dc0f556e7779f4"}, - {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:998a8080c4495e4f72132f3d66ff91f5997d799e86cec6ee05342f8f3cda7dca"}, - {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:98486337f7b4f3c324ab402e83453e25bb844f44418c066623db88e4c56b7c7b"}, - {file = "rpds_py-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a78d8b634c9df7f8d175451cfeac3810a702ccb85f98ec95797fa98b942cea11"}, - {file = "rpds_py-0.21.0-cp312-none-win32.whl", hash = "sha256:a58ce66847711c4aa2ecfcfaff04cb0327f907fead8945ffc47d9407f41ff952"}, - {file = "rpds_py-0.21.0-cp312-none-win_amd64.whl", hash = "sha256:e860f065cc4ea6f256d6f411aba4b1251255366e48e972f8a347cf88077b24fd"}, - {file = "rpds_py-0.21.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ee4eafd77cc98d355a0d02f263efc0d3ae3ce4a7c24740010a8b4012bbb24937"}, - {file = 
"rpds_py-0.21.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:688c93b77e468d72579351a84b95f976bd7b3e84aa6686be6497045ba84be560"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c38dbf31c57032667dd5a2f0568ccde66e868e8f78d5a0d27dcc56d70f3fcd3b"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d6129137f43f7fa02d41542ffff4871d4aefa724a5fe38e2c31a4e0fd343fb0"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:520ed8b99b0bf86a176271f6fe23024323862ac674b1ce5b02a72bfeff3fff44"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aaeb25ccfb9b9014a10eaf70904ebf3f79faaa8e60e99e19eef9f478651b9b74"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af04ac89c738e0f0f1b913918024c3eab6e3ace989518ea838807177d38a2e94"}, - {file = "rpds_py-0.21.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b9b76e2afd585803c53c5b29e992ecd183f68285b62fe2668383a18e74abe7a3"}, - {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5afb5efde74c54724e1a01118c6e5c15e54e642c42a1ba588ab1f03544ac8c7a"}, - {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:52c041802a6efa625ea18027a0723676a778869481d16803481ef6cc02ea8cb3"}, - {file = "rpds_py-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee1e4fc267b437bb89990b2f2abf6c25765b89b72dd4a11e21934df449e0c976"}, - {file = "rpds_py-0.21.0-cp313-none-win32.whl", hash = "sha256:0c025820b78817db6a76413fff6866790786c38f95ea3f3d3c93dbb73b632202"}, - {file = "rpds_py-0.21.0-cp313-none-win_amd64.whl", hash = "sha256:320c808df533695326610a1b6a0a6e98f033e49de55d7dc36a13c8a30cfa756e"}, - {file = "rpds_py-0.21.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:2c51d99c30091f72a3c5d126fad26236c3f75716b8b5e5cf8effb18889ced928"}, - {file = "rpds_py-0.21.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cbd7504a10b0955ea287114f003b7ad62330c9e65ba012c6223dba646f6ffd05"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6dcc4949be728ede49e6244eabd04064336012b37f5c2200e8ec8eb2988b209c"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f414da5c51bf350e4b7960644617c130140423882305f7574b6cf65a3081cecb"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9afe42102b40007f588666bc7de82451e10c6788f6f70984629db193849dced1"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b929c2bb6e29ab31f12a1117c39f7e6d6450419ab7464a4ea9b0b417174f044"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8404b3717da03cbf773a1d275d01fec84ea007754ed380f63dfc24fb76ce4592"}, - {file = "rpds_py-0.21.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e12bb09678f38b7597b8346983d2323a6482dcd59e423d9448108c1be37cac9d"}, - {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:58a0e345be4b18e6b8501d3b0aa540dad90caeed814c515e5206bb2ec26736fd"}, - {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:c3761f62fcfccf0864cc4665b6e7c3f0c626f0380b41b8bd1ce322103fa3ef87"}, - {file = "rpds_py-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:c2b2f71c6ad6c2e4fc9ed9401080badd1469fa9889657ec3abea42a3d6b2e1ed"}, - {file = "rpds_py-0.21.0-cp39-none-win32.whl", hash = "sha256:b21747f79f360e790525e6f6438c7569ddbfb1b3197b9e65043f25c3c9b489d8"}, - {file = "rpds_py-0.21.0-cp39-none-win_amd64.whl", hash = "sha256:0626238a43152918f9e72ede9a3b6ccc9e299adc8ade0d67c5e142d564c9a83d"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:6b4ef7725386dc0762857097f6b7266a6cdd62bfd209664da6712cb26acef035"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6bc0e697d4d79ab1aacbf20ee5f0df80359ecf55db33ff41481cf3e24f206919"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da52d62a96e61c1c444f3998c434e8b263c384f6d68aca8274d2e08d1906325c"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:98e4fe5db40db87ce1c65031463a760ec7906ab230ad2249b4572c2fc3ef1f9f"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30bdc973f10d28e0337f71d202ff29345320f8bc49a31c90e6c257e1ccef4333"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:faa5e8496c530f9c71f2b4e1c49758b06e5f4055e17144906245c99fa6d45356"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32eb88c30b6a4f0605508023b7141d043a79b14acb3b969aa0b4f99b25bc7d4a"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a89a8ce9e4e75aeb7fa5d8ad0f3fecdee813802592f4f46a15754dcb2fd6b061"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:241e6c125568493f553c3d0fdbb38c74babf54b45cef86439d4cd97ff8feb34d"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:3b766a9f57663396e4f34f5140b3595b233a7b146e94777b97a8413a1da1be18"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:af4a644bf890f56e41e74be7d34e9511e4954894d544ec6b8efe1e21a1a8da6c"}, - {file = "rpds_py-0.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3e30a69a706e8ea20444b98a49f386c17b26f860aa9245329bab0851ed100677"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:031819f906bb146561af051c7cef4ba2003d28cff07efacef59da973ff7969ba"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:b876f2bc27ab5954e2fd88890c071bd0ed18b9c50f6ec3de3c50a5ece612f7a6"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc5695c321e518d9f03b7ea6abb5ea3af4567766f9852ad1560f501b17588c7b"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b4de1da871b5c0fd5537b26a6fc6814c3cc05cabe0c941db6e9044ffbb12f04a"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:878f6fea96621fda5303a2867887686d7a198d9e0f8a40be100a63f5d60c88c9"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8eeec67590e94189f434c6d11c426892e396ae59e4801d17a93ac96b8c02a6c"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ff2eba7f6c0cb523d7e9cff0903f2fe1feff8f0b2ceb6bd71c0e20a4dcee271"}, - {file = 
"rpds_py-0.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a429b99337062877d7875e4ff1a51fe788424d522bd64a8c0a20ef3021fdb6ed"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:d167e4dbbdac48bd58893c7e446684ad5d425b407f9336e04ab52e8b9194e2ed"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eb2de8a147ffe0626bfdc275fc6563aa7bf4b6db59cf0d44f0ccd6ca625a24e"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e78868e98f34f34a88e23ee9ccaeeec460e4eaf6db16d51d7a9b883e5e785a5e"}, - {file = "rpds_py-0.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4991ca61656e3160cdaca4851151fd3f4a92e9eba5c7a530ab030d6aee96ec89"}, - {file = "rpds_py-0.21.0.tar.gz", hash = "sha256:ed6378c9d66d0de903763e7706383d60c33829581f0adff47b6535f1802fa6db"}, +groups = ["dev"] +files = [ + {file = "rpds_py-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:006f4342fe729a368c6df36578d7a348c7c716be1da0a1a0f86e3021f8e98724"}, + {file = "rpds_py-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2d53747da70a4e4b17f559569d5f9506420966083a31c5fbd84e764461c4444b"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8acd55bd5b071156bae57b555f5d33697998752673b9de554dd82f5b5352727"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7e80d375134ddb04231a53800503752093dbb65dad8dabacce2c84cccc78e964"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60748789e028d2a46fc1c70750454f83c6bdd0d05db50f5ae83e2db500b34da5"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6e1daf5bf6c2be39654beae83ee6b9a12347cb5aced9a29eecf12a2d25fff664"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b221c2457d92a1fb3c97bee9095c874144d196f47c038462ae6e4a14436f7bc"}, + {file = "rpds_py-0.24.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:66420986c9afff67ef0c5d1e4cdc2d0e5262f53ad11e4f90e5e22448df485bf0"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:43dba99f00f1d37b2a0265a259592d05fcc8e7c19d140fe51c6e6f16faabeb1f"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a88c0d17d039333a41d9bf4616bd062f0bd7aa0edeb6cafe00a2fc2a804e944f"}, + {file = "rpds_py-0.24.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc31e13ce212e14a539d430428cd365e74f8b2d534f8bc22dd4c9c55b277b875"}, + {file = "rpds_py-0.24.0-cp310-cp310-win32.whl", hash = "sha256:fc2c1e1b00f88317d9de6b2c2b39b012ebbfe35fe5e7bef980fd2a91f6100a07"}, + {file = "rpds_py-0.24.0-cp310-cp310-win_amd64.whl", hash = "sha256:c0145295ca415668420ad142ee42189f78d27af806fcf1f32a18e51d47dd2052"}, + {file = "rpds_py-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2d3ee4615df36ab8eb16c2507b11e764dcc11fd350bbf4da16d09cda11fcedef"}, + {file = "rpds_py-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e13ae74a8a3a0c2f22f450f773e35f893484fcfacb00bb4344a7e0f4f48e1f97"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf86f72d705fc2ef776bb7dd9e5fbba79d7e1f3e258bf9377f8204ad0fc1c51e"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:c43583ea8517ed2e780a345dd9960896afc1327e8cf3ac8239c167530397440d"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4cd031e63bc5f05bdcda120646a0d32f6d729486d0067f09d79c8db5368f4586"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:34d90ad8c045df9a4259c47d2e16a3f21fdb396665c94520dbfe8766e62187a4"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e838bf2bb0b91ee67bf2b889a1a841e5ecac06dd7a2b1ef4e6151e2ce155c7ae"}, + {file = "rpds_py-0.24.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04ecf5c1ff4d589987b4d9882872f80ba13da7d42427234fce8f22efb43133bc"}, + {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:630d3d8ea77eabd6cbcd2ea712e1c5cecb5b558d39547ac988351195db433f6c"}, + {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ebcb786b9ff30b994d5969213a8430cbb984cdd7ea9fd6df06663194bd3c450c"}, + {file = "rpds_py-0.24.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:174e46569968ddbbeb8a806d9922f17cd2b524aa753b468f35b97ff9c19cb718"}, + {file = "rpds_py-0.24.0-cp311-cp311-win32.whl", hash = "sha256:5ef877fa3bbfb40b388a5ae1cb00636a624690dcb9a29a65267054c9ea86d88a"}, + {file = "rpds_py-0.24.0-cp311-cp311-win_amd64.whl", hash = "sha256:e274f62cbd274359eff63e5c7e7274c913e8e09620f6a57aae66744b3df046d6"}, + {file = "rpds_py-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:d8551e733626afec514b5d15befabea0dd70a343a9f23322860c4f16a9430205"}, + {file = "rpds_py-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e374c0ce0ca82e5b67cd61fb964077d40ec177dd2c4eda67dba130de09085c7"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d69d003296df4840bd445a5d15fa5b6ff6ac40496f956a221c4d1f6f7b4bc4d9"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8212ff58ac6dfde49946bea57474a386cca3f7706fc72c25b772b9ca4af6b79e"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:528927e63a70b4d5f3f5ccc1fa988a35456eb5d15f804d276709c33fc2f19bda"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a824d2c7a703ba6daaca848f9c3d5cb93af0505be505de70e7e66829affd676e"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44d51febb7a114293ffd56c6cf4736cb31cd68c0fddd6aa303ed09ea5a48e029"}, + {file = "rpds_py-0.24.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3fab5f4a2c64a8fb64fc13b3d139848817a64d467dd6ed60dcdd6b479e7febc9"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9be4f99bee42ac107870c61dfdb294d912bf81c3c6d45538aad7aecab468b6b7"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:564c96b6076a98215af52f55efa90d8419cc2ef45d99e314fddefe816bc24f91"}, + {file = "rpds_py-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:75a810b7664c17f24bf2ffd7f92416c00ec84b49bb68e6a0d93e542406336b56"}, + {file = "rpds_py-0.24.0-cp312-cp312-win32.whl", hash = "sha256:f6016bd950be4dcd047b7475fdf55fb1e1f59fc7403f387be0e8123e4a576d30"}, + {file = "rpds_py-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:998c01b8e71cf051c28f5d6f1187abbdf5cf45fc0efce5da6c06447cba997034"}, + {file = "rpds_py-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = 
"sha256:3d2d8e4508e15fc05b31285c4b00ddf2e0eb94259c2dc896771966a163122a0c"}, + {file = "rpds_py-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0f00c16e089282ad68a3820fd0c831c35d3194b7cdc31d6e469511d9bffc535c"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951cc481c0c395c4a08639a469d53b7d4afa252529a085418b82a6b43c45c240"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c9ca89938dff18828a328af41ffdf3902405a19f4131c88e22e776a8e228c5a8"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed0ef550042a8dbcd657dfb284a8ee00f0ba269d3f2286b0493b15a5694f9fe8"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b2356688e5d958c4d5cb964af865bea84db29971d3e563fb78e46e20fe1848b"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78884d155fd15d9f64f5d6124b486f3d3f7fd7cd71a78e9670a0f6f6ca06fb2d"}, + {file = "rpds_py-0.24.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a4a535013aeeef13c5532f802708cecae8d66c282babb5cd916379b72110cf7"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:84e0566f15cf4d769dade9b366b7b87c959be472c92dffb70462dd0844d7cbad"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:823e74ab6fbaa028ec89615ff6acb409e90ff45580c45920d4dfdddb069f2120"}, + {file = "rpds_py-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c61a2cb0085c8783906b2f8b1f16a7e65777823c7f4d0a6aaffe26dc0d358dd9"}, + {file = "rpds_py-0.24.0-cp313-cp313-win32.whl", hash = "sha256:60d9b630c8025b9458a9d114e3af579a2c54bd32df601c4581bd054e85258143"}, + {file = "rpds_py-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:6eea559077d29486c68218178ea946263b87f1c41ae7f996b1f30a983c476a5a"}, + {file = "rpds_py-0.24.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:d09dc82af2d3c17e7dd17120b202a79b578d79f2b5424bda209d9966efeed114"}, + {file = "rpds_py-0.24.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5fc13b44de6419d1e7a7e592a4885b323fbc2f46e1f22151e3a8ed3b8b920405"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c347a20d79cedc0a7bd51c4d4b7dbc613ca4e65a756b5c3e57ec84bd43505b47"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20f2712bd1cc26a3cc16c5a1bfee9ed1abc33d4cdf1aabd297fe0eb724df4272"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad911555286884be1e427ef0dc0ba3929e6821cbeca2194b13dc415a462c7fd"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0aeb3329c1721c43c58cae274d7d2ca85c1690d89485d9c63a006cb79a85771a"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a0f156e9509cee987283abd2296ec816225145a13ed0391df8f71bf1d789e2d"}, + {file = "rpds_py-0.24.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:aa6800adc8204ce898c8a424303969b7aa6a5e4ad2789c13f8648739830323b7"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a18fc371e900a21d7392517c6f60fe859e802547309e94313cd8181ad9db004d"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = 
"sha256:9168764133fd919f8dcca2ead66de0105f4ef5659cbb4fa044f7014bed9a1797"}, + {file = "rpds_py-0.24.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f6e3cec44ba05ee5cbdebe92d052f69b63ae792e7d05f1020ac5e964394080c"}, + {file = "rpds_py-0.24.0-cp313-cp313t-win32.whl", hash = "sha256:8ebc7e65ca4b111d928b669713865f021b7773350eeac4a31d3e70144297baba"}, + {file = "rpds_py-0.24.0-cp313-cp313t-win_amd64.whl", hash = "sha256:675269d407a257b8c00a6b58205b72eec8231656506c56fd429d924ca00bb350"}, + {file = "rpds_py-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a36b452abbf29f68527cf52e181fced56685731c86b52e852053e38d8b60bc8d"}, + {file = "rpds_py-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b3b397eefecec8e8e39fa65c630ef70a24b09141a6f9fc17b3c3a50bed6b50e"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdabcd3beb2a6dca7027007473d8ef1c3b053347c76f685f5f060a00327b8b65"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5db385bacd0c43f24be92b60c857cf760b7f10d8234f4bd4be67b5b20a7c0b6b"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8097b3422d020ff1c44effc40ae58e67d93e60d540a65649d2cdaf9466030791"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493fe54318bed7d124ce272fc36adbf59d46729659b2c792e87c3b95649cdee9"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8aa362811ccdc1f8dadcc916c6d47e554169ab79559319ae9fae7d7752d0d60c"}, + {file = "rpds_py-0.24.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d8f9a6e7fd5434817526815f09ea27f2746c4a51ee11bb3439065f5fc754db58"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8205ee14463248d3349131bb8099efe15cd3ce83b8ef3ace63c7e976998e7124"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:921ae54f9ecba3b6325df425cf72c074cd469dea843fb5743a26ca7fb2ccb149"}, + {file = "rpds_py-0.24.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:32bab0a56eac685828e00cc2f5d1200c548f8bc11f2e44abf311d6b548ce2e45"}, + {file = "rpds_py-0.24.0-cp39-cp39-win32.whl", hash = "sha256:f5c0ed12926dec1dfe7d645333ea59cf93f4d07750986a586f511c0bc61fe103"}, + {file = "rpds_py-0.24.0-cp39-cp39-win_amd64.whl", hash = "sha256:afc6e35f344490faa8276b5f2f7cbf71f88bc2cda4328e00553bd451728c571f"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:619ca56a5468f933d940e1bf431c6f4e13bef8e688698b067ae68eb4f9b30e3a"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:4b28e5122829181de1898c2c97f81c0b3246d49f585f22743a1246420bb8d399"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e5ab32cf9eb3647450bc74eb201b27c185d3857276162c101c0f8c6374e098"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:208b3a70a98cf3710e97cabdc308a51cd4f28aa6e7bb11de3d56cd8b74bab98d"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bbc4362e06f950c62cad3d4abf1191021b2ffaf0b31ac230fbf0526453eee75e"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebea2821cdb5f9fef44933617be76185b80150632736f3d76e54829ab4a3b4d1"}, + {file = 
"rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a4df06c35465ef4d81799999bba810c68d29972bf1c31db61bfdb81dd9d5bb"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3aa13bdf38630da298f2e0d77aca967b200b8cc1473ea05248f6c5e9c9bdb44"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:041f00419e1da7a03c46042453598479f45be3d787eb837af382bfc169c0db33"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:d8754d872a5dfc3c5bf9c0e059e8107451364a30d9fd50f1f1a85c4fb9481164"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:896c41007931217a343eff197c34513c154267636c8056fb409eafd494c3dcdc"}, + {file = "rpds_py-0.24.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:92558d37d872e808944c3c96d0423b8604879a3d1c86fdad508d7ed91ea547d5"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f9e0057a509e096e47c87f753136c9b10d7a91842d8042c2ee6866899a717c0d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d6e109a454412ab82979c5b1b3aee0604eca4bbf9a02693bb9df027af2bfa91a"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc1c892b1ec1f8cbd5da8de287577b455e388d9c328ad592eabbdcb6fc93bee5"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9c39438c55983d48f4bb3487734d040e22dad200dab22c41e331cee145e7a50d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9d7e8ce990ae17dda686f7e82fd41a055c668e13ddcf058e7fb5e9da20b57793"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9ea7f4174d2e4194289cb0c4e172d83e79a6404297ff95f2875cf9ac9bced8ba"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb2954155bb8f63bb19d56d80e5e5320b61d71084617ed89efedb861a684baea"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04f2b712a2206e13800a8136b07aaedc23af3facab84918e7aa89e4be0260032"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:eda5c1e2a715a4cbbca2d6d304988460942551e4e5e3b7457b50943cd741626d"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:9abc80fe8c1f87218db116016de575a7998ab1629078c90840e8d11ab423ee25"}, + {file = "rpds_py-0.24.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:6a727fd083009bc83eb83d6950f0c32b3c94c8b80a9b667c87f4bd1274ca30ba"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:e0f3ef95795efcd3b2ec3fe0a5bcfb5dadf5e3996ea2117427e524d4fbf309c6"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:2c13777ecdbbba2077670285dd1fe50828c8742f6a4119dbef6f83ea13ad10fb"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79e8d804c2ccd618417e96720ad5cd076a86fa3f8cb310ea386a3e6229bae7d1"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fd822f019ccccd75c832deb7aa040bb02d70a92eb15a2f16c7987b7ad4ee8d83"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:0047638c3aa0dbcd0ab99ed1e549bbf0e142c9ecc173b6492868432d8989a046"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5b66d1b201cc71bc3081bc2f1fc36b0c1f268b773e03bbc39066651b9e18391"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbcbb6db5582ea33ce46a5d20a5793134b5365110d84df4e30b9d37c6fd40ad3"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:63981feca3f110ed132fd217bf7768ee8ed738a55549883628ee3da75bb9cb78"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:3a55fc10fdcbf1a4bd3c018eea422c52cf08700cf99c28b5cb10fe97ab77a0d3"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:c30ff468163a48535ee7e9bf21bd14c7a81147c0e58a36c1078289a8ca7af0bd"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:369d9c6d4c714e36d4a03957b4783217a3ccd1e222cdd67d464a3a479fc17796"}, + {file = "rpds_py-0.24.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:24795c099453e3721fda5d8ddd45f5dfcc8e5a547ce7b8e9da06fecc3832e26f"}, + {file = "rpds_py-0.24.0.tar.gz", hash = "sha256:772cc1b2cd963e7e17e6cc55fe0371fb9c704d63e44cacec7b9b7f523b78919e"}, ] [[package]] name = "rsa" -version = "4.9" +version = "4.9.1" description = "Pure-Python RSA implementation" optional = false -python-versions = ">=3.6,<4" +python-versions = "<4,>=3.6" +groups = ["main"] files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, ] [package.dependencies] @@ -3386,17 +4134,18 @@ pyasn1 = ">=0.1.3" [[package]] name = "ruamel-yaml" -version = "0.18.6" +version = "0.18.14" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, - {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, + {file = "ruamel.yaml-0.18.14-py3-none-any.whl", hash = "sha256:710ff198bb53da66718c7db27eec4fbcc9aa6ca7204e4c1df2f282b6fe5eb6b2"}, + {file = "ruamel.yaml-0.18.14.tar.gz", hash = "sha256:7227b76aaec364df15936730efbf7d72b30c0b79b1d578bbb8e3dcb2d81f52b7"}, ] [package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} +"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.14\""} [package.extras] docs = ["mercurial (>5.7)", "ryd"] @@ -3408,6 +4157,8 @@ version = "0.2.12" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" optional = false python-versions = ">=3.9" +groups = ["dev"] +markers = "platform_python_implementation == \"CPython\" and python_version == \"3.13\"" files = [ {file = 
"ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"}, @@ -3415,6 +4166,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}, @@ -3423,6 +4175,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}, {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}, @@ -3431,6 +4184,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}, {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = 
"sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}, @@ -3439,6 +4193,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4f6f3eac23941b32afccc23081e1f50612bdbe4e982012ef4f5797986828cd01"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}, {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}, @@ -3447,6 +4202,7 @@ files = [ {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}, {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}, {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, @@ -3454,40 +4210,42 @@ files = [ [[package]] name = "ruff" -version = "0.8.1" +version = "0.8.6" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" -files = [ - {file = "ruff-0.8.1-py3-none-linux_armv6l.whl", hash = "sha256:fae0805bd514066f20309f6742f6ee7904a773eb9e6c17c45d6b1600ca65c9b5"}, - {file = "ruff-0.8.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b8a4f7385c2285c30f34b200ca5511fcc865f17578383db154e098150ce0a087"}, - {file = "ruff-0.8.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:cd054486da0c53e41e0086e1730eb77d1f698154f910e0cd9e0d64274979a209"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2029b8c22da147c50ae577e621a5bfbc5d1fed75d86af53643d7a7aee1d23871"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2666520828dee7dfc7e47ee4ea0d928f40de72056d929a7c5292d95071d881d1"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:333c57013ef8c97a53892aa56042831c372e0bb1785ab7026187b7abd0135ad5"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:288326162804f34088ac007139488dcb43de590a5ccfec3166396530b58fb89d"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b12c39b9448632284561cbf4191aa1b005882acbc81900ffa9f9f471c8ff7e26"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:364e6674450cbac8e998f7b30639040c99d81dfb5bbc6dfad69bc7a8f916b3d1"}, - {file = "ruff-0.8.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b22346f845fec132aa39cd29acb94451d030c10874408dbf776af3aaeb53284c"}, - {file = "ruff-0.8.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b2f2f7a7e7648a2bfe6ead4e0a16745db956da0e3a231ad443d2a66a105c04fa"}, - {file = "ruff-0.8.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:adf314fc458374c25c5c4a4a9270c3e8a6a807b1bec018cfa2813d6546215540"}, - {file = "ruff-0.8.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:a885d68342a231b5ba4d30b8c6e1b1ee3a65cf37e3d29b3c74069cdf1ee1e3c9"}, - {file = "ruff-0.8.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d2c16e3508c8cc73e96aa5127d0df8913d2290098f776416a4b157657bee44c5"}, - {file = "ruff-0.8.1-py3-none-win32.whl", hash = "sha256:93335cd7c0eaedb44882d75a7acb7df4b77cd7cd0d2255c93b28791716e81790"}, - {file = "ruff-0.8.1-py3-none-win_amd64.whl", hash = "sha256:2954cdbe8dfd8ab359d4a30cd971b589d335a44d444b6ca2cb3d1da21b75e4b6"}, - {file = "ruff-0.8.1-py3-none-win_arm64.whl", hash = "sha256:55873cc1a473e5ac129d15eccb3c008c096b94809d693fc7053f588b67822737"}, - {file = "ruff-0.8.1.tar.gz", hash = "sha256:3583db9a6450364ed5ca3f3b4225958b24f78178908d5c4bc0f46251ccca898f"}, +groups = ["dev"] +files = [ + {file = "ruff-0.8.6-py3-none-linux_armv6l.whl", hash = "sha256:defed167955d42c68b407e8f2e6f56ba52520e790aba4ca707a9c88619e580e3"}, + {file = "ruff-0.8.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:54799ca3d67ae5e0b7a7ac234baa657a9c1784b48ec954a094da7c206e0365b1"}, + {file = "ruff-0.8.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e88b8f6d901477c41559ba540beeb5a671e14cd29ebd5683903572f4b40a9807"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0509e8da430228236a18a677fcdb0c1f102dd26d5520f71f79b094963322ed25"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a7ddb221779871cf226100e677b5ea38c2d54e9e2c8ed847450ebbdf99b32d"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:248b1fb3f739d01d528cc50b35ee9c4812aa58cc5935998e776bf8ed5b251e75"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bc3c083c50390cf69e7e1b5a5a7303898966be973664ec0c4a4acea82c1d4315"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52d587092ab8df308635762386f45f4638badb0866355b2b86760f6d3c076188"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61323159cf21bc3897674e5adb27cd9e7700bab6b84de40d7be28c3d46dc67cf"}, + {file = "ruff-0.8.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae4478b1471fc0c44ed52a6fb787e641a2ac58b1c1f91763bafbc2faddc5117"}, + {file = "ruff-0.8.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0c000a471d519b3e6cfc9c6680025d923b4ca140ce3e4612d1a2ef58e11f11fe"}, + {file = "ruff-0.8.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9257aa841e9e8d9b727423086f0fa9a86b6b420fbf4bf9e1465d1250ce8e4d8d"}, + {file = "ruff-0.8.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45a56f61b24682f6f6709636949ae8cc82ae229d8d773b4c76c09ec83964a95a"}, + {file = "ruff-0.8.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:496dd38a53aa173481a7d8866bcd6451bd934d06976a2505028a50583e001b76"}, + {file = "ruff-0.8.6-py3-none-win32.whl", hash = "sha256:e169ea1b9eae61c99b257dc83b9ee6c76f89042752cb2d83486a7d6e48e8f764"}, + {file = "ruff-0.8.6-py3-none-win_amd64.whl", hash = "sha256:f1d70bef3d16fdc897ee290d7d20da3cbe4e26349f62e8a0274e7a3f4ce7a905"}, + {file = "ruff-0.8.6-py3-none-win_arm64.whl", hash = "sha256:7d7fc2377a04b6e04ffe588caad613d0c460eb2ecba4c0ccbbfe2bc973cbc162"}, + {file = "ruff-0.8.6.tar.gz", hash = "sha256:dcad24b81b62650b0eb8814f576fc65cfee8674772a6e24c9b747911801eeaa5"}, ] [[package]] name = "sanic" -version = "24.6.0" +version = "24.12.0" description = "A web server and web framework that's written to go fast. Build fast. Run fast." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "sanic-24.6.0-py3-none-any.whl", hash = "sha256:e2c6b392e213d85d9843cf27c64e3f2dacb3ec5c31c8c7ade4c404cd3030e994"}, - {file = "sanic-24.6.0.tar.gz", hash = "sha256:2e0841e2c8c28e68a0e6fc570c42aafbbe3b385d7141b9f96997d9d6c17d7afb"}, + {file = "sanic-24.12.0-py3-none-any.whl", hash = "sha256:3c2a01ec0b6c5926e3efe34eac1b497d31ed989038fe213eb25ad0c98687d388"}, + {file = "sanic-24.12.0.tar.gz", hash = "sha256:09c23aa917616c1e60e44c66dfd7582cb9fd6503f78298c309945909f5839836"}, ] [package.dependencies] @@ -3505,30 +4263,31 @@ uvloop = {version = ">=0.15.0", markers = "sys_platform != \"win32\" and impleme websockets = ">=10.0" [package.extras] -all = ["autodocsumm (>=0.2.11)", "bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "sphinx (>=2.1.2)", "sphinx-rtd-theme (>=0.4.3)", "towncrier", "tox", "types-ujson", "uvicorn"] -dev = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "towncrier", "tox", "types-ujson", "uvicorn"] -docs = ["autodocsumm (>=0.2.11)", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "pygments", "sphinx (>=2.1.2)", "sphinx-rtd-theme (>=0.4.3)"] +all = ["autodocsumm (>=0.2.11)", "bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "sphinx (>=2.1.2)", "sphinx_rtd_theme (>=0.4.3)", "towncrier", "tox", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] +dev = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "towncrier", "tox", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] +docs = ["autodocsumm (>=0.2.11)", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "pygments", "sphinx (>=2.1.2)", "sphinx_rtd_theme (>=0.4.3)"] ext = ["sanic-ext"] http3 = ["aioquic"] -test = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "types-ujson", "uvicorn"] +test = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] [[package]] name = "sanic-ext" -version = "23.12.0" +version = "24.12.0" description = "Extend your Sanic installation with some core functionality." 
optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "sanic-ext-23.12.0.tar.gz", hash = "sha256:42fc41e7fafa58f3b790f685f3dd8a8de281460b4169d0e91f4e11b8747f845c"}, - {file = "sanic_ext-23.12.0-py3-none-any.whl", hash = "sha256:3ba2c143d7c41d89b87a11c6214b9d9b52c3994ff8ce3a03792b54ec5627e2c3"}, + {file = "sanic_ext-24.12.0-py3-none-any.whl", hash = "sha256:861f809f071770cf28acd5f13e97ed59985e07361b13b4b4540da1333730c83e"}, + {file = "sanic_ext-24.12.0.tar.gz", hash = "sha256:8f912f4c29f242bc638346d09b79f0c8896ff64e79bd0e7fa09eac4b6c0e23c8"}, ] [package.dependencies] pyyaml = ">=3.0.0" [package.extras] -dev = ["Jinja2", "black (>=21.4b2)", "coverage", "flake8 (>=3.7.7)", "isort (>=5.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "sanic-testing (>=22.9.0)", "tox"] -test = ["Jinja2", "coverage", "pytest", "pytest-asyncio", "pytest-cov", "sanic-testing (>=22.9.0)", "tox"] +dev = ["Jinja2", "black (>=21.4b2)", "coverage", "flake8 (>=3.7.7)", "isort (>=5.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "sanic_testing (>=22.9.0)", "tox"] +test = ["Jinja2", "coverage", "pytest", "pytest-asyncio", "pytest-cov", "sanic_testing (>=22.9.0)", "tox"] [[package]] name = "sanic-routing" @@ -3536,6 +4295,7 @@ version = "23.12.0" description = "Core routing component for Sanic" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "sanic-routing-23.12.0.tar.gz", hash = "sha256:1dcadc62c443e48c852392dba03603f9862b6197fc4cba5bbefeb1ace0848b04"}, {file = "sanic_routing-23.12.0-py3-none-any.whl", hash = "sha256:1558a72afcb9046ed3134a5edae02fc1552cff08f0fff2e8d5de0877ea43ed73"}, @@ -3547,6 +4307,7 @@ version = "24.6.0" description = "Core testing clients for Sanic" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "sanic_testing-24.6.0-py3-none-any.whl", hash = "sha256:b1027184735e88230891aa0461fff84093abfa3bff0f4d29c0f78f42e59efada"}, {file = "sanic_testing-24.6.0.tar.gz", hash = "sha256:7591ce537e2a651efb6dc01b458e7e4ea5347f6d91438676774c6f505a124731"}, @@ -3556,32 +4317,34 @@ files = [ httpx = ">=0.18" [package.extras] -dev = ["pytest", "pytest-asyncio", "sanic (>=22.12)", "setuptools"] +dev = ["pytest", "pytest-asyncio", "sanic (>=22.12)", "setuptools ; python_version > \"3.11\""] [[package]] name = "schemathesis" -version = "3.29.2" +version = "3.39.7" description = "Property-based testing framework for Open API and GraphQL based apps" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "schemathesis-3.29.2-py3-none-any.whl", hash = "sha256:57571503da60c5ceb67f31bd55895b7bcd80b36b78c36541865e844f843eb839"}, - {file = "schemathesis-3.29.2.tar.gz", hash = "sha256:b32f75bfdb5a56c5fea56e9cdc269cb5fa20549db3266632e22fbab343ed04e3"}, + {file = "schemathesis-3.39.7-py3-none-any.whl", hash = "sha256:4173291ee0180bf320941a115e1662dfcb0955039515373457951bd417a00f4a"}, + {file = "schemathesis-3.39.7.tar.gz", hash = "sha256:370c5629286317836c97cd9a0fcf97313195e25d0a5ab66349e4f60a5e9cb626"}, ] [package.dependencies] backoff = ">=2.1.2,<3.0" click = ">=7.0,<9.0" colorama = ">=0.4,<1.0" +harfile = ">=0.3.0,<1.0" httpx = ">=0.22.0,<1.0" -hypothesis = {version = ">=6.84.3,<7", markers = "python_version > \"3.8\""} -hypothesis-graphql = ">=0.11.0,<1" +hypothesis = {version = ">=6.103.4,<7", markers = "python_version > \"3.8\""} +hypothesis-graphql = ">=0.11.1,<1" hypothesis-jsonschema = ">=0.23.1,<0.24" -jsonschema = ">=4.18.0,<5.0" +jsonschema = {version = ">=4.18.0,<5.0", extras = ["format"]} junit-xml = ">=1.9,<2.0" 
pyrate-limiter = ">=2.10,<4.0" pytest = ">=4.6.4,<9" -pytest-subtests = ">=0.2.1,<0.8.0" +pytest-subtests = ">=0.2.1,<0.15.0" pyyaml = ">=5.1,<7.0" requests = ">=2.22,<3" starlette = ">=0.13,<1" @@ -3594,19 +4357,37 @@ yarl = ">=1.5,<2.0" [package.extras] bench = ["pytest-codspeed (==2.2.1)"] cov = ["coverage-enable-subprocess", "coverage[toml] (>=5.3)"] -dev = ["schemathesis[bench,cov,docs,tests]"] +dev = ["aiohttp (>=3.9.1,<4.0)", "coverage (>=6)", "coverage-enable-subprocess", "coverage[toml] (>=5.3)", "fastapi (>=0.86.0)", "flask (>=2.1.1,<3.0)", "hypothesis-openapi (>=0.2,<1) ; python_version >= \"3.10\"", "pydantic (>=1.10.2)", "pytest-asyncio (>=0.18.0,<1.0)", "pytest-codspeed (==2.2.1)", "pytest-httpserver (>=1.0,<2.0)", "pytest-mock (>=3.7.0,<4.0)", "pytest-trio (>=0.8,<1.0)", "pytest-xdist (>=3,<4.0)", "sphinx", "sphinx-click", "sphinx-rtd-theme", "strawberry-graphql[fastapi] (>=0.109.0)", "syrupy (>=2,<5.0)", "trustme (>=0.9.0,<1.0)"] docs = ["sphinx", "sphinx-click", "sphinx-rtd-theme"] -tests = ["aiohttp (>=3.9.1,<4.0)", "coverage (>=6)", "fastapi (>=0.86.0)", "flask (>=2.1.1,<3.0)", "pydantic (>=1.10.2)", "pytest-asyncio (>=0.18.0,<1.0)", "pytest-httpserver (>=1.0,<2.0)", "pytest-mock (>=3.7.0,<4.0)", "pytest-trio (>=0.8,<1.0)", "pytest-xdist (>=3,<4.0)", "strawberry-graphql[fastapi] (>=0.109.0)", "syrupy (>=2,<5.0)", "trustme (>=0.9.0,<1.0)"] +tests = ["aiohttp (>=3.9.1,<4.0)", "coverage (>=6)", "fastapi (>=0.86.0)", "flask (>=2.1.1,<3.0)", "hypothesis-openapi (>=0.2,<1) ; python_version >= \"3.10\"", "pydantic (>=1.10.2)", "pytest-asyncio (>=0.18.0,<1.0)", "pytest-httpserver (>=1.0,<2.0)", "pytest-mock (>=3.7.0,<4.0)", "pytest-trio (>=0.8,<1.0)", "pytest-xdist (>=3,<4.0)", "strawberry-graphql[fastapi] (>=0.109.0)", "syrupy (>=2,<5.0)", "trustme (>=0.9.0,<1.0)"] + +[[package]] +name = "secretstorage" +version = "3.3.3" +description = "Python bindings to FreeDesktop.org Secret Service API" +optional = false +python-versions = ">=3.6" +groups = ["main"] +markers = "sys_platform == \"linux\"" +files = [ + {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, + {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, +] + +[package.dependencies] +cryptography = ">=2.0" +jeepney = ">=0.6" [[package]] name = "sentry-sdk" -version = "2.19.0" +version = "2.26.1" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "sentry_sdk-2.19.0-py2.py3-none-any.whl", hash = "sha256:7b0b3b709dee051337244a09a30dbf6e95afe0d34a1f8b430d45e0982a7c125b"}, - {file = "sentry_sdk-2.19.0.tar.gz", hash = "sha256:ee4a4d2ae8bfe3cac012dcf3e4607975904c137e1738116549fc3dbbb6ff0e36"}, + {file = "sentry_sdk-2.26.1-py2.py3-none-any.whl", hash = "sha256:e99390e3f217d13ddcbaeaed08789f1ca614d663b345b9da42e35ad6b60d696a"}, + {file = "sentry_sdk-2.26.1.tar.gz", hash = "sha256:759e019c41551a21519a95e6cef6d91fb4af1054761923dadaee2e6eca9c02c7"}, ] [package.dependencies] @@ -3651,37 +4432,53 @@ sanic = ["sanic (>=0.8)"] sqlalchemy = ["sqlalchemy (>=1.2)"] starlette = ["starlette (>=0.19.1)"] starlite = ["starlite (>=1.48)"] +statsig = ["statsig (>=0.55.3)"] tornado = ["tornado (>=6)"] +unleash = ["UnleashClient (>=6.0.1)"] [[package]] name = "setuptools" -version = "75.6.0" +version = "75.9.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false 
python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, - {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, + {file = "setuptools-75.9.1-py3-none-any.whl", hash = "sha256:0a6f876d62f4d978ca1a11ab4daf728d1357731f978543ff18ecdbf9fd071f73"}, + {file = "setuptools-75.9.1.tar.gz", hash = "sha256:b6eca2c3070cdc82f71b4cb4bb2946bc0760a210d11362278cf1ff394e6ea32c"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "shellingham" +version = "1.5.4" +description = "Tool to Detect Surrounding Shell" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, + {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, +] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -3690,6 +4487,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -3701,6 +4499,7 @@ version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, @@ -3708,80 +4507,81 @@ files = [ [[package]] name = "sqlalchemy" -version = "2.0.36" +version = "2.0.40" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, - {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, - {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, +groups = ["main"] +files = [ + {file = "SQLAlchemy-2.0.40-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96"}, + {file = 
"SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win32.whl", hash = "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win_amd64.whl", hash = "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win32.whl", hash = "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win_amd64.whl", hash = "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win32.whl", hash = "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win_amd64.whl", hash = "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d"}, + {file = 
"sqlalchemy-2.0.40-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win32.whl", hash = "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win_amd64.whl", hash = "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win32.whl", hash = "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win_amd64.whl", hash = "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win32.whl", hash = "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win_amd64.whl", hash = "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37"}, + {file = 
"sqlalchemy-2.0.40-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win32.whl", hash = "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win_amd64.whl", hash = "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870"}, + {file = "sqlalchemy-2.0.40-py3-none-any.whl", hash = "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a"}, + {file = "sqlalchemy-2.0.40.tar.gz", hash = "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +greenlet = {version = ">=1", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] @@ -3792,7 +4592,7 @@ mysql-connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] postgresql-pg8000 = ["pg8000 (>=1.29.1)"] postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] @@ -3803,20 +4603,21 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "starlette" -version = "0.41.3" +version = "0.46.2" description = "The little ASGI library that shines." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7"}, - {file = "starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835"}, + {file = "starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35"}, + {file = "starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5"}, ] [package.dependencies] -anyio = ">=3.4.0,<5" +anyio = ">=3.6.2,<5" [package.extras] -full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7)", "pyyaml"] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] [[package]] name = "starlette-testclient" @@ -3824,6 +4625,7 @@ version = "0.4.1" description = "A backport of Starlette TestClient using requests! ⏪️" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "starlette_testclient-0.4.1-py3-none-any.whl", hash = "sha256:dcf0eb237dc47f062ef5925f98330af46f67e547cb587119c9ae78c17ae6c1d1"}, {file = "starlette_testclient-0.4.1.tar.gz", hash = "sha256:9e993ffe12fab45606116257813986612262fe15c1bb6dc9e39cc68693ac1fc5"}, @@ -3835,27 +4637,44 @@ starlette = ">=0.20.1" [[package]] name = "stevedore" -version = "5.4.0" +version = "5.4.1" description = "Manage dynamic plugins for Python applications" optional = false python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "stevedore-5.4.0-py3-none-any.whl", hash = "sha256:b0be3c4748b3ea7b854b265dcb4caa891015e442416422be16f8b31756107857"}, - {file = "stevedore-5.4.0.tar.gz", hash = "sha256:79e92235ecb828fe952b6b8b0c6c87863248631922c8e8e0fa5b17b232c4514d"}, + {file = "stevedore-5.4.1-py3-none-any.whl", hash = "sha256:d10a31c7b86cba16c1f6e8d15416955fc797052351a56af15e608ad20811fcfe"}, + {file = "stevedore-5.4.1.tar.gz", hash = "sha256:3135b5ae50fe12816ef291baff420acb727fcd356106e3e9cbfa9e5985cd6f4b"}, ] [package.dependencies] pbr = ">=2.0.0" +[[package]] +name = "syrupy" +version = "4.9.1" +description = "Pytest Snapshot Test Utility" +optional = false +python-versions = ">=3.8.1" +groups = ["dev"] +files = [ + {file = "syrupy-4.9.1-py3-none-any.whl", hash = "sha256:b94cc12ed0e5e75b448255430af642516842a2374a46936dd2650cfb6dd20eda"}, + {file = "syrupy-4.9.1.tar.gz", hash = "sha256:b7d0fcadad80a7d2f6c4c71917918e8ebe2483e8c703dfc8d49cdbb01081f9a4"}, +] + +[package.dependencies] +pytest = ">=7.0.0,<9.0.0" + [[package]] name = "tenacity" -version = "9.0.0" +version = "9.1.2" description = "Retry code until it succeeds" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, - {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, + {file = "tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"}, + {file = "tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb"}, ] [package.extras] @@ -3868,6 +4687,7 @@ version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] 
files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -3875,24 +4695,68 @@ files = [ [[package]] name = "tomli" -version = "2.1.0" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" -files = [ - {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, - {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, +groups = ["dev"] +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = 
"sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] name = "tomli-w" -version = "1.1.0" +version = "1.2.0" description = "A lil' TOML writer" optional = false python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "tomli_w-1.2.0-py3-none-any.whl", hash = "sha256:188306098d013b691fcadc011abd66727d3c414c571bb01b1a174ba8c983cf90"}, + {file = "tomli_w-1.2.0.tar.gz", hash = "sha256:2dd14fac5a47c27be9cd4c976af5a12d87fb1f0b4512f81d69cce3b35ae25021"}, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +description = "Style preserving TOML library" +optional = false +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "tomli_w-1.1.0-py3-none-any.whl", hash = "sha256:1403179c78193e3184bfaade390ddbd071cba48a32a2e62ba11aae47490c63f7"}, - {file = "tomli_w-1.1.0.tar.gz", hash = "sha256:49e847a3a304d516a169a601184932ef0f6b61623fe680f836a2aa7128ed0d33"}, + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, ] [[package]] @@ -3901,6 +4765,7 @@ version = "1.1.1" description = "Human-readable HTML tracebacks for Python exceptions" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "tracerite-1.1.1-py3-none-any.whl", hash = "sha256:3a787a9ecb1a136ea9ce17e6328e414ec414a4f644130af4e1e330bec2dece29"}, {file = "tracerite-1.1.1.tar.gz", hash = "sha256:6400a35a187747189e4bb8d4a8e471bd86d14dbdcc94bcad23f4eda023f41356"}, @@ -3910,102 +4775,75 @@ files = [ html5tagger = ">=1.2.1" 
[[package]] -name = "types-aiofiles" -version = "24.1.0.20240626" -description = "Typing stubs for aiofiles" +name = "trove-classifiers" +version = "2025.4.11.15" +description = "Canonical source for classifiers on PyPI (pypi.org)." optional = false -python-versions = ">=3.8" +python-versions = "*" +groups = ["main"] files = [ - {file = "types-aiofiles-24.1.0.20240626.tar.gz", hash = "sha256:48604663e24bc2d5038eac05ccc33e75799b0779e93e13d6a8f711ddc306ac08"}, - {file = "types_aiofiles-24.1.0.20240626-py3-none-any.whl", hash = "sha256:7939eca4a8b4f9c6491b6e8ef160caee9a21d32e18534a57d5ed90aee47c66b4"}, + {file = "trove_classifiers-2025.4.11.15-py3-none-any.whl", hash = "sha256:e7d98983f004df35293caf954bdfe944b139eb402677a97115450e320f0bd855"}, + {file = "trove_classifiers-2025.4.11.15.tar.gz", hash = "sha256:634728aa6698dc1ae3db161da94d9e4c7597a9a5da2c4410211b36f15fed60fc"}, ] [[package]] -name = "types-cffi" -version = "1.16.0.20240331" -description = "Typing stubs for cffi" +name = "types-aiofiles" +version = "24.1.0.20250326" +description = "Typing stubs for aiofiles" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "types-cffi-1.16.0.20240331.tar.gz", hash = "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee"}, - {file = "types_cffi-1.16.0.20240331-py3-none-any.whl", hash = "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0"}, + {file = "types_aiofiles-24.1.0.20250326-py3-none-any.whl", hash = "sha256:dfb58c9aa18bd449e80fb5d7f49dc3dd20d31de920a46223a61798ee4a521a70"}, + {file = "types_aiofiles-24.1.0.20250326.tar.gz", hash = "sha256:c4bbe432fd043911ba83fb635456f5cc54f6d05fda2aadf6bef12a84f07a6efe"}, ] -[package.dependencies] -types-setuptools = "*" - [[package]] -name = "types-pyopenssl" -version = "24.1.0.20240722" -description = "Typing stubs for pyOpenSSL" +name = "types-python-dateutil" +version = "2.9.0.20241206" +description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "types-pyOpenSSL-24.1.0.20240722.tar.gz", hash = "sha256:47913b4678a01d879f503a12044468221ed8576263c1540dcb0484ca21b08c39"}, - {file = "types_pyOpenSSL-24.1.0.20240722-py3-none-any.whl", hash = "sha256:6a7a5d2ec042537934cfb4c9d4deb0e16c4c6250b09358df1f083682fe6fda54"}, + {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, + {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, ] -[package.dependencies] -cryptography = ">=35.0.0" -types-cffi = "*" - [[package]] name = "types-pyyaml" -version = "6.0.12.20240917" +version = "6.0.12.20250402" description = "Typing stubs for PyYAML" optional = false -python-versions = ">=3.8" -files = [ - {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"}, - {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"}, -] - -[[package]] -name = "types-redis" -version = "4.6.0.20241004" -description = "Typing stubs for redis" -optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e"}, - {file = 
"types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed"}, + {file = "types_pyyaml-6.0.12.20250402-py3-none-any.whl", hash = "sha256:652348fa9e7a203d4b0d21066dfb00760d3cbd5a15ebb7cf8d33c88a49546681"}, + {file = "types_pyyaml-6.0.12.20250402.tar.gz", hash = "sha256:d7c13c3e6d335b6af4b0122a01ff1d270aba84ab96d1a1a1063ecba3e13ec075"}, ] -[package.dependencies] -cryptography = ">=35.0.0" -types-pyOpenSSL = "*" - [[package]] name = "types-requests" -version = "2.32.0.20241016" +version = "2.32.0.20250328" description = "Typing stubs for requests" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, - {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, + {file = "types_requests-2.32.0.20250328-py3-none-any.whl", hash = "sha256:72ff80f84b15eb3aa7a8e2625fffb6a93f2ad5a0c20215fc1dcfa61117bcb2a2"}, + {file = "types_requests-2.32.0.20250328.tar.gz", hash = "sha256:c9e67228ea103bd811c96984fac36ed2ae8da87a36a633964a21f199d60baf32"}, ] [package.dependencies] urllib3 = ">=2" -[[package]] -name = "types-setuptools" -version = "75.5.0.20241122" -description = "Typing stubs for setuptools" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types_setuptools-75.5.0.20241122-py3-none-any.whl", hash = "sha256:d69c445f7bdd5e49d1b2441aadcee1388febcc9ad9d9d5fd33648b555e0b1c31"}, - {file = "types_setuptools-75.5.0.20241122.tar.gz", hash = "sha256:196aaf1811cbc1c77ac1d4c4879d5308b6fdf426e56b73baadbca2a1827dadef"}, -] - [[package]] name = "types-toml" version = "0.10.8.20240310" description = "Typing stubs for toml" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "types-toml-0.10.8.20240310.tar.gz", hash = "sha256:3d41501302972436a6b8b239c850b26689657e25281b48ff0ec06345b8830331"}, {file = "types_toml-0.10.8.20240310-py3-none-any.whl", hash = "sha256:627b47775d25fa29977d9c70dc0cbab3f314f32c8d8d0c012f2ef5de7aaec05d"}, @@ -4017,6 +4855,7 @@ version = "1.26.25.14" description = "Typing stubs for urllib3" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "types-urllib3-1.26.25.14.tar.gz", hash = "sha256:229b7f577c951b8c1b92c1bc2b2fdb0b49847bd2af6d1cc2a2e3dd340f3bda8f"}, {file = "types_urllib3-1.26.25.14-py3-none-any.whl", hash = "sha256:9683bbb7fb72e32bfe9d2be6e04875fbe1b3eeec3cbb4ea231435aa7fd6b4f0e"}, @@ -4024,24 +4863,42 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = 
"sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, ] +[package.dependencies] +typing-extensions = ">=4.12.0" + [[package]] name = "tzdata" -version = "2024.2" +version = "2025.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main", "dev"] +markers = "sys_platform == \"win32\"" files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, ] [[package]] @@ -4050,6 +4907,8 @@ version = "5.10.0" description = "Ultra fast JSON encoder and decoder for Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "sys_platform != \"win32\" and implementation_name == \"cpython\"" files = [ {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, @@ -4137,24 +4996,41 @@ version = "0.11.3" description = "Type-checked function calls at runtime" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "undictify-0.11.3-py3-none-any.whl", hash = "sha256:4bfdc075b2f06ee027b05e241434c8efcbebf6c83fcc5b8d9d8def56dab4b5ff"}, {file = "undictify-0.11.3.tar.gz", hash = "sha256:1481170ed8b9862c033e7549d817b90cead6002677c602d1bbdbf8ea15100098"}, ] +[[package]] +name = "uri-template" +version = "1.3.0" +description = "RFC 6570 URI Template Processor" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "uri-template-1.3.0.tar.gz", hash = "sha256:0e00f8eb65e18c7de20d595a14336e9f337ead580c70934141624b6d1ffdacc7"}, + {file = "uri_template-1.3.0-py3-none-any.whl", hash = "sha256:a44a133ea12d44a0c0f06d7d42a52d71282e77e2f937d8abd5655b8d56fc1363"}, +] + +[package.extras] +dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake8-commas", "flake8-comprehensions", "flake8-continuation", "flake8-datetimez", "flake8-docstrings", "flake8-import-order", "flake8-literal", "flake8-modern-annotations", "flake8-noqa", "flake8-pyproject", "flake8-requirements", "flake8-typechecking-import", "flake8-use-fstring", "mypy", "pep8-naming", "types-PyYAML"] + [[package]] name = "urllib3" -version = "2.2.3" +version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -4165,6 +5041,7 @@ version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" +groups = ["main", "dev"] files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -4212,13 +5089,14 @@ test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", [[package]] name = "virtualenv" -version = "20.27.1" +version = "20.30.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "virtualenv-20.27.1-py3-none-any.whl", hash = "sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4"}, - {file = "virtualenv-20.27.1.tar.gz", hash = "sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba"}, + {file = "virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6"}, + {file = "virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8"}, ] [package.dependencies] @@ -4228,7 +5106,19 @@ platformdirs = ">=3.9.1,<5" [package.extras] docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] + +[[package]] +name = "webcolors" +version = "24.11.1" +description = "A library for working with the color formats defined by HTML and CSS." 
+optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "webcolors-24.11.1-py3-none-any.whl", hash = "sha256:515291393b4cdf0eb19c155749a096f779f7d909f7cceea072791cb9095b92e9"}, + {file = "webcolors-24.11.1.tar.gz", hash = "sha256:ecb3d768f32202af770477b8b65f318fa4f566c22948673a977b00d589dd80f6"}, +] [[package]] name = "websocket-client" @@ -4236,6 +5126,7 @@ version = "1.8.0" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, @@ -4248,80 +5139,81 @@ test = ["websockets"] [[package]] name = "websockets" -version = "14.1" +version = "15.0.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false python-versions = ">=3.9" -files = [ - {file = "websockets-14.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a0adf84bc2e7c86e8a202537b4fd50e6f7f0e4a6b6bf64d7ccb96c4cd3330b29"}, - {file = "websockets-14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90b5d9dfbb6d07a84ed3e696012610b6da074d97453bd01e0e30744b472c8179"}, - {file = "websockets-14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2177ee3901075167f01c5e335a6685e71b162a54a89a56001f1c3e9e3d2ad250"}, - {file = "websockets-14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f14a96a0034a27f9d47fd9788913924c89612225878f8078bb9d55f859272b0"}, - {file = "websockets-14.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f874ba705deea77bcf64a9da42c1f5fc2466d8f14daf410bc7d4ceae0a9fcb0"}, - {file = "websockets-14.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9607b9a442392e690a57909c362811184ea429585a71061cd5d3c2b98065c199"}, - {file = "websockets-14.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bea45f19b7ca000380fbd4e02552be86343080120d074b87f25593ce1700ad58"}, - {file = "websockets-14.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:219c8187b3ceeadbf2afcf0f25a4918d02da7b944d703b97d12fb01510869078"}, - {file = "websockets-14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad2ab2547761d79926effe63de21479dfaf29834c50f98c4bf5b5480b5838434"}, - {file = "websockets-14.1-cp310-cp310-win32.whl", hash = "sha256:1288369a6a84e81b90da5dbed48610cd7e5d60af62df9851ed1d1d23a9069f10"}, - {file = "websockets-14.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0744623852f1497d825a49a99bfbec9bea4f3f946df6eb9d8a2f0c37a2fec2e"}, - {file = "websockets-14.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:449d77d636f8d9c17952628cc7e3b8faf6e92a17ec581ec0c0256300717e1512"}, - {file = "websockets-14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a35f704be14768cea9790d921c2c1cc4fc52700410b1c10948511039be824aac"}, - {file = "websockets-14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b1f3628a0510bd58968c0f60447e7a692933589b791a6b572fcef374053ca280"}, - {file = "websockets-14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c3deac3748ec73ef24fc7be0b68220d14d47d6647d2f85b2771cb35ea847aa1"}, - {file = "websockets-14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7048eb4415d46368ef29d32133134c513f507fff7d953c18c91104738a68c3b3"}, - {file = "websockets-14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cf0ad281c979306a6a34242b371e90e891bce504509fb6bb5246bbbf31e7b6"}, - {file = "websockets-14.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cc1fc87428c1d18b643479caa7b15db7d544652e5bf610513d4a3478dbe823d0"}, - {file = "websockets-14.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f95ba34d71e2fa0c5d225bde3b3bdb152e957150100e75c86bc7f3964c450d89"}, - {file = "websockets-14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9481a6de29105d73cf4515f2bef8eb71e17ac184c19d0b9918a3701c6c9c4f23"}, - {file = "websockets-14.1-cp311-cp311-win32.whl", hash = "sha256:368a05465f49c5949e27afd6fbe0a77ce53082185bbb2ac096a3a8afaf4de52e"}, - {file = "websockets-14.1-cp311-cp311-win_amd64.whl", hash = "sha256:6d24fc337fc055c9e83414c94e1ee0dee902a486d19d2a7f0929e49d7d604b09"}, - {file = "websockets-14.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ed907449fe5e021933e46a3e65d651f641975a768d0649fee59f10c2985529ed"}, - {file = "websockets-14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:87e31011b5c14a33b29f17eb48932e63e1dcd3fa31d72209848652310d3d1f0d"}, - {file = "websockets-14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bc6ccf7d54c02ae47a48ddf9414c54d48af9c01076a2e1023e3b486b6e72c707"}, - {file = "websockets-14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9777564c0a72a1d457f0848977a1cbe15cfa75fa2f67ce267441e465717dcf1a"}, - {file = "websockets-14.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a655bde548ca98f55b43711b0ceefd2a88a71af6350b0c168aa77562104f3f45"}, - {file = "websockets-14.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3dfff83ca578cada2d19e665e9c8368e1598d4e787422a460ec70e531dbdd58"}, - {file = "websockets-14.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6a6c9bcf7cdc0fd41cc7b7944447982e8acfd9f0d560ea6d6845428ed0562058"}, - {file = "websockets-14.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4b6caec8576e760f2c7dd878ba817653144d5f369200b6ddf9771d64385b84d4"}, - {file = "websockets-14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb6d38971c800ff02e4a6afd791bbe3b923a9a57ca9aeab7314c21c84bf9ff05"}, - {file = "websockets-14.1-cp312-cp312-win32.whl", hash = "sha256:1d045cbe1358d76b24d5e20e7b1878efe578d9897a25c24e6006eef788c0fdf0"}, - {file = "websockets-14.1-cp312-cp312-win_amd64.whl", hash = "sha256:90f4c7a069c733d95c308380aae314f2cb45bd8a904fb03eb36d1a4983a4993f"}, - {file = "websockets-14.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3630b670d5057cd9e08b9c4dab6493670e8e762a24c2c94ef312783870736ab9"}, - {file = "websockets-14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:36ebd71db3b89e1f7b1a5deaa341a654852c3518ea7a8ddfdf69cc66acc2db1b"}, - {file = "websockets-14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5b918d288958dc3fa1c5a0b9aa3256cb2b2b84c54407f4813c45d52267600cd3"}, - {file = "websockets-14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00fe5da3f037041da1ee0cf8e308374e236883f9842c7c465aa65098b1c9af59"}, - {file = "websockets-14.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8149a0f5a72ca36720981418eeffeb5c2729ea55fa179091c81a0910a114a5d2"}, - {file = "websockets-14.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77569d19a13015e840b81550922056acabc25e3f52782625bc6843cfa034e1da"}, - {file = "websockets-14.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cf5201a04550136ef870aa60ad3d29d2a59e452a7f96b94193bee6d73b8ad9a9"}, - {file = "websockets-14.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:88cf9163ef674b5be5736a584c999e98daf3aabac6e536e43286eb74c126b9c7"}, - {file = "websockets-14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:836bef7ae338a072e9d1863502026f01b14027250a4545672673057997d5c05a"}, - {file = "websockets-14.1-cp313-cp313-win32.whl", hash = "sha256:0d4290d559d68288da9f444089fd82490c8d2744309113fc26e2da6e48b65da6"}, - {file = "websockets-14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8621a07991add373c3c5c2cf89e1d277e49dc82ed72c75e3afc74bd0acc446f0"}, - {file = "websockets-14.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:01bb2d4f0a6d04538d3c5dfd27c0643269656c28045a53439cbf1c004f90897a"}, - {file = "websockets-14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:414ffe86f4d6f434a8c3b7913655a1a5383b617f9bf38720e7c0799fac3ab1c6"}, - {file = "websockets-14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8fda642151d5affdee8a430bd85496f2e2517be3a2b9d2484d633d5712b15c56"}, - {file = "websockets-14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd7c11968bc3860d5c78577f0dbc535257ccec41750675d58d8dc66aa47fe52c"}, - {file = "websockets-14.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a032855dc7db987dff813583d04f4950d14326665d7e714d584560b140ae6b8b"}, - {file = "websockets-14.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7e7ea2f782408c32d86b87a0d2c1fd8871b0399dd762364c731d86c86069a78"}, - {file = "websockets-14.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:39450e6215f7d9f6f7bc2a6da21d79374729f5d052333da4d5825af8a97e6735"}, - {file = "websockets-14.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:ceada5be22fa5a5a4cdeec74e761c2ee7db287208f54c718f2df4b7e200b8d4a"}, - {file = "websockets-14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3fc753451d471cff90b8f467a1fc0ae64031cf2d81b7b34e1811b7e2691bc4bc"}, - {file = "websockets-14.1-cp39-cp39-win32.whl", hash = "sha256:14839f54786987ccd9d03ed7f334baec0f02272e7ec4f6e9d427ff584aeea8b4"}, - {file = "websockets-14.1-cp39-cp39-win_amd64.whl", hash = "sha256:d9fd19ecc3a4d5ae82ddbfb30962cf6d874ff943e56e0c81f5169be2fda62979"}, - {file = "websockets-14.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5dc25a9dbd1a7f61eca4b7cb04e74ae4b963d658f9e4f9aad9cd00b688692c8"}, - {file = "websockets-14.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:04a97aca96ca2acedf0d1f332c861c5a4486fdcba7bcef35873820f940c4231e"}, - {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df174ece723b228d3e8734a6f2a6febbd413ddec39b3dc592f5a4aa0aff28098"}, - {file = "websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:034feb9f4286476f273b9a245fb15f02c34d9586a5bc936aff108c3ba1b21beb"}, - {file = 
"websockets-14.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c308dabd2b380807ab64b62985eaccf923a78ebc572bd485375b9ca2b7dc7"}, - {file = "websockets-14.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5a42d3ecbb2db5080fc578314439b1d79eef71d323dc661aa616fb492436af5d"}, - {file = "websockets-14.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:ddaa4a390af911da6f680be8be4ff5aaf31c4c834c1a9147bc21cbcbca2d4370"}, - {file = "websockets-14.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a4c805c6034206143fbabd2d259ec5e757f8b29d0a2f0bf3d2fe5d1f60147a4a"}, - {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:205f672a6c2c671a86d33f6d47c9b35781a998728d2c7c2a3e1cf3333fcb62b7"}, - {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef440054124728cc49b01c33469de06755e5a7a4e83ef61934ad95fc327fbb0"}, - {file = "websockets-14.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7591d6f440af7f73c4bd9404f3772bfee064e639d2b6cc8c94076e71b2471c1"}, - {file = "websockets-14.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:25225cc79cfebc95ba1d24cd3ab86aaa35bcd315d12fa4358939bd55e9bd74a5"}, - {file = "websockets-14.1-py3-none-any.whl", hash = "sha256:4d4fc827a20abe6d544a119896f6b78ee13fe81cbfef416f3f2ddf09a03f0e2e"}, - {file = "websockets-14.1.tar.gz", hash = "sha256:398b10c77d471c0aab20a845e7a60076b6390bfdaac7a6d2edb0d2c59d75e8d8"}, +groups = ["main"] +files = [ + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, ] [[package]] @@ -4330,6 +5222,7 @@ version = "3.1.3" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.9" +groups = ["main", "dev"] files = [ {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, @@ -4347,6 +5240,7 @@ version = "1.2.0" description = "WebSockets state-machine based protocol implementation" optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, @@ -4355,103 +5249,326 @@ files = [ [package.dependencies] h11 = ">=0.9.0,<1" +[[package]] +name = "xattr" +version = "1.1.4" +description = "Python wrapper for extended filesystem attributes" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "sys_platform == \"darwin\"" +files = [ + {file = "xattr-1.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:acb85b6249e9f3ea10cbb56df1021d43f4027212f0d004304bc9075dc7f54769"}, + {file = "xattr-1.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1a848ab125c0fafdc501ccd83b4c9018bba576a037a4ca5960a22f39e295552e"}, + {file = "xattr-1.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:467ee77471d26ae5187ee7081b82175b5ca56ead4b71467ec2e6119d1b08beed"}, + {file = "xattr-1.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fd35f46cb0154f7033f9d5d0960f226857acb0d1e0d71fd7af18ed84663007c"}, + {file = "xattr-1.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d956478e9bb98a1efd20ebc6e5703497c1d2d690d5a13c4df4abf59881eed50"}, + {file = "xattr-1.1.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f25dfdcd974b700fb04a40e14a664a80227ee58e02ea062ac241f0d7dc54b4e"}, + {file = "xattr-1.1.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:33b63365c1fcbc80a79f601575bac0d6921732e0245b776876f3db3fcfefe22d"}, + {file = "xattr-1.1.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:544542be95c9b49e211f0a463758f200de88ba6d5a94d3c4f42855a484341acd"}, + {file = "xattr-1.1.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac14c9893f3ea046784b7702be30889b200d31adcd2e6781a8a190b6423f9f2d"}, + {file = "xattr-1.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bb4bbe37ba95542081890dd34fa5347bef4651e276647adaa802d5d0d7d86452"}, + {file = "xattr-1.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3da489ecef798705f9a39ea8cea4ead0d1eeed55f92c345add89740bd930bab6"}, + {file = "xattr-1.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:798dd0cbe696635a6f74b06fc430818bf9c3b24314e1502eadf67027ab60c9b0"}, + {file = "xattr-1.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2b6361626efad5eb5a6bf8172c6c67339e09397ee8140ec41258737bea9681"}, + {file = "xattr-1.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7fa20a0c9ce022d19123b1c5b848d00a68b837251835a7929fe041ee81dcd0"}, + {file = "xattr-1.1.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e20eeb08e2c57fc7e71f050b1cfae35cbb46105449853a582bf53fd23c5379e"}, + {file = 
"xattr-1.1.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:477370e75821bded901487e5e752cffe554d1bd3bd4839b627d4d1ee8c95a093"}, + {file = "xattr-1.1.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a8682091cd34a9f4a93c8aaea4101aae99f1506e24da00a3cc3dd2eca9566f21"}, + {file = "xattr-1.1.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2e079b3b1a274ba2121cf0da38bbe5c8d2fb1cc49ecbceb395ce20eb7d69556d"}, + {file = "xattr-1.1.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ae6579dea05bf9f335a082f711d5924a98da563cac72a2d550f5b940c401c0e9"}, + {file = "xattr-1.1.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd6038ec9df2e67af23c212693751481d5f7e858156924f14340376c48ed9ac7"}, + {file = "xattr-1.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:608b2877526674eb15df4150ef4b70b7b292ae00e65aecaae2f192af224be200"}, + {file = "xattr-1.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54dad1a6a998c6a23edfd25e99f4d38e9b942d54e518570044edf8c767687ea"}, + {file = "xattr-1.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0dab6ff72bb2b508f3850c368f8e53bd706585012676e1f71debba3310acde8"}, + {file = "xattr-1.1.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a3c54c6af7cf09432b2c461af257d5f4b1cb2d59eee045f91bacef44421a46d"}, + {file = "xattr-1.1.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e346e05a158d554639fbf7a0db169dc693c2d2260c7acb3239448f1ff4a9d67f"}, + {file = "xattr-1.1.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3ff6d9e2103d0d6e5fcd65b85a2005b66ea81c0720a37036445faadc5bbfa424"}, + {file = "xattr-1.1.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7a2ee4563c6414dfec0d1ac610f59d39d5220531ae06373eeb1a06ee37cd193f"}, + {file = "xattr-1.1.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878df1b38cfdadf3184ad8c7b0f516311128d5597b60ac0b3486948953658a83"}, + {file = "xattr-1.1.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0c9b8350244a1c5454f93a8d572628ff71d7e2fc2f7480dcf4c4f0e8af3150fe"}, + {file = "xattr-1.1.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a46bf48fb662b8bd745b78bef1074a1e08f41a531168de62b5d7bd331dadb11a"}, + {file = "xattr-1.1.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83fc3c07b583777b1dda6355329f75ca6b7179fe0d1002f1afe0ef96f7e3b5de"}, + {file = "xattr-1.1.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6308b19cff71441513258699f0538394fad5d66e1d324635207a97cb076fd439"}, + {file = "xattr-1.1.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48c00ddc15ddadc9c729cd9504dabf50adb3d9c28f647d4ac9a3df45a046b1a0"}, + {file = "xattr-1.1.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a06136196f26293758e1b244200b73156a0274af9a7349fa201c71c7af3bb9e8"}, + {file = "xattr-1.1.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8fc2631a3c6cfcdc71f7f0f847461839963754e76a2015de71e7e71e3304abc0"}, + {file = "xattr-1.1.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d6e1e835f9c938d129dd45e7eb52ebf7d2d6816323dab93ce311bf331f7d2328"}, + {file = "xattr-1.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:60dea2d369a6484e8b7136224fc2971e10e2c46340d83ab780924afe78c90066"}, + {file = "xattr-1.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:85c2b778b09d919523f80f244d799a142302582d76da18903dc693207c4020b0"}, + {file = "xattr-1.1.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ee0abba9e1b890d39141714ff43e9666864ca635ea8a5a2194d989e6b17fe862"}, + {file = "xattr-1.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e4174ba7f51f46b95ea7918d907c91cd579575d59e6a2f22ca36a0551026737"}, + {file = "xattr-1.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2b05e52e99d82d87528c54c2c5c8c5fb0ba435f85ac6545511aeea136e49925"}, + {file = "xattr-1.1.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a3696fad746be37de34eb73c60ea67144162bd08106a5308a90ce9dea9a3287"}, + {file = "xattr-1.1.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a3a7149439a26b68904c14fdc4587cde4ac7d80303e9ff0fefcfd893b698c976"}, + {file = "xattr-1.1.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:507b36a126ce900dbfa35d4e2c2db92570c933294cba5d161ecd6a89f7b52f43"}, + {file = "xattr-1.1.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9392b417b54923e031041940d396b1d709df1d3779c6744454e1f1c1f4dad4f5"}, + {file = "xattr-1.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e9f00315e6c02943893b77f544776b49c756ac76960bea7cb8d7e1b96aefc284"}, + {file = "xattr-1.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c8f98775065260140efb348b1ff8d50fd66ddcbf0c685b76eb1e87b380aaffb3"}, + {file = "xattr-1.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b471c6a515f434a167ca16c5c15ff34ee42d11956baa749173a8a4e385ff23e7"}, + {file = "xattr-1.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee0763a1b7ceb78ba2f78bee5f30d1551dc26daafcce4ac125115fa1def20519"}, + {file = "xattr-1.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:099e6e9ce7999b403d36d9cf943105a3d25d8233486b54ec9d1b78623b050433"}, + {file = "xattr-1.1.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e56faef9dde8d969f0d646fb6171883693f88ae39163ecd919ec707fbafa85"}, + {file = "xattr-1.1.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:328156d4e594c9ae63e1072503c168849e601a153ad37f0290743544332d6b6f"}, + {file = "xattr-1.1.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a57a55a27c7864d6916344c9a91776afda6c3b8b2209f8a69b79cdba93fbe128"}, + {file = "xattr-1.1.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3c19cdde08b040df1e99d2500bf8a9cff775ab0e6fa162bf8afe6d84aa93ed04"}, + {file = "xattr-1.1.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c72667f19d3a9acf324aed97f58861d398d87e42314731e7c6ab3ac7850c971"}, + {file = "xattr-1.1.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:67ae934d75ea2563fc48a27c5945749575c74a6de19fdd38390917ddcb0e4f24"}, + {file = "xattr-1.1.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a1b0c348dd8523554dc535540d2046c0c8a535bb086561d8359f3667967b6ca"}, + {file = "xattr-1.1.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22284255d2a8e8f3da195bd8e8d43ce674dbc7c38d38cb6ecfb37fae7755d31f"}, + {file = "xattr-1.1.4-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b38aac5ef4381c26d3ce147ca98fba5a78b1e5bcd6be6755b4908659f2705c6d"}, + {file = 
"xattr-1.1.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:803f864af528f6f763a5be1e7b1ccab418e55ae0e4abc8bda961d162f850c991"}, + {file = "xattr-1.1.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:40354ebfb5cecd60a5fbb9833a8a452d147486b0ffec547823658556625d98b5"}, + {file = "xattr-1.1.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2abaf5d06be3361bfa8e0db2ee123ba8e92beab5bceed5e9d7847f2145a32e04"}, + {file = "xattr-1.1.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e638e5ffedc3565242b5fa3296899d35161bad771f88d66277b58f03a1ba9fe"}, + {file = "xattr-1.1.4-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0597e919d116ec39997804288d77bec3777228368efc0f2294b84a527fc4f9c2"}, + {file = "xattr-1.1.4-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee9455c501d19f065527afda974418b3ef7c61e85d9519d122cd6eb3cb7a00"}, + {file = "xattr-1.1.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:89ed62ce430f5789e15cfc1ccabc172fd8b349c3a17c52d9e6c64ecedf08c265"}, + {file = "xattr-1.1.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e25b824f4b9259cd8bb6e83c4873cf8bf080f6e4fa034a02fe778e07aba8d345"}, + {file = "xattr-1.1.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8fba66faa0016dfc0af3dd7ac5782b5786a1dfb851f9f3455e266f94c2a05a04"}, + {file = "xattr-1.1.4-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ec4b0c3e0a7bcd103f3cf31dd40c349940b2d4223ce43d384a3548992138ef1"}, + {file = "xattr-1.1.4.tar.gz", hash = "sha256:b7b02ecb2270da5b7e7deaeea8f8b528c17368401c2b9d5f63e91f545b45d372"}, +] + +[package.dependencies] +cffi = ">=1.16.0" + +[package.extras] +test = ["pytest"] + [[package]] name = "yarl" -version = "1.18.0" +version = "1.20.0" description = "Yet another URL library" optional = false python-versions = ">=3.9" -files = [ - {file = "yarl-1.18.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:074fee89caab89a97e18ef5f29060ef61ba3cae6cd77673acc54bfdd3214b7b7"}, - {file = "yarl-1.18.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b026cf2c32daf48d90c0c4e406815c3f8f4cfe0c6dfccb094a9add1ff6a0e41a"}, - {file = "yarl-1.18.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ae38bd86eae3ba3d2ce5636cc9e23c80c9db2e9cb557e40b98153ed102b5a736"}, - {file = "yarl-1.18.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:685cc37f3f307c6a8e879986c6d85328f4c637f002e219f50e2ef66f7e062c1d"}, - {file = "yarl-1.18.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8254dbfce84ee5d1e81051ee7a0f1536c108ba294c0fdb5933476398df0654f3"}, - {file = "yarl-1.18.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20de4a8b04de70c49698dc2390b7fd2d18d424d3b876371f9b775e2b462d4b41"}, - {file = "yarl-1.18.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0a2074a37285570d54b55820687de3d2f2b9ecf1b714e482e48c9e7c0402038"}, - {file = "yarl-1.18.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f576ed278860df2721a5d57da3381040176ef1d07def9688a385c8330db61a1"}, - {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:3a3709450a574d61be6ac53d582496014342ea34876af8dc17cc16da32826c9a"}, - {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:bd80ed29761490c622edde5dd70537ca8c992c2952eb62ed46984f8eff66d6e8"}, - {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:32141e13a1d5a48525e519c9197d3f4d9744d818d5c7d6547524cc9eccc8971e"}, - {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8b8d3e4e014fb4274f1c5bf61511d2199e263909fb0b8bda2a7428b0894e8dc6"}, - {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:701bb4a8f4de191c8c0cc9a1e6d5142f4df880e9d1210e333b829ca9425570ed"}, - {file = "yarl-1.18.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a45d94075ac0647621eaaf693c8751813a3eccac455d423f473ffed38c8ac5c9"}, - {file = "yarl-1.18.0-cp310-cp310-win32.whl", hash = "sha256:34176bfb082add67cb2a20abd85854165540891147f88b687a5ed0dc225750a0"}, - {file = "yarl-1.18.0-cp310-cp310-win_amd64.whl", hash = "sha256:73553bbeea7d6ec88c08ad8027f4e992798f0abc459361bf06641c71972794dc"}, - {file = "yarl-1.18.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b8e8c516dc4e1a51d86ac975b0350735007e554c962281c432eaa5822aa9765c"}, - {file = "yarl-1.18.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2e6b4466714a73f5251d84b471475850954f1fa6acce4d3f404da1d55d644c34"}, - {file = "yarl-1.18.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c893f8c1a6d48b25961e00922724732d00b39de8bb0b451307482dc87bddcd74"}, - {file = "yarl-1.18.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13aaf2bdbc8c86ddce48626b15f4987f22e80d898818d735b20bd58f17292ee8"}, - {file = "yarl-1.18.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dd21c0128e301851de51bc607b0a6da50e82dc34e9601f4b508d08cc89ee7929"}, - {file = "yarl-1.18.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:205de377bd23365cd85562c9c6c33844050a93661640fda38e0567d2826b50df"}, - {file = "yarl-1.18.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed69af4fe2a0949b1ea1d012bf065c77b4c7822bad4737f17807af2adb15a73c"}, - {file = "yarl-1.18.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e1c18890091aa3cc8a77967943476b729dc2016f4cfe11e45d89b12519d4a93"}, - {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:91b8fb9427e33f83ca2ba9501221ffaac1ecf0407f758c4d2f283c523da185ee"}, - {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:536a7a8a53b75b2e98ff96edb2dfb91a26b81c4fed82782035767db5a465be46"}, - {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a64619a9c47c25582190af38e9eb382279ad42e1f06034f14d794670796016c0"}, - {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c73a6bbc97ba1b5a0c3c992ae93d721c395bdbb120492759b94cc1ac71bc6350"}, - {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:a173401d7821a2a81c7b47d4e7d5c4021375a1441af0c58611c1957445055056"}, - {file = "yarl-1.18.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7520e799b1f84e095cce919bd6c23c9d49472deeef25fe1ef960b04cca51c3fc"}, - {file = "yarl-1.18.0-cp311-cp311-win32.whl", hash = "sha256:c4cb992d8090d5ae5f7afa6754d7211c578be0c45f54d3d94f7781c495d56716"}, - {file = "yarl-1.18.0-cp311-cp311-win_amd64.whl", hash = "sha256:52c136f348605974c9b1c878addd6b7a60e3bf2245833e370862009b86fa4689"}, - {file = 
"yarl-1.18.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1ece25e2251c28bab737bdf0519c88189b3dd9492dc086a1d77336d940c28ced"}, - {file = "yarl-1.18.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:454902dc1830d935c90b5b53c863ba2a98dcde0fbaa31ca2ed1ad33b2a7171c6"}, - {file = "yarl-1.18.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:01be8688fc211dc237e628fcc209dda412d35de7642453059a0553747018d075"}, - {file = "yarl-1.18.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d26f1fa9fa2167bb238f6f4b20218eb4e88dd3ef21bb8f97439fa6b5313e30d"}, - {file = "yarl-1.18.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b234a4a9248a9f000b7a5dfe84b8cb6210ee5120ae70eb72a4dcbdb4c528f72f"}, - {file = "yarl-1.18.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe94d1de77c4cd8caff1bd5480e22342dbd54c93929f5943495d9c1e8abe9f42"}, - {file = "yarl-1.18.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b4c90c5363c6b0a54188122b61edb919c2cd1119684999d08cd5e538813a28e"}, - {file = "yarl-1.18.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:49a98ecadc5a241c9ba06de08127ee4796e1009555efd791bac514207862b43d"}, - {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9106025c7f261f9f5144f9aa7681d43867eed06349a7cfb297a1bc804de2f0d1"}, - {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:f275ede6199d0f1ed4ea5d55a7b7573ccd40d97aee7808559e1298fe6efc8dbd"}, - {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:f7edeb1dcc7f50a2c8e08b9dc13a413903b7817e72273f00878cb70e766bdb3b"}, - {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c083f6dd6951b86e484ebfc9c3524b49bcaa9c420cb4b2a78ef9f7a512bfcc85"}, - {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:80741ec5b471fbdfb997821b2842c59660a1c930ceb42f8a84ba8ca0f25a66aa"}, - {file = "yarl-1.18.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b1a3297b9cad594e1ff0c040d2881d7d3a74124a3c73e00c3c71526a1234a9f7"}, - {file = "yarl-1.18.0-cp312-cp312-win32.whl", hash = "sha256:cd6ab7d6776c186f544f893b45ee0c883542b35e8a493db74665d2e594d3ca75"}, - {file = "yarl-1.18.0-cp312-cp312-win_amd64.whl", hash = "sha256:039c299a0864d1f43c3e31570045635034ea7021db41bf4842693a72aca8df3a"}, - {file = "yarl-1.18.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6fb64dd45453225f57d82c4764818d7a205ee31ce193e9f0086e493916bd4f72"}, - {file = "yarl-1.18.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3adaaf9c6b1b4fc258584f4443f24d775a2086aee82d1387e48a8b4f3d6aecf6"}, - {file = "yarl-1.18.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:da206d1ec78438a563c5429ab808a2b23ad7bc025c8adbf08540dde202be37d5"}, - {file = "yarl-1.18.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:576d258b21c1db4c6449b1c572c75d03f16a482eb380be8003682bdbe7db2f28"}, - {file = "yarl-1.18.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c60e547c0a375c4bfcdd60eef82e7e0e8698bf84c239d715f5c1278a73050393"}, - {file = "yarl-1.18.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3818eabaefb90adeb5e0f62f047310079d426387991106d4fbf3519eec7d90a"}, - {file = "yarl-1.18.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5f72421246c21af6a92fbc8c13b6d4c5427dfd949049b937c3b731f2f9076bd"}, - {file = 
"yarl-1.18.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7fa7d37f2ada0f42e0723632993ed422f2a679af0e200874d9d861720a54f53e"}, - {file = "yarl-1.18.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:42ba84e2ac26a3f252715f8ec17e6fdc0cbf95b9617c5367579fafcd7fba50eb"}, - {file = "yarl-1.18.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6a49ad0102c0f0ba839628d0bf45973c86ce7b590cdedf7540d5b1833ddc6f00"}, - {file = "yarl-1.18.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:96404e8d5e1bbe36bdaa84ef89dc36f0e75939e060ca5cd45451aba01db02902"}, - {file = "yarl-1.18.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:a0509475d714df8f6d498935b3f307cd122c4ca76f7d426c7e1bb791bcd87eda"}, - {file = "yarl-1.18.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:1ff116f0285b5c8b3b9a2680aeca29a858b3b9e0402fc79fd850b32c2bcb9f8b"}, - {file = "yarl-1.18.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2580c1d7e66e6d29d6e11855e3b1c6381971e0edd9a5066e6c14d79bc8967af"}, - {file = "yarl-1.18.0-cp313-cp313-win32.whl", hash = "sha256:14408cc4d34e202caba7b5ac9cc84700e3421a9e2d1b157d744d101b061a4a88"}, - {file = "yarl-1.18.0-cp313-cp313-win_amd64.whl", hash = "sha256:1db1537e9cb846eb0ff206eac667f627794be8b71368c1ab3207ec7b6f8c5afc"}, - {file = "yarl-1.18.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fa2c9cb607e0f660d48c54a63de7a9b36fef62f6b8bd50ff592ce1137e73ac7d"}, - {file = "yarl-1.18.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c0f4808644baf0a434a3442df5e0bedf8d05208f0719cedcd499e168b23bfdc4"}, - {file = "yarl-1.18.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7db9584235895a1dffca17e1c634b13870852094f6389b68dcc6338086aa7b08"}, - {file = "yarl-1.18.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:309f8d27d6f93ceeeb80aa6980e883aa57895270f7f41842b92247e65d7aeddf"}, - {file = "yarl-1.18.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:609ffd44fed2ed88d9b4ef62ee860cf86446cf066333ad4ce4123505b819e581"}, - {file = "yarl-1.18.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f172b8b2c72a13a06ea49225a9c47079549036ad1b34afa12d5491b881f5b993"}, - {file = "yarl-1.18.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d89ae7de94631b60d468412c18290d358a9d805182373d804ec839978b120422"}, - {file = "yarl-1.18.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:466d31fd043ef9af822ee3f1df8fdff4e8c199a7f4012c2642006af240eade17"}, - {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7609b8462351c4836b3edce4201acb6dd46187b207c589b30a87ffd1813b48dc"}, - {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d9d4f5e471e8dc49b593a80766c2328257e405f943c56a3dc985c125732bc4cf"}, - {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:67b336c15e564d76869c9a21316f90edf546809a5796a083b8f57c845056bc01"}, - {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:b212452b80cae26cb767aa045b051740e464c5129b7bd739c58fbb7deb339e7b"}, - {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:38b39b7b3e692b6c92b986b00137a3891eddb66311b229d1940dcbd4f025083c"}, - {file = "yarl-1.18.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a7ee6884a8848792d58b854946b685521f41d8871afa65e0d4a774954e9c9e89"}, - {file = "yarl-1.18.0-cp39-cp39-win32.whl", hash = 
"sha256:b4095c5019bb889aa866bf12ed4c85c0daea5aafcb7c20d1519f02a1e738f07f"}, - {file = "yarl-1.18.0-cp39-cp39-win_amd64.whl", hash = "sha256:2d90f2e4d16a5b0915ee065218b435d2ef619dd228973b1b47d262a6f7cd8fa5"}, - {file = "yarl-1.18.0-py3-none-any.whl", hash = "sha256:dbf53db46f7cf176ee01d8d98c39381440776fcda13779d269a8ba664f69bec0"}, - {file = "yarl-1.18.0.tar.gz", hash = "sha256:20d95535e7d833889982bfe7cc321b7f63bf8879788fee982c76ae2b24cfb715"}, +groups = ["main", "dev"] +files = [ + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f1f6670b9ae3daedb325fa55fbe31c22c8228f6e0b513772c2e1c623caa6ab22"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85a231fa250dfa3308f3c7896cc007a47bc76e9e8e8595c20b7426cac4884c62"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a06701b647c9939d7019acdfa7ebbfbb78ba6aa05985bb195ad716ea759a569"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7595498d085becc8fb9203aa314b136ab0516c7abd97e7d74f7bb4eb95042abe"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af5607159085dcdb055d5678fc2d34949bd75ae6ea6b4381e784bbab1c3aa195"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95b50910e496567434cb77a577493c26bce0f31c8a305135f3bda6a2483b8e10"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b594113a301ad537766b4e16a5a6750fcbb1497dcc1bc8a4daae889e6402a634"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:083ce0393ea173cd37834eb84df15b6853b555d20c52703e21fbababa8c129d2"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1a350a652bbbe12f666109fbddfdf049b3ff43696d18c9ab1531fbba1c977a"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fb0caeac4a164aadce342f1597297ec0ce261ec4532bbc5a9ca8da5622f53867"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d88cc43e923f324203f6ec14434fa33b85c06d18d59c167a0637164863b8e995"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e52d6ed9ea8fd3abf4031325dc714aed5afcbfa19ee4a89898d663c9976eb487"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ce360ae48a5e9961d0c730cf891d40698a82804e85f6e74658fb175207a77cb2"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:06d06c9d5b5bc3eb56542ceeba6658d31f54cf401e8468512447834856fb0e61"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c27d98f4e5c4060582f44e58309c1e55134880558f1add7a87c1bc36ecfade19"}, + {file = "yarl-1.20.0-cp310-cp310-win32.whl", hash = "sha256:f4d3fa9b9f013f7050326e165c3279e22850d02ae544ace285674cb6174b5d6d"}, + {file = "yarl-1.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:bc906b636239631d42eb8a07df8359905da02704a868983265603887ed68c076"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fdb5204d17cb32b2de2d1e21c7461cabfacf17f3645e4b9039f210c5d3378bf3"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eaddd7804d8e77d67c28d154ae5fab203163bd0998769569861258e525039d2a"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:634b7ba6b4a85cf67e9df7c13a7fb2e44fa37b5d34501038d174a63eaac25ee2"}, + {file = 
"yarl-1.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d409e321e4addf7d97ee84162538c7258e53792eb7c6defd0c33647d754172e"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ea52f7328a36960ba3231c6677380fa67811b414798a6e071c7085c57b6d20a9"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8703517b924463994c344dcdf99a2d5ce9eca2b6882bb640aa555fb5efc706a"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:077989b09ffd2f48fb2d8f6a86c5fef02f63ffe6b1dd4824c76de7bb01e4f2e2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0acfaf1da020253f3533526e8b7dd212838fdc4109959a2c53cafc6db611bff2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4230ac0b97ec5eeb91d96b324d66060a43fd0d2a9b603e3327ed65f084e41f8"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a6a1e6ae21cdd84011c24c78d7a126425148b24d437b5702328e4ba640a8902"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:86de313371ec04dd2531f30bc41a5a1a96f25a02823558ee0f2af0beaa7ca791"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dd59c9dd58ae16eaa0f48c3d0cbe6be8ab4dc7247c3ff7db678edecbaf59327f"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a0bc5e05f457b7c1994cc29e83b58f540b76234ba6b9648a4971ddc7f6aa52da"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c9471ca18e6aeb0e03276b5e9b27b14a54c052d370a9c0c04a68cefbd1455eb4"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40ed574b4df723583a26c04b298b283ff171bcc387bc34c2683235e2487a65a5"}, + {file = "yarl-1.20.0-cp311-cp311-win32.whl", hash = "sha256:db243357c6c2bf3cd7e17080034ade668d54ce304d820c2a58514a4e51d0cfd6"}, + {file = "yarl-1.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c12cd754d9dbd14204c328915e23b0c361b88f3cffd124129955e60a4fbfcfb"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e06b9f6cdd772f9b665e5ba8161968e11e403774114420737f7884b5bd7bdf6f"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b9ae2fbe54d859b3ade40290f60fe40e7f969d83d482e84d2c31b9bff03e359e"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d12b8945250d80c67688602c891237994d203d42427cb14e36d1a732eda480e"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018"}, + {file = 
"yarl-1.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b"}, + {file = "yarl-1.20.0-cp312-cp312-win32.whl", hash = "sha256:839de4c574169b6598d47ad61534e6981979ca2c820ccb77bf70f4311dd2cc64"}, + {file = "yarl-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d7dbbe44b443b0c4aa0971cb07dcb2c2060e4a9bf8d1301140a33a93c98e18c"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e"}, + {file = 
"yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384"}, + {file = "yarl-1.20.0-cp313-cp313-win32.whl", hash = "sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62"}, + {file = "yarl-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f"}, + {file = "yarl-1.20.0-cp313-cp313t-win32.whl", hash = "sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac"}, + {file = "yarl-1.20.0-cp313-cp313t-win_amd64.whl", hash = "sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:119bca25e63a7725b0c9d20ac67ca6d98fa40e5a894bd5d4686010ff73397914"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35d20fb919546995f1d8c9e41f485febd266f60e55383090010f272aca93edcc"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:484e7a08f72683c0f160270566b4395ea5412b4359772b98659921411d32ad26"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:8d8a3d54a090e0fff5837cd3cc305dd8a07d3435a088ddb1f65e33b322f66a94"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f0cf05ae2d3d87a8c9022f3885ac6dea2b751aefd66a4f200e408a61ae9b7f0d"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a884b8974729e3899d9287df46f015ce53f7282d8d3340fa0ed57536b440621c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8d8aa8dd89ffb9a831fedbcb27d00ffd9f4842107d52dc9d57e64cb34073d5c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4e88d6c3c8672f45a30867817e4537df1bbc6f882a91581faf1f6d9f0f1b5a"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdb77efde644d6f1ad27be8a5d67c10b7f769804fff7a966ccb1da5a4de4b656"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4ba5e59f14bfe8d261a654278a0f6364feef64a794bd456a8c9e823071e5061c"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d0bf955b96ea44ad914bc792c26a0edcd71b4668b93cbcd60f5b0aeaaed06c64"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:27359776bc359ee6eaefe40cb19060238f31228799e43ebd3884e9c589e63b20"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:04d9c7a1dc0a26efb33e1acb56c8849bd57a693b85f44774356c92d610369efa"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:faa709b66ae0e24c8e5134033187a972d849d87ed0a12a0366bedcc6b5dc14a5"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44869ee8538208fe5d9342ed62c11cc6a7a1af1b3d0bb79bb795101b6e77f6e0"}, + {file = "yarl-1.20.0-cp39-cp39-win32.whl", hash = "sha256:b7fa0cb9fd27ffb1211cde944b41f5c67ab1c13a13ebafe470b1e206b8459da8"}, + {file = "yarl-1.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:d4fad6e5189c847820288286732075f213eabf81be4d08d6cc309912e62be5b7"}, + {file = "yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124"}, + {file = "yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307"}, ] [package.dependencies] idna = ">=2.0" multidict = ">=4.0" -propcache = ">=0.2.0" +propcache = ">=0.2.1" + +[[package]] +name = "zstandard" +version = "0.23.0" +description = "Zstandard bindings for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, + {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e"}, + {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0"}, + {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c"}, + {file = "zstandard-0.23.0-cp310-cp310-win32.whl", hash = "sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813"}, + {file = "zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e"}, + {file = "zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca"}, + {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003"}, + {file = 
"zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78"}, + {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473"}, + {file = "zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160"}, + {file = "zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0"}, + {file = "zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094"}, + {file = "zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373"}, + {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90"}, + {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35"}, + {file = "zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d"}, + {file = "zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9"}, + {file = "zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2"}, + {file = 
"zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed"}, + {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057"}, + {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33"}, + {file = "zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd"}, + {file = "zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ef3775758346d9ac6214123887d25c7061c92afe1f2b354f9388e9e4d48acfc"}, + {file = "zstandard-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4051e406288b8cdbb993798b9a45c59a4896b6ecee2f875424ec10276a895740"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2d1a054f8f0a191004675755448d12be47fa9bebbcffa3cdf01db19f2d30a54"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f83fa6cae3fff8e98691248c9320356971b59678a17f20656a9e59cd32cee6d8"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32ba3b5ccde2d581b1e6aa952c836a6291e8435d788f656fe5976445865ae045"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f146f50723defec2975fb7e388ae3a024eb7151542d1599527ec2aa9cacb152"}, + {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1bfe8de1da6d104f15a60d4a8a768288f66aa953bbe00d027398b93fb9680b26"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:29a2bc7c1b09b0af938b7a8343174b987ae021705acabcbae560166567f5a8db"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61f89436cbfede4bc4e91b4397eaa3e2108ebe96d05e93d6ccc95ab5714be512"}, + {file = 
"zstandard-0.23.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53ea7cdc96c6eb56e76bb06894bcfb5dfa93b7adcf59d61c6b92674e24e2dd5e"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a4ae99c57668ca1e78597d8b06d5af837f377f340f4cce993b551b2d7731778d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:379b378ae694ba78cef921581ebd420c938936a153ded602c4fea612b7eaa90d"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:50a80baba0285386f97ea36239855f6020ce452456605f262b2d33ac35c7770b"}, + {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:61062387ad820c654b6a6b5f0b94484fa19515e0c5116faf29f41a6bc91ded6e"}, + {file = "zstandard-0.23.0-cp38-cp38-win32.whl", hash = "sha256:b8c0bd73aeac689beacd4e7667d48c299f61b959475cdbb91e7d3d88d27c56b9"}, + {file = "zstandard-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:a05e6d6218461eb1b4771d973728f0133b2a4613a6779995df557f70794fd60f"}, + {file = "zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb"}, + {file = "zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58"}, + {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2"}, + {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5"}, + {file = "zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274"}, + {file = "zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58"}, + {file = "zstandard-0.23.0.tar.gz", hash = 
"sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09"}, +] + +[package.dependencies] +cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} + +[package.extras] +cffi = ["cffi (>=1.11)"] [metadata] -lock-version = "2.0" -python-versions = "^3.12" -content-hash = "e2c38582470adec925b3b5dd3f4ca2e7475fd824a6575b1f4e9498952ce22d51" +lock-version = "2.1" +python-versions = "^3.13" +content-hash = "fe4b80b2de0d983930c20ec5e04a9f2752143a2abc2a59ccdbfb4714b4bdde47" diff --git a/projects/background_jobs/poetry.lock b/projects/background_jobs/poetry.lock deleted file mode 100644 index 81a12ea79..000000000 --- a/projects/background_jobs/poetry.lock +++ /dev/null @@ -1,2671 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. - -[[package]] -name = "aiofile" -version = "3.9.0" -description = "Asynchronous file operations." -optional = false -python-versions = "<4,>=3.8" -files = [ - {file = "aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa"}, - {file = "aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b"}, -] - -[package.dependencies] -caio = ">=0.9.0,<0.10.0" - -[[package]] -name = "aiofiles" -version = "24.1.0" -description = "File support for asyncio." -optional = false -python-versions = ">=3.8" -files = [ - {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, - {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, -] - -[[package]] -name = "alembic" -version = "1.14.0" -description = "A database migration tool for SQLAlchemy." 
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"},
-    {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"},
-]
-
-[package.dependencies]
-Mako = "*"
-SQLAlchemy = ">=1.3.0"
-typing-extensions = ">=4"
-
-[package.extras]
-tz = ["backports.zoneinfo"]
-
-[[package]]
-name = "annotated-types"
-version = "0.7.0"
-description = "Reusable constraint types to use with typing.Annotated"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"},
-    {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"},
-]
-
-[[package]]
-name = "anyio"
-version = "4.4.0"
-description = "High level compatibility layer for multiple asynchronous event loop implementations"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"},
-    {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"},
-]
-
-[package.dependencies]
-idna = ">=2.8"
-sniffio = ">=1.1"
-
-[package.extras]
-doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
-test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"]
-trio = ["trio (>=0.23)"]
-
-[[package]]
-name = "asyncache"
-version = "0.3.1"
-description = "Helpers to use cachetools with async code."
-optional = false
-python-versions = ">=3.8,<4.0"
-files = [
-    {file = "asyncache-0.3.1-py3-none-any.whl", hash = "sha256:ef20a1024d265090dd1e0785c961cf98b9c32cc7d9478973dcf25ac1b80011f5"},
-    {file = "asyncache-0.3.1.tar.gz", hash = "sha256:9a1e60a75668e794657489bdea6540ee7e3259c483517b934670db7600bf5035"},
-]
-
-[package.dependencies]
-cachetools = ">=5.2.0,<6.0.0"
-
-[[package]]
-name = "asyncpg"
-version = "0.30.0"
-description = "An asyncio PostgreSQL driver"
-optional = false
-python-versions = ">=3.8.0"
-files = [
-    {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"},
-    {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"},
-    {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f"},
-    {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af"},
-    {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75"},
-    {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f"},
-    {file = "asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf"},
-    {file = "asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50"},
-    {file = "asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a"},
-    {file = "asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed"},
-    {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a"},
-    {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956"},
-    {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056"},
-    {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454"},
-    {file = "asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d"},
-    {file = "asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f"},
-    {file = "asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e"},
-    {file = "asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a"},
-    {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3"},
-    {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737"},
-    {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a"},
-    {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af"},
-    {file = "asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e"},
-    {file = "asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305"},
-    {file = "asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70"},
-    {file = "asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3"},
-    {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33"},
-    {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4"},
-    {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4"},
-    {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba"},
-    {file = "asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590"},
-    {file = "asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e"},
-    {file = "asyncpg-0.30.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d"},
-    {file = "asyncpg-0.30.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168"},
-    {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb"},
-    {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f"},
-    {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38"},
-    {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34"},
-    {file = "asyncpg-0.30.0-cp38-cp38-win32.whl", hash = "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4"},
-    {file = "asyncpg-0.30.0-cp38-cp38-win_amd64.whl", hash = "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b"},
-    {file = "asyncpg-0.30.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad"},
-    {file = "asyncpg-0.30.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff"},
-    {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708"},
-    {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144"},
-    {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb"},
-    {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547"},
-    {file = "asyncpg-0.30.0-cp39-cp39-win32.whl", hash = "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a"},
-    {file = "asyncpg-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773"},
-    {file = "asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851"},
-]
-
-[package.extras]
-docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"]
-gssauth = ["gssapi", "sspilib"]
-test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi", "k5test", "mypy (>=1.8.0,<1.9.0)", "sspilib", "uvloop (>=0.15.3)"]
-
-[[package]]
-name = "authlib"
-version = "1.3.2"
-description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc"},
-    {file = "authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2"},
-]
-
-[package.dependencies]
-cryptography = "*"
-
-[[package]]
-name = "authzed"
-version = "1.1.0"
-description = "Client library for SpiceDB."
-optional = false
-python-versions = "<4.0,>=3.8"
-files = [
-    {file = "authzed-1.1.0-py3-none-any.whl", hash = "sha256:1c37038655c55d054b5caf918d60d680262fda4bc2787dc83576b4424e358214"},
-    {file = "authzed-1.1.0.tar.gz", hash = "sha256:6e1300ff75af1840acdb3e0b2bc0dec31a8cf631c4ac6fc1ac674b9ea02d043a"},
-]
-
-[package.dependencies]
-googleapis-common-protos = ">=1.65.0,<2.0.0"
-grpc-interceptor = ">=0.15.4,<0.16.0"
-grpcio = ">=1.63,<2.0"
-protobuf = ">=5.26,<6"
-
-[[package]]
-name = "avro-preprocessor"
-version = "0.5.1"
-description = "A preprocessor for Avro Schemata"
-optional = false
-python-versions = "*"
-files = [
-    {file = "avro-preprocessor-0.5.1.tar.gz", hash = "sha256:d878ea5134223580ba5f6636d812c844cb0f7552025755306b1fa483f015bd5f"},
-    {file = "avro_preprocessor-0.5.1-py3-none-any.whl", hash = "sha256:04c62a47d97bf3f5b025d9e0c25d6ae3f4cac40cef89c3530bd7a3ed8affe73a"},
-]
-
-[package.dependencies]
-json5 = ">=0.9.21"
-networkx = ">=2.8.7"
-pygments = ">=2.13.0"
-requests = ">=2.28.1"
-"ruamel.yaml" = ">=0.17.21"
-"ruamel.yaml.clib" = ">=0.2.6"
-
-[[package]]
-name = "cachetools"
-version = "5.3.3"
-description = "Extensible memoizing collections and decorators"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"},
-    {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"},
-]
-
-[[package]]
-name = "caio"
-version = "0.9.17"
-description = "Asynchronous file IO for Linux MacOS or Windows."
-optional = false
-python-versions = "<4,>=3.7"
-files = [
-    {file = "caio-0.9.17-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3f69395fdd45c115b2ef59732e3c8664722a2b51de2d6eedb3d354b2f5f3be3c"},
-    {file = "caio-0.9.17-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3028b746e9ec7f6d6ebb386a7fd8caf0eebed5d6e6b4f18c8ef25861934b1673"},
-    {file = "caio-0.9.17-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:079730a353bbde03796fab681e969472eace09ffbe5000e584868a7fe389ba6f"},
-    {file = "caio-0.9.17-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:549caa51b475877fe32856a26fe937366ae7a1c23a9727005b441db9abb12bcc"},
-    {file = "caio-0.9.17-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0ddb253b145a53ecca76381677ce465bc5efeaecb6aaf493fac43ae79659f0fb"},
-    {file = "caio-0.9.17-cp312-cp312-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e320b0ea371c810359934f8e8fe81777c493cc5fb4d41de44277cbe7336e74"},
-    {file = "caio-0.9.17-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a39a49e279f82aa022f0786339d45d9550b5aa3e46eec7d08e0f351c503df0a5"},
-    {file = "caio-0.9.17-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e96925b9f15f43e6ef1d42a83edfd937eb11a984cb6ef7c10527e963595497"},
-    {file = "caio-0.9.17-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fca916240597005d2b734f1442fa3c3cfb612bf46e0978b5232e5492a371de38"},
-    {file = "caio-0.9.17-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40bd0afbd3491d1e407bcf74e3a9e9cc67a7f290ed29518325194184d63cc2b6"},
-    {file = "caio-0.9.17-py3-none-any.whl", hash = "sha256:c55d4dc6b3a36f93237ecd6360e1c131c3808bc47d4191a130148a99b80bb311"},
-    {file = "caio-0.9.17.tar.gz", hash = "sha256:8f30511526814d961aeef389ea6885273abe6c655f1e08abbadb95d12fdd9b4f"},
-]
-
-[package.extras]
-develop = ["aiomisc-pytest", "pytest", "pytest-cov"]
-
-[[package]]
-name = "casefy"
-version = "0.1.7"
-description = "Utilities for string case conversion."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "casefy-0.1.7-py3-none-any.whl", hash = "sha256:ab05ff1c67f2a8e62d9f8986fa9a849416d61ac5413ec57d1f827b4f36589cf6"},
-    {file = "casefy-0.1.7.tar.gz", hash = "sha256:6accce985a64b9edb2a610a29ac489d78fac80e52ff8f2d137e294f2f92b8027"},
-]
-
-[[package]]
-name = "certifi"
-version = "2024.6.2"
-description = "Python package for providing Mozilla's CA Bundle."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"},
-    {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"},
-]
-
-[[package]]
-name = "cffi"
-version = "1.16.0"
-description = "Foreign Function Interface for Python calling C code."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"},
-    {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"},
-    {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"},
-    {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"},
-    {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"},
-    {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"},
-    {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"},
-    {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"},
-    {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"},
-    {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"},
-    {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"},
-    {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"},
-    {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"},
-    {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"},
-    {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"},
-    {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"},
-    {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"},
-    {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"},
-    {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"},
-    {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"},
-    {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"},
-    {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"},
-    {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"},
-    {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"},
-    {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"},
-    {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"},
-    {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"},
-    {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"},
-    {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"},
-    {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"},
-    {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"},
-    {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"},
-    {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"},
-    {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"},
-    {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"},
-    {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"},
-    {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"},
-    {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"},
-    {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"},
-    {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"},
-]
-
-[package.dependencies]
-pycparser = "*"
-
-[[package]]
-name = "charset-normalizer"
-version = "3.3.2"
-description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
-optional = false
-python-versions = ">=3.7.0"
-files = [
-    {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"},
-    {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"},
-    {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"},
-    {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"},
-    {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"},
-    {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"},
-    {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"},
-    {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"},
-]
-
-[[package]]
-name = "cryptography"
-version = "44.0.0"
-description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
-optional = false
-python-versions = "!=3.9.0,!=3.9.1,>=3.7"
-files = [
-    {file = "cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123"},
-    {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092"},
-    {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831c3c4d0774e488fdc83a1923b49b9957d33287de923d58ebd3cec47a0ae43f"},
-    {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"},
-    {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"},
-    {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"},
-    {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385"},
-    {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"},
-    {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"},
-    {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"},
-    {file = "cryptography-44.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:abc998e0c0eee3c8a1904221d3f67dcfa76422b23620173e28c11d3e626c21bd"},
-    {file = "cryptography-44.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:660cb7312a08bc38be15b696462fa7cc7cd85c3ed9c576e81f4dc4d8b2b31591"},
-    {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1923cb251c04be85eec9fda837661c67c1049063305d6be5721643c22dd4e2b7"},
-    {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:404fdc66ee5f83a1388be54300ae978b2efd538018de18556dde92575e05defc"},
-    {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"},
-    {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"},
-    {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"},
-    {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba"},
-    {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"},
-    {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"},
-    {file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"},
-    {file = "cryptography-44.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:708ee5f1bafe76d041b53a4f95eb28cdeb8d18da17e597d46d7833ee59b97ede"},
-    {file = "cryptography-44.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37d76e6863da3774cd9db5b409a9ecfd2c71c981c38788d3fcfaf177f447b731"},
-    {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f677e1268c4e23420c3acade68fac427fffcb8d19d7df95ed7ad17cdef8404f4"},
-    {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5e7cb1e5e56ca0933b4873c0220a78b773b24d40d186b6738080b73d3d0a756"},
-    {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:8b3e6eae66cf54701ee7d9c83c30ac0a1e3fa17be486033000f2a73a12ab507c"},
-    {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:be4ce505894d15d5c5037167ffb7f0ae90b7be6f2a98f9a5c3442395501c32fa"},
-    {file = "cryptography-44.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c"},
-    {file = "cryptography-44.0.0.tar.gz", hash = "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02"},
-]
-
-[package.dependencies]
-cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
-
-[package.extras]
-docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"]
-docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
-nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"]
-pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
-sdist = ["build (>=1.0.0)"]
-ssh = ["bcrypt (>=3.1.5)"]
-test = ["certifi (>=2024)", "cryptography-vectors (==44.0.0)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
-test-randomorder = ["pytest-randomly"]
-
-[[package]]
-name = "dacite"
-version = "1.8.1"
-description = "Simple creation of data classes from dictionaries."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "dacite-1.8.1-py3-none-any.whl", hash = "sha256:cc31ad6fdea1f49962ea42db9421772afe01ac5442380d9a99fcf3d188c61afe"},
-]
-
-[package.extras]
-dev = ["black", "coveralls", "mypy", "pre-commit", "pylint", "pytest (>=5)", "pytest-benchmark", "pytest-cov"]
-
-[[package]]
-name = "dataclasses-avroschema"
-version = "0.65.4"
-description = "Generate Avro Schemas from Python classes. Serialize/Deserialize python instances with avro schemas"
-optional = false
-python-versions = "<4.0,>=3.9"
-files = [
-    {file = "dataclasses_avroschema-0.65.4-py3-none-any.whl", hash = "sha256:f9a12541c73dfd79d68be4e873b0045b38fc03f31457e76102c91c0df75958d9"},
-    {file = "dataclasses_avroschema-0.65.4.tar.gz", hash = "sha256:d91c63b854b397595fb90946840fe02f29c1ca8cec000f3aa79f8f757aae0528"},
-]
-
-[package.dependencies]
-casefy = ">=0.1.7,<0.2.0"
-dacite = ">=1.8.0,<2.0.0"
-fastavro = ">=1.7.3,<2.0.0"
-inflection = ">=0.5.1,<0.6.0"
-python-dateutil = ">=2.7,<3.0"
-typing-extensions = ">=4.2.0,<5.0.0"
-
-[package.extras]
-cli = ["dc-avro (>=0.6.4)"]
-faker = ["faker (>=26.0.0,<31.0.0)"]
-faust = ["faust-streaming (>=0.10.11,<0.12.0)"]
-pydantic = ["pydantic[email] (>=2.4.2,<3.0.0)"]
-
-[[package]]
-name = "deepmerge"
-version = "2.0"
-description = "A toolset for deeply merging Python dictionaries."
-optional = false -python-versions = ">=3.8" -files = [ - {file = "deepmerge-2.0-py3-none-any.whl", hash = "sha256:6de9ce507115cff0bed95ff0ce9ecc31088ef50cbdf09bc90a09349a318b3d00"}, - {file = "deepmerge-2.0.tar.gz", hash = "sha256:5c3d86081fbebd04dd5de03626a0607b809a98fb6ccba5770b62466fe940ff20"}, -] - -[package.extras] -dev = ["black", "build", "mypy", "pytest", "pyupgrade", "twine", "validate-pyproject[all]"] - -[[package]] -name = "dnspython" -version = "2.6.1" -description = "DNS toolkit" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, -] - -[package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] -doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] -trio = ["trio (>=0.23)"] -wmi = ["wmi (>=1.5.1)"] - -[[package]] -name = "durationpy" -version = "0.7" -description = "Module for converting between datetime.timedelta and Go's Duration strings." -optional = false -python-versions = "*" -files = [ - {file = "durationpy-0.7.tar.gz", hash = "sha256:8447c43df4f1a0b434e70c15a38d77f5c9bd17284bfc1ff1d430f233d5083732"}, -] - -[[package]] -name = "email-validator" -version = "2.2.0" -description = "A robust email address syntax and deliverability validation library." -optional = false -python-versions = ">=3.8" -files = [ - {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, - {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, -] - -[package.dependencies] -dnspython = ">=2.0.0" -idna = ">=2.0.0" - -[[package]] -name = "escapism" -version = "1.0.1" -description = "Simple, generic API for escaping strings." -optional = false -python-versions = "*" -files = [ - {file = "escapism-1.0.1-py2.py3-none-any.whl", hash = "sha256:d28f19edc3cb1ffc36fa238956ecc068695477e748f57157c6dde00a6b77f229"}, - {file = "escapism-1.0.1.tar.gz", hash = "sha256:73256bdfb4f22230f0428fc6efecee61cdc4fad531b6f98b849cb9c80711e4ec"}, -] - -[[package]] -name = "factory-boy" -version = "3.3.0" -description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." -optional = false -python-versions = ">=3.7" -files = [ - {file = "factory_boy-3.3.0-py2.py3-none-any.whl", hash = "sha256:a2cdbdb63228177aa4f1c52f4b6d83fab2b8623bf602c7dedd7eb83c0f69c04c"}, - {file = "factory_boy-3.3.0.tar.gz", hash = "sha256:bc76d97d1a65bbd9842a6d722882098eb549ec8ee1081f9fb2e8ff29f0c300f1"}, -] - -[package.dependencies] -Faker = ">=0.7.0" - -[package.extras] -dev = ["Django", "Pillow", "SQLAlchemy", "coverage", "flake8", "isort", "mongoengine", "sqlalchemy-utils", "tox", "wheel (>=0.32.0)", "zest.releaser[recommended]"] -doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] - -[[package]] -name = "faker" -version = "25.9.1" -description = "Faker is a Python package that generates fake data for you." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "Faker-25.9.1-py3-none-any.whl", hash = "sha256:f1dc27dc8035cb7e97e96afbb5fe1305eed6aeea53374702cbac96acfe851626"}, - {file = "Faker-25.9.1.tar.gz", hash = "sha256:0e1cf7a8d3c94de91a65ab1e9cf7050903efae1e97901f8e5924a9f45147ae44"}, -] - -[package.dependencies] -python-dateutil = ">=2.4" - -[[package]] -name = "fakeredis" -version = "2.26.1" -description = "Python implementation of redis API, can be used for testing purposes." -optional = false -python-versions = "<4.0,>=3.7" -files = [ - {file = "fakeredis-2.26.1-py3-none-any.whl", hash = "sha256:68a5615d7ef2529094d6958677e30a6d30d544e203a5ab852985c19d7ad57e32"}, - {file = "fakeredis-2.26.1.tar.gz", hash = "sha256:69f4daafe763c8014a6dbf44a17559c46643c95447b3594b3975251a171b806d"}, -] - -[package.dependencies] -redis = {version = ">=4.3", markers = "python_full_version > \"3.8.0\""} -sortedcontainers = ">=2,<3" - -[package.extras] -bf = ["pyprobables (>=0.6,<0.7)"] -cf = ["pyprobables (>=0.6,<0.7)"] -json = ["jsonpath-ng (>=1.6,<2.0)"] -lua = ["lupa (>=2.1,<3.0)"] -probabilistic = ["pyprobables (>=0.6,<0.7)"] - -[[package]] -name = "fastavro" -version = "1.9.4" -description = "Fast read/write of AVRO files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastavro-1.9.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:60cb38f07462a7fb4e4440ed0de67d3d400ae6b3d780f81327bebde9aa55faef"}, - {file = "fastavro-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:063d01d197fc929c20adc09ca9f0ca86d33ac25ee0963ce0b438244eee8315ae"}, - {file = "fastavro-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a9053fcfbc895f2a16a4303af22077e3a8fdcf1cd5d6ed47ff2ef22cbba2f0"}, - {file = "fastavro-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:02bf1276b7326397314adf41b34a4890f6ffa59cf7e0eb20b9e4ab0a143a1598"}, - {file = "fastavro-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56bed9eca435389a8861e6e2d631ec7f8f5dda5b23f93517ac710665bd34ca29"}, - {file = "fastavro-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:0cd2099c8c672b853e0b20c13e9b62a69d3fbf67ee7c59c7271ba5df1680310d"}, - {file = "fastavro-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:af8c6d8c43a02b5569c093fc5467469541ac408c79c36a5b0900d3dd0b3ba838"}, - {file = "fastavro-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4a138710bd61580324d23bc5e3df01f0b82aee0a76404d5dddae73d9e4c723f"}, - {file = "fastavro-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:903d97418120ca6b6a7f38a731166c1ccc2c4344ee5e0470d09eb1dc3687540a"}, - {file = "fastavro-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c443eeb99899d062dbf78c525e4614dd77e041a7688fa2710c224f4033f193ae"}, - {file = "fastavro-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ac26ab0774d1b2b7af6d8f4300ad20bbc4b5469e658a02931ad13ce23635152f"}, - {file = "fastavro-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:cf7247874c22be856ba7d1f46a0f6e0379a6025f1a48a7da640444cbac6f570b"}, - {file = "fastavro-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:68912f2020e1b3d70557260b27dd85fb49a4fc6bfab18d384926127452c1da4c"}, - {file = "fastavro-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6925ce137cdd78e109abdb0bc33aad55de6c9f2d2d3036b65453128f2f5f5b92"}, - {file = "fastavro-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:8b928cd294e36e35516d0deb9e104b45be922ba06940794260a4e5dbed6c192a"}, - {file = "fastavro-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:90c9838bc4c991ffff5dd9d88a0cc0030f938b3fdf038cdf6babde144b920246"}, - {file = "fastavro-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:eca6e54da571b06a3c5a72dbb7212073f56c92a6fbfbf847b91c347510f8a426"}, - {file = "fastavro-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a4b02839ac261100cefca2e2ad04cdfedc556cb66b5ec735e0db428e74b399de"}, - {file = "fastavro-1.9.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:4451ee9a305a73313a1558d471299f3130e4ecc10a88bf5742aa03fb37e042e6"}, - {file = "fastavro-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8524fccfb379565568c045d29b2ebf71e1f2c0dd484aeda9fe784ef5febe1a8"}, - {file = "fastavro-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33d0a00a6e09baa20f6f038d7a2ddcb7eef0e7a9980e947a018300cb047091b8"}, - {file = "fastavro-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:23d7e5b29c9bf6f26e8be754b2c8b919838e506f78ef724de7d22881696712fc"}, - {file = "fastavro-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2e6ab3ee53944326460edf1125b2ad5be2fadd80f7211b13c45fa0c503b4cf8d"}, - {file = "fastavro-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:64d335ec2004204c501f8697c385d0a8f6b521ac82d5b30696f789ff5bc85f3c"}, - {file = "fastavro-1.9.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:7e05f44c493e89e73833bd3ff3790538726906d2856f59adc8103539f4a1b232"}, - {file = "fastavro-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:253c63993250bff4ee7b11fb46cf3a4622180a783bedc82a24c6fdcd1b10ca2a"}, - {file = "fastavro-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24d6942eb1db14640c2581e0ecd1bbe0afc8a83731fcd3064ae7f429d7880cb7"}, - {file = "fastavro-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d47bb66be6091cd48cfe026adcad11c8b11d7d815a2949a1e4ccf03df981ca65"}, - {file = "fastavro-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c293897f12f910e58a1024f9c77f565aa8e23b36aafda6ad8e7041accc57a57f"}, - {file = "fastavro-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:f05d2afcb10a92e2a9e580a3891f090589b3e567fdc5641f8a46a0b084f120c3"}, - {file = "fastavro-1.9.4.tar.gz", hash = "sha256:56b8363e360a1256c94562393dc7f8611f3baf2b3159f64fb2b9c6b87b14e876"}, -] - -[package.extras] -codecs = ["cramjam", "lz4", "zstandard"] -lz4 = ["lz4"] -snappy = ["cramjam"] -zstandard = ["zstandard"] - -[[package]] -name = "google-auth" -version = "2.30.0" -description = "Google Authentication Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "google-auth-2.30.0.tar.gz", hash = "sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688"}, - {file = "google_auth-2.30.0-py2.py3-none-any.whl", hash = "sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5"}, -] - -[package.dependencies] -cachetools = ">=2.0.0,<6.0" -pyasn1-modules = ">=0.2.1" -rsa = ">=3.1.4,<5" - -[package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] -reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] - -[[package]] -name = "googleapis-common-protos" -version = "1.65.0" -description = "Common protobufs used in Google APIs" 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, - {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, -] - -[package.dependencies] -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" - -[package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] - -[[package]] -name = "greenlet" -version = "3.0.3" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = 
"sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "grpc-interceptor" -version = "0.15.4" -description = "Simplifies gRPC interceptors" -optional = false -python-versions = ">=3.7,<4.0" -files = [ - {file = "grpc-interceptor-0.15.4.tar.gz", hash = "sha256:1f45c0bcb58b6f332f37c637632247c9b02bc6af0fdceb7ba7ce8d2ebbfb0926"}, - {file = "grpc_interceptor-0.15.4-py3-none-any.whl", hash = "sha256:0035f33228693ed3767ee49d937bac424318db173fef4d2d0170b3215f254d9d"}, -] - -[package.dependencies] -grpcio = ">=1.49.1,<2.0.0" - -[package.extras] -testing = ["protobuf (>=4.21.9)"] - -[[package]] -name = "grpcio" -version = "1.64.1" -description = "HTTP/2-based RPC framework" -optional = false 
-python-versions = ">=3.8" -files = [ - {file = "grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"}, - {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"}, - {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"}, - {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"}, - {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"}, - {file = "grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"}, - {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"}, - {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"}, - {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"}, - {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = 
"sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"}, - {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"}, - {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"}, - {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"}, - {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"}, - {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"}, - {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"}, - {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"}, - {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"}, - {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"}, - {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"}, - {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"}, -] - -[package.extras] -protobuf = ["grpcio-tools (>=1.64.1)"] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = 
"sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "html5tagger" -version = "1.3.0" -description = "Pythonic HTML generation/templating (no template files)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "html5tagger-1.3.0-py3-none-any.whl", hash = "sha256:ce14313515edffec8ed8a36c5890d023922641171b4e6e5774ad1a74998f5351"}, - {file = "html5tagger-1.3.0.tar.gz", hash = "sha256:84fa3dfb49e5c83b79bbd856ab7b1de8e2311c3bb46a8be925f119e3880a8da9"}, -] - -[[package]] -name = "httpcore" -version = "1.0.5" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] - -[[package]] -name = "httptools" -version = "0.6.1" -description = "A collection of framework independent HTTP protocol utils." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = 
"httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = "httptools-0.6.1.tar.gz", hash = 
"sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, -] - -[package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] - -[[package]] -name = "httpx" -version = "0.28.0" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc"}, - {file = "httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "httpx-ws" -version = "0.6.0" -description = "WebSockets support for HTTPX" -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx_ws-0.6.0-py3-none-any.whl", hash = "sha256:437cfca94519a4e6ae06eb5573192df6c0da85c22b1a19cc1ea0b02b05a51d25"}, - {file = "httpx_ws-0.6.0.tar.gz", hash = "sha256:60218f531fb474a2143af38568f4b7d94ba356780973443365c8e2c87882bb8c"}, -] - -[package.dependencies] -anyio = ">=4" -httpcore = ">=1.0.4" -httpx = ">=0.23.1" -wsproto = "*" - -[[package]] -name = "idna" -version = "3.7" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, -] - -[[package]] -name = "inflection" -version = "0.5.1" -description = "A port of Ruby on Rails inflector to Python" -optional = false -python-versions = ">=3.5" -files = [ - {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, - {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, -] - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "json5" -version = "0.9.25" -description = "A Python implementation of the JSON5 data format." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, - {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, -] - -[[package]] -name = "kr8s" -version = "0.18.1" -description = "A Kubernetes API library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "kr8s-0.18.1-py3-none-any.whl", hash = "sha256:192d659c70c7650e7641c3c69a656ac16e51672118468eef8224ea60009932c4"}, - {file = "kr8s-0.18.1.tar.gz", hash = "sha256:73c864c108e2f5159faab8dba9833011d586918f4520dfc64594df7b7907493f"}, -] - -[package.dependencies] -anyio = ">=3.7.0" -asyncache = ">=0.3.1" -cryptography = ">=35" -httpx = ">=0.24.1" -httpx-ws = ">=0.5.2" -python-box = ">=7.0.1" -python-jsonpath = ">=0.7.1" -pyyaml = ">=6.0" -typing-extensions = ">=4.12.2" - -[package.extras] -docs = ["furo (>=2023.3.27)", "myst-parser (>=1.0.0)", "sphinx (>=5.3.0)", "sphinx-autoapi (>=2.1.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-copybutton (>=0.5.1)", "sphinx-design (>=0.3.0)", "sphinxcontrib-mermaid (>=0.8.1)"] -test = ["kubernetes (>=26.1.0)", "kubernetes-asyncio (>=24.2.3)", "kubernetes-validate (>=1.28.0)", "lightkube (>=0.13.0)", "pykube-ng (>=23.6.0)", "pytest (>=7.2.2)", "pytest-asyncio (>=0.20.3)", "pytest-cov (>=4.0.0)", "pytest-kind (>=22.11.1)", "pytest-rerunfailures (>=11.1.2)", "pytest-timeout (>=2.1.0)", "trio (>=0.22.0)", "types-pyyaml (>=6.0)"] - -[[package]] -name = "kubernetes" -version = "31.0.0" -description = "Kubernetes python client" -optional = false -python-versions = ">=3.6" -files = [ - {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, - {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, -] - -[package.dependencies] -certifi = ">=14.05.14" -durationpy = ">=0.7" -google-auth = ">=1.0.1" -oauthlib = ">=3.2.2" -python-dateutil = ">=2.5.3" -pyyaml = ">=5.4.1" -requests = "*" -requests-oauthlib = "*" -six = ">=1.9.0" -urllib3 = ">=1.24.2" -websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" - -[package.extras] -adal = ["adal (>=1.0.2)"] - -[[package]] -name = "mako" -version = "1.3.5" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, -] - -[package.dependencies] -MarkupSafe = ">=0.9.2" - -[package.extras] -babel = ["Babel"] -lingua = ["lingua"] -testing = ["pytest"] - -[[package]] -name = "markupsafe" -version = "2.1.5" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, -] - -[[package]] -name = "marshmallow" -version = "3.23.1" -description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
-optional = false
-python-versions = ">=3.9"
-files = [
-    {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"},
-    {file = "marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468"},
-]
-
-[package.dependencies]
-packaging = ">=17.0"
-
-[package.extras]
-dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
-docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.14)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"]
-tests = ["pytest", "simplejson"]
-
-[[package]]
-name = "multidict"
-version = "6.0.5"
-description = "multidict implementation"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"},
-    {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"},
-    {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"},
-    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"},
-    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"},
-    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"},
-    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"},
-    {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"},
-    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"},
-    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"},
-    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"},
-    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"},
-    {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"},
-    {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"},
-    {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"},
-    {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"},
-    {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"},
-    {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"},
-    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"},
-    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"},
-    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"},
-    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"},
-    {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"},
-    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"},
-    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"},
-    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"},
-    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"},
-    {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"},
-    {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"},
-    {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"},
-    {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"},
-    {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"},
-    {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"},
-    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"},
-    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"},
-    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"},
-    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"},
-    {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"},
-    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"},
-    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"},
-    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"},
-    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"},
-    {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"},
-    {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"},
-    {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"},
-    {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"},
-    {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"},
-    {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"},
-    {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"},
-    {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"},
-    {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"},
-    {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"},
-    {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"},
-    {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"},
-    {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"},
-    {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"},
-    {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"},
-    {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"},
-    {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"},
-    {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"},
-    {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"},
-    {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"},
-    {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"},
-    {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"},
-    {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"},
-    {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"},
-    {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"},
-    {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"},
-    {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"},
-    {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"},
-    {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"},
-    {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"},
-    {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"},
-    {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"},
-    {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"},
-    {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"},
-    {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"},
-    {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"},
-    {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"},
-    {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"},
-    {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"},
-    {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"},
-    {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"},
-    {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"},
-    {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"},
-    {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"},
-    {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"},
-    {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"},
-    {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"},
-    {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"},
-]
-
-[[package]]
-name = "networkx"
-version = "3.3"
-description = "Python package for creating and manipulating graphs and networks"
-optional = false
-python-versions = ">=3.10"
-files = [
-    {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"},
-    {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"},
-]
-
-[package.extras]
-default = ["matplotlib (>=3.6)", "numpy (>=1.23)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"]
-developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"]
-doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"]
-extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"]
-test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"]
-
-[[package]]
-name = "oauthlib"
-version = "3.2.2"
-description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"},
-    {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"},
-]
-
-[package.extras]
-rsa = ["cryptography (>=3.0.0)"]
-signals = ["blinker (>=1.4.0)"]
-signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"]
-
-[[package]]
-name = "packaging"
-version = "24.1"
-description = "Core utilities for Python packages"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"},
-    {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"},
-]
-
-[[package]]
-name = "prometheus-client"
-version = "0.7.1"
-description = "Python client for the Prometheus monitoring system."
-optional = false
-python-versions = "*"
-files = [
-    {file = "prometheus_client-0.7.1.tar.gz", hash = "sha256:71cd24a2b3eb335cb800c7159f423df1bd4dcd5171b234be15e3f31ec9f622da"},
-]
-
-[package.extras]
-twisted = ["twisted"]
-
-[[package]]
-name = "prometheus-sanic"
-version = "3.0.0"
-description = "Exposes Prometheus monitoring metrics of Sanic apps."
-optional = false
-python-versions = ">=3.7,<4.0"
-files = [
-    {file = "prometheus-sanic-3.0.0.tar.gz", hash = "sha256:06cfe8f9c843a1324fa801b9092f26470a63196b9e08fad0c0f12b49ddbf6c3c"},
-    {file = "prometheus_sanic-3.0.0-py3-none-any.whl", hash = "sha256:499110bf2a86f921b229083e0bcea4d489420abf6737e0d838cd234394fd91aa"},
-]
-
-[package.dependencies]
-prometheus-client = ">=0.7.1,<0.8.0"
-sanic = ">=22.0.0"
-
-[[package]]
-name = "protobuf"
-version = "5.29.0"
-description = ""
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "protobuf-5.29.0-cp310-abi3-win32.whl", hash = "sha256:ea7fb379b257911c8c020688d455e8f74efd2f734b72dc1ea4b4d7e9fd1326f2"},
-    {file = "protobuf-5.29.0-cp310-abi3-win_amd64.whl", hash = "sha256:34a90cf30c908f47f40ebea7811f743d360e202b6f10d40c02529ebd84afc069"},
-    {file = "protobuf-5.29.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c931c61d0cc143a2e756b1e7f8197a508de5365efd40f83c907a9febf36e6b43"},
-    {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:85286a47caf63b34fa92fdc1fd98b649a8895db595cfa746c5286eeae890a0b1"},
-    {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:0d10091d6d03537c3f902279fcf11e95372bdd36a79556311da0487455791b20"},
-    {file = "protobuf-5.29.0-cp38-cp38-win32.whl", hash = "sha256:0cd67a1e5c2d88930aa767f702773b2d054e29957432d7c6a18f8be02a07719a"},
-    {file = "protobuf-5.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:e467f81fdd12ded9655cea3e9b83dc319d93b394ce810b556fb0f421d8613e86"},
-    {file = "protobuf-5.29.0-cp39-cp39-win32.whl", hash = "sha256:17d128eebbd5d8aee80300aed7a43a48a25170af3337f6f1333d1fac2c6839ac"},
-    {file = "protobuf-5.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:6c3009e22717c6cc9e6594bb11ef9f15f669b19957ad4087214d69e08a213368"},
-    {file = "protobuf-5.29.0-py3-none-any.whl", hash = "sha256:88c4af76a73183e21061881360240c0cdd3c39d263b4e8fb570aaf83348d608f"},
-    {file = "protobuf-5.29.0.tar.gz", hash = "sha256:445a0c02483869ed8513a585d80020d012c6dc60075f96fa0563a724987b1001"},
-]
-
-[[package]]
-name = "psycopg"
-version = "3.2.3"
-description = "PostgreSQL database adapter for Python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "psycopg-3.2.3-py3-none-any.whl", hash = "sha256:644d3973fe26908c73d4be746074f6e5224b03c1101d302d9a53bf565ad64907"},
-    {file = "psycopg-3.2.3.tar.gz", hash = "sha256:a5764f67c27bec8bfac85764d23c534af2c27b893550377e37ce59c12aac47a2"},
-]
-
-[package.dependencies]
-psycopg-binary = {version = "3.2.3", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""}
-typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""}
-tzdata = {version = "*", markers = "sys_platform == \"win32\""}
-
-[package.extras]
-binary = ["psycopg-binary (==3.2.3)"]
-c = ["psycopg-c (==3.2.3)"]
-dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.11)", "types-setuptools (>=57.4)", "wheel (>=0.37)"]
-docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"]
-pool = ["psycopg-pool"]
-test = ["anyio (>=4.0)", "mypy (>=1.11)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"]
-
-[[package]]
-name = "psycopg-binary"
-version = "3.2.3"
-description = "PostgreSQL database adapter for Python -- C optimisation distribution"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:965455eac8547f32b3181d5ec9ad8b9be500c10fe06193543efaaebe3e4ce70c"},
-    {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:71adcc8bc80a65b776510bc39992edf942ace35b153ed7a9c6c573a6849ce308"},
-    {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f73adc05452fb85e7a12ed3f69c81540a8875960739082e6ea5e28c373a30774"},
-    {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8630943143c6d6ca9aefc88bbe5e76c90553f4e1a3b2dc339e67dc34aa86f7e"},
-    {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bffb61e198a91f712cc3d7f2d176a697cb05b284b2ad150fb8edb308eba9002"},
-    {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4fa2240c9fceddaa815a58f29212826fafe43ce80ff666d38c4a03fb036955"},
-    {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:192a5f8496e6e1243fdd9ac20e117e667c0712f148c5f9343483b84435854c78"},
-    {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64dc6e9ec64f592f19dc01a784e87267a64a743d34f68488924251253da3c818"},
-    {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:79498df398970abcee3d326edd1d4655de7d77aa9aecd578154f8af35ce7bbd2"},
-    {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:949551752930d5e478817e0b49956350d866b26578ced0042a61967e3fcccdea"},
-    {file = "psycopg_binary-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:80a2337e2dfb26950894c8301358961430a0304f7bfe729d34cc036474e9c9b1"},
-    {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6d8f2144e0d5808c2e2aed40fbebe13869cd00c2ae745aca4b3b16a435edb056"},
-    {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94253be2b57ef2fea7ffe08996067aabf56a1eb9648342c9e3bad9e10c46e045"},
-    {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fda0162b0dbfa5eaed6cdc708179fa27e148cb8490c7d62e5cf30713909658ea"},
-    {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c0419cdad8c70eaeb3116bb28e7b42d546f91baf5179d7556f230d40942dc78"},
-    {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74fbf5dd3ef09beafd3557631e282f00f8af4e7a78fbfce8ab06d9cd5a789aae"},
-    {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d784f614e4d53050cbe8abf2ae9d1aaacf8ed31ce57b42ce3bf2a48a66c3a5c"},
-    {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4e76ce2475ed4885fe13b8254058be710ec0de74ebd8ef8224cf44a9a3358e5f"},
-    {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5938b257b04c851c2d1e6cb2f8c18318f06017f35be9a5fe761ee1e2e344dfb7"},
-    {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:257c4aea6f70a9aef39b2a77d0658a41bf05c243e2bf41895eb02220ac6306f3"},
-    {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:06b5cc915e57621eebf2393f4173793ed7e3387295f07fed93ed3fb6a6ccf585"},
-    {file = "psycopg_binary-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:09baa041856b35598d335b1a74e19a49da8500acedf78164600694c0ba8ce21b"},
-    {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:48f8ca6ee8939bab760225b2ab82934d54330eec10afe4394a92d3f2a0c37dd6"},
-    {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5361ea13c241d4f0ec3f95e0bf976c15e2e451e9cc7ef2e5ccfc9d170b197a40"},
-    {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb987f14af7da7c24f803111dbc7392f5070fd350146af3345103f76ea82e339"},
-    {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0463a11b1cace5a6aeffaf167920707b912b8986a9c7920341c75e3686277920"},
-    {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b7be9a6c06518967b641fb15032b1ed682fd3b0443f64078899c61034a0bca6"},
-    {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64a607e630d9f4b2797f641884e52b9f8e239d35943f51bef817a384ec1678fe"},
-    {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fa33ead69ed133210d96af0c63448b1385df48b9c0247eda735c5896b9e6dbbf"},
-    {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1f8b0d0e99d8e19923e6e07379fa00570be5182c201a8c0b5aaa9a4d4a4ea20b"},
-    {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:709447bd7203b0b2debab1acec23123eb80b386f6c29e7604a5d4326a11e5bd6"},
-    {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5e37d5027e297a627da3551a1e962316d0f88ee4ada74c768f6c9234e26346d9"},
-    {file = "psycopg_binary-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:261f0031ee6074765096a19b27ed0f75498a8338c3dcd7f4f0d831e38adf12d1"},
-    {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:41fdec0182efac66b27478ac15ef54c9ebcecf0e26ed467eb7d6f262a913318b"},
-    {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:07d019a786eb020c0f984691aa1b994cb79430061065a694cf6f94056c603d26"},
-    {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c57615791a337378fe5381143259a6c432cdcbb1d3e6428bfb7ce59fff3fb5c"},
-    {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8eb9a4e394926b93ad919cad1b0a918e9b4c846609e8c1cfb6b743683f64da0"},
-    {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5905729668ef1418bd36fbe876322dcb0f90b46811bba96d505af89e6fbdce2f"},
-    {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd65774ed7d65101b314808b6893e1a75b7664f680c3ef18d2e5c84d570fa393"},
-    {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:700679c02f9348a0d0a2adcd33a0275717cd0d0aee9d4482b47d935023629505"},
-    {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:96334bb64d054e36fed346c50c4190bad9d7c586376204f50bede21a913bf942"},
-    {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9099e443d4cc24ac6872e6a05f93205ba1a231b1a8917317b07c9ef2b955f1f4"},
-    {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1985ab05e9abebfbdf3163a16ebb37fbc5d49aff2bf5b3d7375ff0920bbb54cd"},
-    {file = "psycopg_binary-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:e90352d7b610b4693fad0feea48549d4315d10f1eba5605421c92bb834e90170"},
-    {file = "psycopg_binary-3.2.3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:69320f05de8cdf4077ecd7fefdec223890eea232af0d58f2530cbda2871244a0"},
-    {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4926ea5c46da30bec4a85907aa3f7e4ea6313145b2aa9469fdb861798daf1502"},
-    {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c64c4cd0d50d5b2288ab1bcb26c7126c772bbdebdfadcd77225a77df01c4a57e"},
-    {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05a1bdce30356e70a05428928717765f4a9229999421013f41338d9680d03a63"},
-    {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad357e426b0ea5c3043b8ec905546fa44b734bf11d33b3da3959f6e4447d350"},
-    {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:967b47a0fd237aa17c2748fdb7425015c394a6fb57cdad1562e46a6eb070f96d"},
-    {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:71db8896b942770ed7ab4efa59b22eee5203be2dfdee3c5258d60e57605d688c"},
-    {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2773f850a778575dd7158a6dd072f7925b67f3ba305e2003538e8831fec77a1d"},
-    {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aeddf7b3b3f6e24ccf7d0edfe2d94094ea76b40e831c16eff5230e040ce3b76b"},
-    {file = "psycopg_binary-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:824c867a38521d61d62b60aca7db7ca013a2b479e428a0db47d25d8ca5067410"},
-    {file = "psycopg_binary-3.2.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:9994f7db390c17fc2bd4c09dca722fd792ff8a49bb3bdace0c50a83f22f1767d"},
-    {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1303bf8347d6be7ad26d1362af2c38b3a90b8293e8d56244296488ee8591058e"},
-    {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:842da42a63ecb32612bb7f5b9e9f8617eab9bc23bd58679a441f4150fcc51c96"},
-    {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bb342a01c76f38a12432848e6013c57eb630103e7556cf79b705b53814c3949"},
-    {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd40af959173ea0d087b6b232b855cfeaa6738f47cb2a0fd10a7f4fa8b74293f"},
-    {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9b60b465773a52c7d4705b0a751f7f1cdccf81dd12aee3b921b31a6e76b07b0e"},
-    {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fc6d87a1c44df8d493ef44988a3ded751e284e02cdf785f746c2d357e99782a6"},
-    {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f0b018e37608c3bfc6039a1dc4eb461e89334465a19916be0153c757a78ea426"},
-    {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a29f5294b0b6360bfda69653697eff70aaf2908f58d1073b0acd6f6ab5b5a4f"},
-    {file = "psycopg_binary-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:e56b1fd529e5dde2d1452a7d72907b37ed1b4f07fdced5d8fb1e963acfff6749"},
-]
-
-[[package]]
-name = "pyasn1"
-version = "0.6.0"
-description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"},
-    {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"},
-]
-
-[[package]]
-name = "pyasn1-modules"
-version = "0.4.0"
-description = "A collection of ASN.1-based protocols modules"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"},
-    {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"},
-]
-
-[package.dependencies]
-pyasn1 = ">=0.4.6,<0.7.0"
-
-[[package]]
-name = "pyavro-gen"
-version = "0.3.3"
-description = "A typed class generator for Avro Schemata"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pyavro-gen-0.3.3.tar.gz", hash = "sha256:0e2b71c7c3c147326f555ecffcb6b2d5af4f1760b42a85f53a4fe85879f30a69"},
-    {file = "pyavro_gen-0.3.3-py3-none-any.whl", hash = "sha256:452f6acb178bf7d7d9eb3c78d1978bfeecefdb3fa2937a4baf3542ae28b6dc49"},
-]
-
-[package.dependencies]
-avro-preprocessor = ">=0.1.12"
-dataclasses-avroschema = ">=0.37.1"
-factory-boy = ">=3.2.1"
-faker = ">=15.1.1"
-isort = ">=5.10.1"
-networkx = ">=2.8.7"
-pygments = ">=2.13.0"
-pytz = ">=2022.5"
-undictify = ">=0.11.3"
-
-[[package]]
-name = "pycparser"
-version = "2.22"
-description = "C parser in Python"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
-    {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
-]
-
-[[package]]
-name = "pydantic"
-version = "2.10.2"
-description = "Data validation using Python type hints"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"},
-    {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"},
-]
-
-[package.dependencies]
-annotated-types = ">=0.6.0"
-email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""}
-pydantic-core = "2.27.1"
-typing-extensions = ">=4.12.2"
-
-[package.extras]
-email = ["email-validator (>=2.0.0)"]
-timezone = ["tzdata"]
-
-[[package]]
-name = "pydantic-core"
-version = "2.27.1"
-description = "Core functionality for Pydantic validation and serialization"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"},
-    {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"},
-    {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"},
-    {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"},
-    {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"},
-    {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"},
-    {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"},
-    {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"},
-    {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"},
-    {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"},
-    {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"},
-    {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"},
-    {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"},
-    {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"},
-    {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"},
-    {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"},
-    {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"},
-    {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"},
-    {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"},
-    {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"},
-    {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"},
-    {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"},
-    {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"},
-    {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"},
-    {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"},
-    {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"},
-    {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"},
-    {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"},
-    {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"},
-    {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"},
-    {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"},
-    {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"},
-    {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"},
-    {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"},
-    {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"},
-    {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"},
-    {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"},
-    {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"},
-    {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"},
-    {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"},
-    {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"},
-    {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"},
-    {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"},
-    {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"},
-    {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"},
-    {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"},
-    {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"},
-    {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"},
-    {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"},
-    {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"},
-    {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"},
-    {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"},
-    {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"},
-    {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"},
-    {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"},
-    {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"},
-    {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"},
-    {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"},
-    {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"},
-    {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"},
-    {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"},
-    {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"},
-    {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"},
-    {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"},
-    {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"},
-    {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"},
-    {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"},
-    {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"},
-    {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"},
-    {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"},
-    {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"},
-    {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"},
-    {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"},
-    {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"},
-    {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"},
-    {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"},
-    {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"},
-    {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"},
-    {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"},
-    {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"},
-    {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"},
-    {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"},
-    {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"},
-    {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"},
-    {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"},
-    {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"},
-    {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"},
-    {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"},
-    {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"},
-    {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"},
-    {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"},
-    {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"},
-    {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"},
-    {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"},
-    {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"},
-    {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"},
-    {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"},
-    {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"},
-    {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"},
-    {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"},
-]
-
-[package.dependencies]
-typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0"
-
-[[package]]
-name = "pygments"
-version = "2.18.0"
-description = "Pygments is a syntax highlighting package written in Python."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
-    {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
-]
-
-[package.extras]
-windows-terminal = ["colorama (>=0.4.6)"]
-
-[[package]]
-name = "pyjwt"
-version = "2.10.1"
-description = "JSON Web Token implementation in Python"
-optional = false
-python-versions = ">=3.9"
-files = [
-    {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"},
-    {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"},
-]
-
-[package.dependencies]
-cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""}
-
-[package.extras]
-crypto = ["cryptography (>=3.4.0)"]
-dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"]
-docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"]
-tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
-
-[[package]]
-name = "python-box"
-version = "7.2.0"
-description = "Advanced Python dictionaries with dot notation access"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "python_box-7.2.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:6bdeec791e25258351388b3029a3ec5da302bb9ed3be175493c43cdc6c47f5e3"},
-    {file = "python_box-7.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c449f7b3756a71479fa9c61a86e344ac00ed782a66d7662590f0afa294249d18"},
-    {file = "python_box-7.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:6b0d61f182d394106d963232854e495b51edc178faa5316a797be1178212d7e0"},
-    {file = "python_box-7.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e2d752de8c1204255bf7b0c814c59ef48293c187a7e9fdcd2fefa28024b72032"},
-    {file = "python_box-7.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a6c35ea356a386077935958a5debcd5b229b9a1b3b26287a52dfe1a7e65d99"},
-    {file = "python_box-7.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:32ed58ec4d9e5475efe69f9c7d773dfea90a6a01979e776da93fd2b0a5d04429"},
-    {file = "python_box-7.2.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2a2d664c6a27f7515469b6f1e461935a2038ee130b7d194b4b4db4e85d363618"},
-    {file = "python_box-7.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a5a7365db1aaf600d3e8a2747fcf6833beb5d45439a54318548f02e302e3ec"},
-    {file = "python_box-7.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:739f827056ea148cbea3122d4617c994e829b420b1331183d968b175304e3a4f"},
-    {file = "python_box-7.2.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:2617ef3c3d199f55f63c908f540a4dc14ced9b18533a879e6171c94a6a436f23"},
-    {file = "python_box-7.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd866bed03087b1d8340014da8c3aaae19135767580641df1b4ae6fff6ac0aa"},
-    {file = "python_box-7.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:9681f059e7e92bdf20782cd9ea6e533d4711fc7b8c57a462922a025d46add4d0"},
-    {file = "python_box-7.2.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:6b59b1e2741c9ceecdf5a5bd9b90502c24650e609cd824d434fed3b6f302b7bb"},
-    {file = "python_box-7.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23fae825d809ae7520fdeac88bb52be55a3b63992120a00e381783669edf589"},
-    {file = "python_box-7.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:573b1abdcb7bd745fa404444f060ee62fc35a74f067181e55dcb43cfe92f2827"},
-    {file = "python_box-7.2.0-py3-none-any.whl", hash = "sha256:a3c90832dd772cb0197fdb5bc06123b6e1b846899a1b53d9c39450d27a584829"},
-    {file = "python_box-7.2.0.tar.gz", hash = "sha256:551af20bdab3a60a2a21e3435120453c4ca32f7393787c3a5036e1d9fc6a0ede"},
-]
-
-[package.extras]
-all = ["msgpack", "ruamel.yaml (>=0.17)", "toml"]
-msgpack = ["msgpack"]
-pyyaml = ["PyYAML"]
-ruamel-yaml = ["ruamel.yaml (>=0.17)"]
-toml = ["toml"]
-tomli = ["tomli", "tomli-w"]
-yaml = ["ruamel.yaml (>=0.17)"]
-
-[[package]]
-name = "python-dateutil"
-version = "2.9.0.post0"
-description = "Extensions to the standard Python datetime module"
-optional = false
-python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
-files = [
-    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
-    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
-]
-
-[package.dependencies]
-six = ">=1.5"
-
-[[package]]
-name = "python-gitlab"
-version = "5.1.0"
-description = "A python wrapper for the GitLab API"
-optional = false
-python-versions = ">=3.9.0"
-files = [
-    {file = "python_gitlab-5.1.0-py3-none-any.whl", hash = "sha256:c30cf547392ce66daaaf020839cfb6c15a91b26e2e7054d1b3f1b92e8dd65e7d"},
-    {file = "python_gitlab-5.1.0.tar.gz", hash = "sha256:d5a10dae8328f32fb9214bd3f9dc199b4930cd496f81c9be42a0f8ff338aeb35"},
-]
-
-[package.dependencies]
-requests = ">=2.32.0"
-requests-toolbelt = ">=1.0.0"
-
-[package.extras]
-autocompletion = ["argcomplete (>=1.10.0,<3)"]
-graphql = ["gql[httpx] (>=3.5.0,<4)"]
-yaml = ["PyYaml (>=6.0.1)"]
-
-[[package]]
-name = "python-jsonpath"
-version = "1.2.0"
-description = "JSONPath, JSON Pointer and JSON Patch for Python."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "python_jsonpath-1.2.0-py3-none-any.whl", hash = "sha256:3172c7b87098fced1ed84bd3492bd1a19ef1ad41d4f5b8a3e9a147c750ac08b3"},
-    {file = "python_jsonpath-1.2.0.tar.gz", hash = "sha256:a29a84ec3ac38e5dcaa62ac2a215de72c4eb60cb1303e10700da980cf7873775"},
-]
-
-[[package]]
-name = "python-ulid"
-version = "3.0.0"
-description = "Universally unique lexicographically sortable identifier"
-optional = false
-python-versions = ">=3.9"
-files = [
-    {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"},
-    {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"},
-]
-
-[package.extras]
-pydantic = ["pydantic (>=2.0)"]
-
-[[package]]
-name = "pytz"
-version = "2024.1"
-description = "World timezone definitions, modern and historical"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
-    {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
-]
-
-[[package]]
-name = "pyyaml"
-version = "6.0.1"
-description = "YAML parser and emitter for Python"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
-    {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
-    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
-    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
-    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
-    {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
-    {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
-    {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
-    {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
-    {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
-    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
-    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
-    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
-    {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
-    {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
-    {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
-    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
-    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
-    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
-    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
-    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
-    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
-    {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
-    {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
-    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
-    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
-    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
-    {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
-    {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
-    {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
-    {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
-    {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
-    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
-    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
-    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
-    {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
-    {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
-    {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
-    {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
-]
-
-[[package]]
-name = "redis"
-version = "5.2.0"
-description = "Python client for Redis database and key-value store"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"},
-    {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"},
-]
-
-[package.extras]
-hiredis = ["hiredis (>=3.0.0)"]
-ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"]
-
-[[package]]
-name = "requests"
-version = "2.32.3"
-description = "Python HTTP for Humans."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
-    {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
-]
-
-[package.dependencies]
-certifi = ">=2017.4.17"
-charset-normalizer = ">=2,<4"
-idna = ">=2.5,<4"
-urllib3 = ">=1.21.1,<3"
-
-[package.extras]
-socks = ["PySocks (>=1.5.6,!=1.5.7)"]
-use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
-
-[[package]]
-name = "requests-oauthlib"
-version = "2.0.0"
-description = "OAuthlib authentication support for Requests."
-optional = false
-python-versions = ">=3.4"
-files = [
-    {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"},
-    {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"},
-]
-
-[package.dependencies]
-oauthlib = ">=3.0.0"
-requests = ">=2.0.0"
-
-[package.extras]
-rsa = ["oauthlib[signedtoken] (>=3.0.0)"]
-
-[[package]]
-name = "requests-toolbelt"
-version = "1.0.0"
-description = "A utility belt for advanced users of python-requests"
-optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
-files = [
-    {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
-    {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
-]
-
-[package.dependencies]
-requests = ">=2.0.1,<3.0.0"
-
-[[package]]
-name = "rsa"
-version = "4.9"
-description = "Pure-Python RSA implementation"
-optional = false
-python-versions = ">=3.6,<4"
-files = [
-    {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
-    {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
-]
-
-[package.dependencies]
-pyasn1 = ">=0.1.3"
-
-[[package]]
-name = "ruamel-yaml"
-version = "0.18.6"
-description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"},
-    {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"},
-]
-
-[package.dependencies]
-"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""}
-
-[package.extras]
-docs = ["mercurial (>5.7)", "ryd"]
-jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
-
-[[package]]
-name = "ruamel-yaml-clib"
-version = "0.2.8"
-description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"},
-    {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, - {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, -] - -[[package]] -name = "sanic" -version = "24.6.0" -description = "A web server and web framework that's written to go fast. Build fast. Run fast." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "sanic-24.6.0-py3-none-any.whl", hash = "sha256:e2c6b392e213d85d9843cf27c64e3f2dacb3ec5c31c8c7ade4c404cd3030e994"}, - {file = "sanic-24.6.0.tar.gz", hash = "sha256:2e0841e2c8c28e68a0e6fc570c42aafbbe3b385d7141b9f96997d9d6c17d7afb"}, -] - -[package.dependencies] -aiofiles = ">=0.6.0" -html5tagger = ">=1.2.1" -httptools = ">=0.0.10" -multidict = ">=5.0,<7.0" -sanic-ext = {version = "*", optional = true, markers = "extra == \"ext\""} -sanic-routing = ">=23.12.0" -setuptools = ">=70.1.0" -tracerite = ">=1.0.0" -typing-extensions = ">=4.4.0" -ujson = {version = ">=1.35", markers = "sys_platform != \"win32\" and implementation_name == \"cpython\""} -uvloop = {version = ">=0.15.0", markers = "sys_platform != \"win32\" and implementation_name == \"cpython\""} -websockets = ">=10.0" - -[package.extras] -all = ["autodocsumm (>=0.2.11)", "bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "sphinx (>=2.1.2)", "sphinx-rtd-theme (>=0.4.3)", "towncrier", "tox", "types-ujson", "uvicorn"] -dev = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "towncrier", "tox", "types-ujson", "uvicorn"] -docs = ["autodocsumm (>=0.2.11)", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "pygments", "sphinx (>=2.1.2)", "sphinx-rtd-theme (>=0.4.3)"] -ext = ["sanic-ext"] -http3 = ["aioquic"] -test = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "types-ujson", "uvicorn"] - -[[package]] -name = "sanic-ext" -version = "23.12.0" -description = "Extend your Sanic installation with some core functionality." 
-optional = false -python-versions = "*" -files = [ - {file = "sanic-ext-23.12.0.tar.gz", hash = "sha256:42fc41e7fafa58f3b790f685f3dd8a8de281460b4169d0e91f4e11b8747f845c"}, - {file = "sanic_ext-23.12.0-py3-none-any.whl", hash = "sha256:3ba2c143d7c41d89b87a11c6214b9d9b52c3994ff8ce3a03792b54ec5627e2c3"}, -] - -[package.dependencies] -pyyaml = ">=3.0.0" - -[package.extras] -dev = ["Jinja2", "black (>=21.4b2)", "coverage", "flake8 (>=3.7.7)", "isort (>=5.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "sanic-testing (>=22.9.0)", "tox"] -test = ["Jinja2", "coverage", "pytest", "pytest-asyncio", "pytest-cov", "sanic-testing (>=22.9.0)", "tox"] - -[[package]] -name = "sanic-routing" -version = "23.12.0" -description = "Core routing component for Sanic" -optional = false -python-versions = "*" -files = [ - {file = "sanic-routing-23.12.0.tar.gz", hash = "sha256:1dcadc62c443e48c852392dba03603f9862b6197fc4cba5bbefeb1ace0848b04"}, - {file = "sanic_routing-23.12.0-py3-none-any.whl", hash = "sha256:1558a72afcb9046ed3134a5edae02fc1552cff08f0fff2e8d5de0877ea43ed73"}, -] - -[[package]] -name = "sentry-sdk" -version = "2.19.0" -description = "Python client for Sentry (https://sentry.io)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "sentry_sdk-2.19.0-py2.py3-none-any.whl", hash = "sha256:7b0b3b709dee051337244a09a30dbf6e95afe0d34a1f8b430d45e0982a7c125b"}, - {file = "sentry_sdk-2.19.0.tar.gz", hash = "sha256:ee4a4d2ae8bfe3cac012dcf3e4607975904c137e1738116549fc3dbbb6ff0e36"}, -] - -[package.dependencies] -certifi = "*" -sanic = {version = ">=0.8", optional = true, markers = "extra == \"sanic\""} -urllib3 = ">=1.26.11" - -[package.extras] -aiohttp = ["aiohttp (>=3.5)"] -anthropic = ["anthropic (>=0.16)"] -arq = ["arq (>=0.23)"] -asyncpg = ["asyncpg (>=0.23)"] -beam = ["apache-beam (>=2.12)"] -bottle = ["bottle (>=0.12.13)"] -celery = ["celery (>=3)"] -celery-redbeat = ["celery-redbeat (>=2)"] -chalice = ["chalice (>=1.16.0)"] -clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] -django = ["django (>=1.8)"] -falcon = ["falcon (>=1.4)"] -fastapi = ["fastapi (>=0.79.0)"] -flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] -grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] -http2 = ["httpcore[http2] (==1.*)"] -httpx = ["httpx (>=0.16.0)"] -huey = ["huey (>=2)"] -huggingface-hub = ["huggingface_hub (>=0.22)"] -langchain = ["langchain (>=0.0.210)"] -launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"] -litestar = ["litestar (>=2.0.0)"] -loguru = ["loguru (>=0.5)"] -openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] -openfeature = ["openfeature-sdk (>=0.7.1)"] -opentelemetry = ["opentelemetry-distro (>=0.35b0)"] -opentelemetry-experimental = ["opentelemetry-distro"] -pure-eval = ["asttokens", "executing", "pure_eval"] -pymongo = ["pymongo (>=3.1)"] -pyspark = ["pyspark (>=2.4.4)"] -quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] -rq = ["rq (>=0.6)"] -sanic = ["sanic (>=0.8)"] -sqlalchemy = ["sqlalchemy (>=1.2)"] -starlette = ["starlette (>=0.19.1)"] -starlite = ["starlite (>=1.48)"] -tornado = ["tornado (>=6)"] - -[[package]] -name = "setuptools" -version = "75.6.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.9" -files = [ - {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, - {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, -] - -[package.extras] 
-check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -optional = false -python-versions = "*" -files = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] - -[[package]] -name = "sqlalchemy" -version = "2.0.36" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, - 
{file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, - {file = 
"SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash 
= "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, - {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, - {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "tenacity" -version = "9.0.0" -description = "Retry code until it succeeds" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, - {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, -] - -[package.extras] -doc = ["reno", "sphinx"] -test = ["pytest", "tornado (>=4.5)", "typeguard"] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "tracerite" -version = "1.1.1" -description = "Human-readable HTML tracebacks for Python exceptions" -optional = false -python-versions = "*" -files = [ - {file = "tracerite-1.1.1-py3-none-any.whl", hash = "sha256:3a787a9ecb1a136ea9ce17e6328e414ec414a4f644130af4e1e330bec2dece29"}, - {file = "tracerite-1.1.1.tar.gz", hash = "sha256:6400a35a187747189e4bb8d4a8e471bd86d14dbdcc94bcad23f4eda023f41356"}, -] - -[package.dependencies] 
-html5tagger = ">=1.2.1" - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "tzdata" -version = "2024.1" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, -] - -[[package]] -name = "ujson" -version = "5.10.0" -description = "Ultra fast JSON encoder and decoder for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, - {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, - {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, - {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, - {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, - {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, - {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, - {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, - {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, - {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, - {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, - {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, - {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, - {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, - {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, - {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, - {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, - {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, - {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, - {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, - {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, - 
{file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, - {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, - {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, - {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, - {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, - {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, - {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, - {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, - {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, - {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, - {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, - {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, - {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, - {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, - {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, -] - -[[package]] -name = "undictify" -version = "0.11.3" -description = "Type-checked function calls at runtime" -optional = false -python-versions = "*" -files = [ - {file = "undictify-0.11.3-py3-none-any.whl", hash = "sha256:4bfdc075b2f06ee027b05e241434c8efcbebf6c83fcc5b8d9d8def56dab4b5ff"}, - {file = "undictify-0.11.3.tar.gz", hash = "sha256:1481170ed8b9862c033e7549d817b90cead6002677c602d1bbdbf8ea15100098"}, -] - -[[package]] -name = "urllib3" -version = "2.2.3" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "uvloop" -version = "0.21.0" -description = "Fast implementation of asyncio event loop on top of libuv" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, - {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, - {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, - {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, - {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, - {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, - {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, - {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, - {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, - {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, - {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, - {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, - {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, - {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, - {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, - {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, - {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, - {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, - {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, - {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, - {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, - {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, - {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, - {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, - {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, - {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, - {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, - {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, - {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, - {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, - {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, - {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, - {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, - {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, - {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, - {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, - {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, -] - -[package.extras] -dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] -docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] - -[[package]] -name = "websocket-client" -version = "1.8.0" -description = "WebSocket client for Python with low level API options" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, - {file = "websocket_client-1.8.0.tar.gz", hash = 
"sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, -] - -[package.extras] -docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] -optional = ["python-socks", "wsaccel"] -test = ["websockets"] - -[[package]] -name = "websockets" -version = "12.0" -description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" -optional = false -python-versions = ">=3.8" -files = [ - {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, - {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, - {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, - {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, - {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, - {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, - {file = 
"websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, - {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, - {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, - {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, - {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, - {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, - {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, - {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, - {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, - {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, - {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, - {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, - {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, - {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = 
"websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, - {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, - {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, -] - -[[package]] -name = "werkzeug" -version = "3.1.3" -description = "The comprehensive WSGI web application library." -optional = false -python-versions = ">=3.9" -files = [ - {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, - {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, -] - -[package.dependencies] -MarkupSafe = ">=2.1.1" - -[package.extras] -watchdog = ["watchdog (>=2.3)"] - -[[package]] -name = "wsproto" -version = "1.2.0" -description = "WebSockets state-machine based protocol implementation" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, - {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, -] - -[package.dependencies] -h11 = ">=0.9.0,<1" - -[metadata] -lock-version = "2.0" -python-versions = "^3.12" -content-hash = "273ad6c57d970193dac8601d04232540a8dbda1e19b384600ef6123d922cbf6d" diff --git a/projects/k8s_watcher/Dockerfile b/projects/k8s_watcher/Dockerfile new file mode 100644 index 000000000..357dedb57 --- /dev/null +++ b/projects/k8s_watcher/Dockerfile @@ -0,0 +1,41 @@ +FROM python:3.13-bookworm AS builder +ARG DEV_BUILD=false +ARG USER_UID=1000 +ARG USER_GID=$USER_UID + +RUN groupadd --gid $USER_GID renku && \ + DEBIAN_FRONTEND=noninteractive adduser --gid $USER_GID --uid $USER_UID renku +USER $USER_UID:$USER_GID +WORKDIR /app +RUN python3 -m pip install --user pipx && \ + python3 -m pipx ensurepath && \ + /home/renku/.local/bin/pipx install poetry && \ + /home/renku/.local/bin/pipx install virtualenv && \ + /home/renku/.local/bin/virtualenv env && \ + /home/renku/.local/bin/poetry self add poetry-multiproject-plugin && \ + /home/renku/.local/bin/poetry self add poetry-polylith-plugin && \ + /home/renku/.local/bin/poetry self add poetry-plugin-export + +COPY 
--chown=$USER_UID:$USER_GID . . +RUN if $DEV_BUILD ; then \ + /home/renku/.local/bin/poetry export -o requirements.txt --with dev; \ + else \ + /home/renku/.local/bin/poetry export -o requirements.txt; \ + fi && \ + env/bin/pip install -r requirements.txt +RUN /home/renku/.local/bin/poetry -C projects/k8s_watcher build-project -f wheel --custom-temp-path=/tmp +RUN env/bin/pip --no-cache-dir install projects/k8s_watcher/dist/*.whl + +FROM python:3.13-slim-bookworm +ARG USER_UID=1000 +ARG USER_GID=$USER_UID +RUN mkdir /prometheus && chown $USER_UID:$USER_GID /prometheus +RUN apt-get update && apt-get install -y \ + tini vim-tiny procps curl && \ + rm -rf /var/lib/apt/lists/* && \ + groupadd --gid $USER_GID renku && \ + adduser --gid $USER_GID --uid $USER_UID renku +USER $USER_UID:$USER_GID +WORKDIR /app +COPY --from=builder /app/env ./env +ENTRYPOINT ["tini", "-g", "--", "env/bin/python", "-m", "renku_data_services.k8s_cache.main"] diff --git a/projects/k8s_watcher/poetry.lock b/projects/k8s_watcher/poetry.lock new file mode 100644 index 000000000..30dc1ce9a --- /dev/null +++ b/projects/k8s_watcher/poetry.lock @@ -0,0 +1,3420 @@ +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. + +[[package]] +name = "aiofile" +version = "3.9.0" +description = "Asynchronous file operations." +optional = false +python-versions = "<4,>=3.8" +groups = ["main"] +files = [ + {file = "aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa"}, + {file = "aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b"}, +] + +[package.dependencies] +caio = ">=0.9.0,<0.10.0" + +[[package]] +name = "aiofiles" +version = "24.1.0" +description = "File support for asyncio." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, + {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, +] + +[[package]] +name = "aiohttp" +version = "3.11.18" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:96264854fedbea933a9ca4b7e0c745728f01380691687b7365d18d9e977179c4"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9602044ff047043430452bc3a2089743fa85da829e6fc9ee0025351d66c332b6"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5691dc38750fcb96a33ceef89642f139aa315c8a193bbd42a0c33476fd4a1609"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554c918ec43f8480b47a5ca758e10e793bd7410b83701676a4782672d670da55"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a4076a2b3ba5b004b8cffca6afe18a3b2c5c9ef679b4d1e9859cf76295f8d4f"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:767a97e6900edd11c762be96d82d13a1d7c4fc4b329f054e88b57cdc21fded94"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0ddc9337a0fb0e727785ad4f41163cc314376e82b31846d3835673786420ef1"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f414f37b244f2a97e79b98d48c5ff0789a0b4b4609b17d64fa81771ad780e415"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fdb239f47328581e2ec7744ab5911f97afb10752332a6dd3d98e14e429e1a9e7"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:f2c50bad73ed629cc326cc0f75aed8ecfb013f88c5af116f33df556ed47143eb"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a8d8f20c39d3fa84d1c28cdb97f3111387e48209e224408e75f29c6f8e0861d"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:106032eaf9e62fd6bc6578c8b9e6dc4f5ed9a5c1c7fb2231010a1b4304393421"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b491e42183e8fcc9901d8dcd8ae644ff785590f1727f76ca86e731c61bfe6643"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad8c745ff9460a16b710e58e06a9dec11ebc0d8f4dd82091cefb579844d69868"}, + {file = "aiohttp-3.11.18-cp310-cp310-win32.whl", hash = "sha256:8e57da93e24303a883146510a434f0faf2f1e7e659f3041abc4e3fb3f6702a9f"}, + {file = "aiohttp-3.11.18-cp310-cp310-win_amd64.whl", hash = "sha256:cc93a4121d87d9f12739fc8fab0a95f78444e571ed63e40bfc78cd5abe700ac9"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:427fdc56ccb6901ff8088544bde47084845ea81591deb16f957897f0f0ba1be9"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c828b6d23b984255b85b9b04a5b963a74278b7356a7de84fda5e3b76866597b"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c2eaa145bb36b33af1ff2860820ba0589e165be4ab63a49aebfd0981c173b66"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d518ce32179f7e2096bf4e3e8438cf445f05fedd597f252de9f54c728574756"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0700055a6e05c2f4711011a44364020d7a10fbbcd02fbf3e30e8f7e7fddc8717"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8bd1cde83e4684324e6ee19adfc25fd649d04078179890be7b29f76b501de8e4"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73b8870fe1c9a201b8c0d12c94fe781b918664766728783241a79e0468427e4f"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25557982dd36b9e32c0a3357f30804e80790ec2c4d20ac6bcc598533e04c6361"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e889c9df381a2433802991288a61e5a19ceb4f61bd14f5c9fa165655dcb1fd1"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9ea345fda05bae217b6cce2acf3682ce3b13d0d16dd47d0de7080e5e21362421"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9f26545b9940c4b46f0a9388fd04ee3ad7064c4017b5a334dd450f616396590e"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3a621d85e85dccabd700294494d7179ed1590b6d07a35709bb9bd608c7f5dd1d"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9c23fd8d08eb9c2af3faeedc8c56e134acdaf36e2117ee059d7defa655130e5f"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9e6b0e519067caa4fd7fb72e3e8002d16a68e84e62e7291092a5433763dc0dd"}, + {file = "aiohttp-3.11.18-cp311-cp311-win32.whl", hash = "sha256:122f3e739f6607e5e4c6a2f8562a6f476192a682a52bda8b4c6d4254e1138f4d"}, + {file = "aiohttp-3.11.18-cp311-cp311-win_amd64.whl", hash = "sha256:e6f3c0a3a1e73e88af384b2e8a0b9f4fb73245afd47589df2afcab6b638fa0e6"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:63d71eceb9cad35d47d71f78edac41fcd01ff10cacaa64e473d1aec13fa02df2"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d1929da615840969929e8878d7951b31afe0bac883d84418f92e5755d7b49508"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d0aebeb2392f19b184e3fdd9e651b0e39cd0f195cdb93328bd124a1d455cd0e"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3849ead845e8444f7331c284132ab314b4dac43bfae1e3cf350906d4fff4620f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e8452ad6b2863709f8b3d615955aa0807bc093c34b8e25b3b52097fe421cb7f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b8d2b42073611c860a37f718b3d61ae8b4c2b124b2e776e2c10619d920350ec"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fbf91f6a0ac317c0a07eb328a1384941872f6761f2e6f7208b63c4cc0a7ff6"}, 
+ {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ff5625413fec55216da5eaa011cf6b0a2ed67a565914a212a51aa3755b0009"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7f33a92a2fde08e8c6b0c61815521324fc1612f397abf96eed86b8e31618fdb4"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:11d5391946605f445ddafda5eab11caf310f90cdda1fd99865564e3164f5cff9"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3cc314245deb311364884e44242e00c18b5896e4fe6d5f942e7ad7e4cb640adb"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f421843b0f70740772228b9e8093289924359d306530bcd3926f39acbe1adda"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e220e7562467dc8d589e31c1acd13438d82c03d7f385c9cd41a3f6d1d15807c1"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ab2ef72f8605046115bc9aa8e9d14fd49086d405855f40b79ed9e5c1f9f4faea"}, + {file = "aiohttp-3.11.18-cp312-cp312-win32.whl", hash = "sha256:12a62691eb5aac58d65200c7ae94d73e8a65c331c3a86a2e9670927e94339ee8"}, + {file = "aiohttp-3.11.18-cp312-cp312-win_amd64.whl", hash = "sha256:364329f319c499128fd5cd2d1c31c44f234c58f9b96cc57f743d16ec4f3238c8"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:474215ec618974054cf5dc465497ae9708543cbfc312c65212325d4212525811"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ced70adf03920d4e67c373fd692123e34d3ac81dfa1c27e45904a628567d804"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2d9f6c0152f8d71361905aaf9ed979259537981f47ad099c8b3d81e0319814bd"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a35197013ed929c0aed5c9096de1fc5a9d336914d73ab3f9df14741668c0616c"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:540b8a1f3a424f1af63e0af2d2853a759242a1769f9f1ab053996a392bd70118"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9e6710ebebfce2ba21cee6d91e7452d1125100f41b906fb5af3da8c78b764c1"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8af2ef3b4b652ff109f98087242e2ab974b2b2b496304063585e3d78de0b000"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28c3f975e5ae3dbcbe95b7e3dcd30e51da561a0a0f2cfbcdea30fc1308d72137"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c28875e316c7b4c3e745172d882d8a5c835b11018e33432d281211af35794a93"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:13cd38515568ae230e1ef6919e2e33da5d0f46862943fcda74e7e915096815f3"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0e2a92101efb9f4c2942252c69c63ddb26d20f46f540c239ccfa5af865197bb8"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e6d3e32b8753c8d45ac550b11a1090dd66d110d4ef805ffe60fa61495360b3b2"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ea4cf2488156e0f281f93cc2fd365025efcba3e2d217cbe3df2840f8c73db261"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:9d4df95ad522c53f2b9ebc07f12ccd2cb15550941e11a5bbc5ddca2ca56316d7"}, + {file = "aiohttp-3.11.18-cp313-cp313-win32.whl", hash = "sha256:cdd1bbaf1e61f0d94aced116d6e95fe25942f7a5f42382195fd9501089db5d78"}, + {file = "aiohttp-3.11.18-cp313-cp313-win_amd64.whl", hash = "sha256:bdd619c27e44382cf642223f11cfd4d795161362a5a1fc1fa3940397bc89db01"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:469ac32375d9a716da49817cd26f1916ec787fc82b151c1c832f58420e6d3533"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3cec21dd68924179258ae14af9f5418c1ebdbba60b98c667815891293902e5e0"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b426495fb9140e75719b3ae70a5e8dd3a79def0ae3c6c27e012fc59f16544a4a"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2f41203e2808616292db5d7170cccf0c9f9c982d02544443c7eb0296e8b0c7"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc0ae0a5e9939e423e065a3e5b00b24b8379f1db46046d7ab71753dfc7dd0e1"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe7cdd3f7d1df43200e1c80f1aed86bb36033bf65e3c7cf46a2b97a253ef8798"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5199be2a2f01ffdfa8c3a6f5981205242986b9e63eb8ae03fd18f736e4840721"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ccec9e72660b10f8e283e91aa0295975c7bd85c204011d9f5eb69310555cf30"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1596ebf17e42e293cbacc7a24c3e0dc0f8f755b40aff0402cb74c1ff6baec1d3"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:eab7b040a8a873020113ba814b7db7fa935235e4cbaf8f3da17671baa1024863"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5d61df4a05476ff891cff0030329fee4088d40e4dc9b013fac01bc3c745542c2"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:46533e6792e1410f9801d09fd40cbbff3f3518d1b501d6c3c5b218f427f6ff08"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c1b90407ced992331dd6d4f1355819ea1c274cc1ee4d5b7046c6761f9ec11829"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a2fd04ae4971b914e54fe459dd7edbbd3f2ba875d69e057d5e3c8e8cac094935"}, + {file = "aiohttp-3.11.18-cp39-cp39-win32.whl", hash = "sha256:b2f317d1678002eee6fe85670039fb34a757972284614638f82b903a03feacdc"}, + {file = "aiohttp-3.11.18-cp39-cp39-win_amd64.whl", hash = "sha256:5e7007b8d1d09bce37b54111f593d173691c530b80f27c6493b928dabed9e6ef"}, + {file = "aiohttp-3.11.18.tar.gz", hash = "sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.3.0" +aiosignal = ">=1.1.2" +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" + +[package.extras] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] + +[[package]] +name = "aiosignal" +version = "1.3.2" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.9" +groups = ["main"] 
+files = [ + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "alembic" +version = "1.15.2" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "alembic-1.15.2-py3-none-any.whl", hash = "sha256:2e76bd916d547f6900ec4bb5a90aeac1485d2c92536923d0b138c02b126edc53"}, + {file = "alembic-1.15.2.tar.gz", hash = "sha256:1c72391bbdeffccfe317eefba686cb9a3c078005478885413b95c3b26c57a8a7"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.4.0" +typing-extensions = ">=4.12" + +[package.extras] +tz = ["tzdata"] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.9.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "argcomplete" +version = "3.6.2" +description = "Bash tab completion for argparse" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591"}, + {file = "argcomplete-3.6.2.tar.gz", hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf"}, +] + +[package.extras] +test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] + +[[package]] +name = "asyncache" +version = "0.3.1" +description = "Helpers to use cachetools with async code." 
+optional = false +python-versions = ">=3.8,<4.0" +groups = ["main"] +files = [ + {file = "asyncache-0.3.1-py3-none-any.whl", hash = "sha256:ef20a1024d265090dd1e0785c961cf98b9c32cc7d9478973dcf25ac1b80011f5"}, + {file = "asyncache-0.3.1.tar.gz", hash = "sha256:9a1e60a75668e794657489bdea6540ee7e3259c483517b934670db7600bf5035"}, +] + +[package.dependencies] +cachetools = ">=5.2.0,<6.0.0" + +[[package]] +name = "asyncpg" +version = "0.30.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, + {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, + {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f"}, + {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af"}, + {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75"}, + {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f"}, + {file = "asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf"}, + {file = "asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50"}, + {file = "asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a"}, + {file = "asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed"}, + {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a"}, + {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956"}, + {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056"}, + {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454"}, + {file = "asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d"}, + {file = "asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f"}, + {file = "asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e"}, + {file = "asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a"}, + {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3"}, + {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737"}, + {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a"}, + {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af"}, + {file = "asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e"}, + {file = "asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305"}, + {file = "asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70"}, + {file = "asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3"}, + {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33"}, + {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4"}, + {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4"}, + {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba"}, + {file = "asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590"}, + {file = "asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e"}, + {file = "asyncpg-0.30.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d"}, + {file = "asyncpg-0.30.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168"}, + {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb"}, + {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f"}, + {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38"}, + {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34"}, + {file = "asyncpg-0.30.0-cp38-cp38-win32.whl", hash = "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4"}, + {file = "asyncpg-0.30.0-cp38-cp38-win_amd64.whl", hash = "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b"}, + {file = "asyncpg-0.30.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad"}, + {file = "asyncpg-0.30.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff"}, + {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708"}, + {file = 
"asyncpg-0.30.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144"}, + {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb"}, + {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547"}, + {file = "asyncpg-0.30.0-cp39-cp39-win32.whl", hash = "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a"}, + {file = "asyncpg-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773"}, + {file = "asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851"}, +] + +[package.extras] +docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] +gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] + +[[package]] +name = "attrs" +version = "25.3.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, +] + +[package.extras] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] + 
+[[package]] +name = "authlib" +version = "1.5.2" +description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "authlib-1.5.2-py2.py3-none-any.whl", hash = "sha256:8804dd4402ac5e4a0435ac49e0b6e19e395357cfa632a3f624dcb4f6df13b4b1"}, + {file = "authlib-1.5.2.tar.gz", hash = "sha256:fe85ec7e50c5f86f1e2603518bb3b4f632985eb4a355e52256530790e326c512"}, +] + +[package.dependencies] +cryptography = "*" + +[[package]] +name = "authzed" +version = "1.21.1" +description = "Client library for SpiceDB." +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "authzed-1.21.1-py3-none-any.whl", hash = "sha256:9a830c0e9eefc506181f0d82c9a9f73405db46d50e8ecaedd4488486a2792959"}, + {file = "authzed-1.21.1.tar.gz", hash = "sha256:c354d19af5ef1a393381d5be670dd946916742573ae2bf3ac87becdbf44f093b"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.65.0,<2.0.0" +grpc-interceptor = ">=0.15.4,<0.16.0" +grpcio = ">=1.63,<2.0" +protobuf = ">=5.26,<6" +protovalidate = ">=0.7.1,<0.8.0" + +[[package]] +name = "black" +version = "25.1.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "cachetools" +version = "5.5.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, +] + +[[package]] +name = "caio" +version = "0.9.24" +description = "Asynchronous file IO for Linux MacOS or Windows." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "caio-0.9.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d80322126a97ba572412b17b2f086ff95195de2c4261deb19db6bfcdc9ef7540"}, + {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:37bc172349686139e8dc97fff7662c67b1837e18a67b99e8ef25585f2893d013"}, + {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:ad7f0902bf952237e120606252c14ab3cb05995c9f79f39154b5248744864832"}, + {file = "caio-0.9.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:925b9e3748ce1a79386dfb921c0aee450e43225534551abd1398b1c08f9ba29f"}, + {file = "caio-0.9.24-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:3b4dc0a8fb9a58ab40f967ad5a8a858cc0bfb2348a580b4142595849457f9c9a"}, + {file = "caio-0.9.24-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fa74d111b3b165bfad2e333367976bdf118bcf505a1cb44d3bcddea2849e3297"}, + {file = "caio-0.9.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae3566228383175265a7583107f21a7cb044a752ea29ba84fce7c1a49a05903"}, + {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:a306b0dda91cb4ca3170f066c114597f8ea41b3da578574a9d2b54f86963de68"}, + {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:8ee158e56128d865fb7d57a9c9c22fca4e8aa8d8664859c977a36fff3ccb3609"}, + {file = "caio-0.9.24-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d47ef8d76aca74c17cb07339a441c5530fc4b8dd9222dfb1e1abd7f9f9b814f"}, + {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:d15fc746c4bf0077d75df05939d1e97c07ccaa8e580681a77021d6929f65d9f4"}, + {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:9368eae0a9badd5f31264896c51b47431d96c0d46f1979018fb1d20c49f56156"}, + {file = "caio-0.9.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f0e5a645ef4e7bb7a81e10ae2a7aef14988cb2cb4354588c6bf6f6f3f6de72a"}, + {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_aarch64.whl", hash = "sha256:08304fa80af7771c78a5bcc923449c7ec8134d589b50d48c66320f85552c7ae2"}, + {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:5339ced0764e10242a50ccb21db7f0d9c359881db0f72fa2c5e45ed828ffacf7"}, + {file = "caio-0.9.24.tar.gz", hash = "sha256:5bcdecaea02a9aa8e3acf0364eff8ad9903d57d70cdb274a42270126290a77f1"}, +] + +[package.extras] +develop = ["aiomisc-pytest", "coveralls", "pylama[toml]", "pytest", "pytest-cov", "setuptools"] + +[[package]] +name = "casefy" +version = "1.0.0" +description = "Utilities for string case conversion." 
+optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "casefy-1.0.0-py3-none-any.whl", hash = "sha256:c89f96fb0fbd13691073b7a65c1e668e81453247d647479a3db105e86d7b0df9"}, + {file = "casefy-1.0.0.tar.gz", hash = "sha256:bc99428475c2089c5f6a21297b4cfe4e83dff132cf3bb06655ddcb90632af1ed"}, +] + +[[package]] +name = "cel-python" +version = "0.2.0" +description = "Pure Python implementation of Google Common Expression Language" +optional = false +python-versions = "<4.0,>=3.8" +groups = ["main"] +files = [ + {file = "cel_python-0.2.0-py3-none-any.whl", hash = "sha256:478ff73def7b39d51e6982f95d937a57c2b088c491c578fe5cecdbd79f476f60"}, + {file = "cel_python-0.2.0.tar.gz", hash = "sha256:75de72a5cf223ec690b236f0cc24da267219e667bd3e7f8f4f20595fcc1c0c0f"}, +] + +[package.dependencies] +jmespath = ">=1.0.1,<2.0.0" +lark = ">=0.12.0,<0.13.0" +python-dateutil = ">=2.9.0.post0,<3.0.0" +pyyaml = ">=6.0.1,<7.0.0" +types-python-dateutil = ">=2.9.0.20240316,<3.0.0.0" +types-pyyaml = ">=6.0.12.20240311,<7.0.0.0" + +[[package]] +name = "certifi" +version = "2025.1.31" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = 
"cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = 
"charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = 
"charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] + +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = 
"colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "44.0.2" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] +files = [ + {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"}, + {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"}, + {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"}, + {file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"}, + {file = "cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"}, + {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"}, + {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "dacite" +version = "1.9.2" +description = "Simple creation of 
data classes from dictionaries." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "dacite-1.9.2-py3-none-any.whl", hash = "sha256:053f7c3f5128ca2e9aceb66892b1a3c8936d02c686e707bee96e19deef4bc4a0"}, + {file = "dacite-1.9.2.tar.gz", hash = "sha256:6ccc3b299727c7aa17582f0021f6ae14d5de47c7227932c47fec4cdfefd26f09"}, +] + +[package.extras] +dev = ["black", "coveralls", "mypy", "pre-commit", "pylint", "pytest (>=5)", "pytest-benchmark", "pytest-cov"] + +[[package]] +name = "dataclasses-avroschema" +version = "0.65.10" +description = "Generate Avro Schemas from Python classes. Serialize/Deserialize python instances with avro schemas" +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "dataclasses_avroschema-0.65.10-py3-none-any.whl", hash = "sha256:5bb3cab29c990ab013001dbf5d81baf8bc66aca9703a3bae49165e825d8e1943"}, + {file = "dataclasses_avroschema-0.65.10.tar.gz", hash = "sha256:a6d7178d3db2a1f3a95469e6ac57df77226e003b09026c715930bd8c4f87474c"}, +] + +[package.dependencies] +casefy = ">=0.1.7,<1.1.0" +dacite = ">=1.8.0,<2.0.0" +fastavro = ">=1.7.3,<2.0.0" +inflection = ">=0.5.1,<0.6.0" +python-dateutil = ">=2.7,<3.0" +typing-extensions = ">=4.2.0,<5.0.0" + +[package.extras] +cli = ["dc-avro (>=0.6.4)"] +faker = ["faker (>=26.0.0,<37.0.0)"] +faust = ["faust-streaming (>=0.10.11,<0.12.0)"] +pydantic = ["pydantic[email] (>=2.4.2,<3.0.0)"] + +[[package]] +name = "datamodel-code-generator" +version = "0.24.2" +description = "Datamodel Code Generator" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "datamodel_code_generator-0.24.2-py3-none-any.whl", hash = "sha256:582c30466def12600d7165c5f624bb63a7e944eeaf8320f282518daf9ccb566c"}, + {file = "datamodel_code_generator-0.24.2.tar.gz", hash = "sha256:d278c751038c8911efc82856ec549ac1e3e13134567387a4bb5ab7ddc6543162"}, +] + +[package.dependencies] +argcomplete = ">=1.10,<4.0" +black = ">=19.10b0" +genson = ">=1.2.1,<2.0" +inflect = ">=4.1.0,<6.0" +isort = ">=4.3.21,<6.0" +jinja2 = ">=2.10.1,<4.0" +packaging = "*" +pydantic = {version = ">=1.10.0,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.4.0 || >2.4.0,<3.0", extras = ["email"], markers = "python_version >= \"3.12\" and python_version < \"4.0\""} +pyyaml = ">=6.0.1" + +[package.extras] +debug = ["PySnooper (>=0.4.1,<2.0.0)"] +http = ["httpx"] +validation = ["openapi-spec-validator (>=0.2.8,<0.7.0)", "prance (>=0.18.2)"] + +[[package]] +name = "deepmerge" +version = "2.0" +description = "A toolset for deeply merging Python dictionaries." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "deepmerge-2.0-py3-none-any.whl", hash = "sha256:6de9ce507115cff0bed95ff0ce9ecc31088ef50cbdf09bc90a09349a318b3d00"}, + {file = "deepmerge-2.0.tar.gz", hash = "sha256:5c3d86081fbebd04dd5de03626a0607b809a98fb6ccba5770b62466fe940ff20"}, +] + +[package.extras] +dev = ["black", "build", "mypy", "pytest", "pyupgrade", "twine", "validate-pyproject[all]"] + +[[package]] +name = "dnspython" +version = "2.7.0" +description = "DNS toolkit" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "durationpy" +version = "0.9" +description = "Module for converting between datetime.timedelta and Go's Duration strings." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"}, + {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, +] + +[[package]] +name = "email-validator" +version = "2.2.0" +description = "A robust email address syntax and deliverability validation library." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + +[[package]] +name = "escapism" +version = "1.0.1" +description = "Simple, generic API for escaping strings." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "escapism-1.0.1-py2.py3-none-any.whl", hash = "sha256:d28f19edc3cb1ffc36fa238956ecc068695477e748f57157c6dde00a6b77f229"}, + {file = "escapism-1.0.1.tar.gz", hash = "sha256:73256bdfb4f22230f0428fc6efecee61cdc4fad531b6f98b849cb9c80711e4ec"}, +] + +[[package]] +name = "fastavro" +version = "1.10.0" +description = "Fast read/write of AVRO files" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "fastavro-1.10.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1a9fe0672d2caf0fe54e3be659b13de3cad25a267f2073d6f4b9f8862acc31eb"}, + {file = "fastavro-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86dd0410770e0c99363788f0584523709d85e57bb457372ec5c285a482c17fe6"}, + {file = "fastavro-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:190e80dc7d77d03a6a8597a026146b32a0bbe45e3487ab4904dc8c1bebecb26d"}, + {file = "fastavro-1.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bf570d63be9155c3fdc415f60a49c171548334b70fff0679a184b69c29b6bc61"}, + {file = "fastavro-1.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e07abb6798e95dccecaec316265e35a018b523d1f3944ad396d0a93cb95e0a08"}, + {file = "fastavro-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:37203097ed11d0b8fd3c004904748777d730cafd26e278167ea602eebdef8eb2"}, + {file = "fastavro-1.10.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d183c075f527ab695a27ae75f210d4a86bce660cda2f85ae84d5606efc15ef50"}, + {file = "fastavro-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7a95a2c0639bffd7c079b59e9a796bfc3a9acd78acff7088f7c54ade24e4a77"}, + {file = "fastavro-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a678153b5da1b024a32ec3f611b2e7afd24deac588cb51dd1b0019935191a6d"}, + {file = "fastavro-1.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:67a597a5cfea4dddcf8b49eaf8c2b5ffee7fda15b578849185bc690ec0cd0d8f"}, + {file = "fastavro-1.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1fd689724760b17f69565d8a4e7785ed79becd451d1c99263c40cb2d6491f1d4"}, + {file = "fastavro-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:4f949d463f9ac4221128a51e4e34e2562f401e5925adcadfd28637a73df6c2d8"}, + {file = "fastavro-1.10.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:cfe57cb0d72f304bd0dcc5a3208ca6a7363a9ae76f3073307d095c9d053b29d4"}, + {file = "fastavro-1.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74e517440c824cb65fb29d3e3903a9406f4d7c75490cef47e55c4c82cdc66270"}, + {file = "fastavro-1.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:203c17d44cadde76e8eecb30f2d1b4f33eb478877552d71f049265dc6f2ecd10"}, + {file = "fastavro-1.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6575be7f2b5f94023b5a4e766b0251924945ad55e9a96672dc523656d17fe251"}, + {file = "fastavro-1.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe471deb675ed2f01ee2aac958fbf8ebb13ea00fa4ce7f87e57710a0bc592208"}, + {file = "fastavro-1.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:567ff515f2a5d26d9674b31c95477f3e6022ec206124c62169bc2ffaf0889089"}, + {file = "fastavro-1.10.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:82263af0adfddb39c85f9517d736e1e940fe506dfcc35bc9ab9f85e0fa9236d8"}, + {file = "fastavro-1.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:566c193109ff0ff84f1072a165b7106c4f96050078a4e6ac7391f81ca1ef3efa"}, + {file = "fastavro-1.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e400d2e55d068404d9fea7c5021f8b999c6f9d9afa1d1f3652ec92c105ffcbdd"}, + {file = "fastavro-1.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9b8227497f71565270f9249fc9af32a93644ca683a0167cfe66d203845c3a038"}, + {file = "fastavro-1.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8e62d04c65461b30ac6d314e4197ad666371e97ae8cb2c16f971d802f6c7f514"}, + {file = "fastavro-1.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:86baf8c9740ab570d0d4d18517da71626fe9be4d1142bea684db52bd5adb078f"}, + {file = "fastavro-1.10.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5bccbb6f8e9e5b834cca964f0e6ebc27ebe65319d3940b0b397751a470f45612"}, + {file = "fastavro-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0132f6b0b53f61a0a508a577f64beb5de1a5e068a9b4c0e1df6e3b66568eec4"}, + {file = "fastavro-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca37a363b711202c6071a6d4787e68e15fa3ab108261058c4aae853c582339af"}, + {file = "fastavro-1.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:cf38cecdd67ca9bd92e6e9ba34a30db6343e7a3bedf171753ee78f8bd9f8a670"}, + {file = "fastavro-1.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f4dd10e0ed42982122d20cdf1a88aa50ee09e5a9cd9b39abdffb1aa4f5b76435"}, + {file = "fastavro-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:aaef147dc14dd2d7823246178fd06fc5e477460e070dc6d9e07dd8193a6bc93c"}, + {file = "fastavro-1.10.0.tar.gz", hash = "sha256:47bf41ac6d52cdfe4a3da88c75a802321321b37b663a900d12765101a5d6886f"}, +] + +[package.extras] +codecs = ["cramjam", "lz4", "zstandard"] +lz4 = ["lz4"] +snappy = ["cramjam"] +zstandard = ["zstandard"] + +[[package]] +name = "frozenlist" +version = "1.6.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e6e558ea1e47fd6fa8ac9ccdad403e5dd5ecc6ed8dda94343056fa4277d5c65e"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4b3cd7334a4bbc0c472164f3744562cb72d05002cc6fcf58adb104630bbc352"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9799257237d0479736e2b4c01ff26b5c7f7694ac9692a426cb717f3dc02fff9b"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a7bb0fe1f7a70fb5c6f497dc32619db7d2cdd53164af30ade2f34673f8b1fc"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:36d2fc099229f1e4237f563b2a3e0ff7ccebc3999f729067ce4e64a97a7f2869"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f27a9f9a86dcf00708be82359db8de86b80d029814e6693259befe82bb58a106"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75ecee69073312951244f11b8627e3700ec2bfe07ed24e3a685a5979f0412d24"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2c7d5aa19714b1b01a0f515d078a629e445e667b9da869a3cd0e6fe7dec78bd"}, + {file = 
"frozenlist-1.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69bbd454f0fb23b51cadc9bdba616c9678e4114b6f9fa372d462ff2ed9323ec8"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7daa508e75613809c7a57136dec4871a21bca3080b3a8fc347c50b187df4f00c"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:89ffdb799154fd4d7b85c56d5fa9d9ad48946619e0eb95755723fffa11022d75"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:920b6bd77d209931e4c263223381d63f76828bec574440f29eb497cf3394c249"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d3ceb265249fb401702fce3792e6b44c1166b9319737d21495d3611028d95769"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:52021b528f1571f98a7d4258c58aa8d4b1a96d4f01d00d51f1089f2e0323cb02"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0f2ca7810b809ed0f1917293050163c7654cefc57a49f337d5cd9de717b8fad3"}, + {file = "frozenlist-1.6.0-cp310-cp310-win32.whl", hash = "sha256:0e6f8653acb82e15e5443dba415fb62a8732b68fe09936bb6d388c725b57f812"}, + {file = "frozenlist-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a39819a5a3e84304cd286e3dc62a549fe60985415851b3337b6f5cc91907f1"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae8337990e7a45683548ffb2fee1af2f1ed08169284cd829cdd9a7fa7470530d"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c952f69dd524558694818a461855f35d36cc7f5c0adddce37e962c85d06eac0"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f5fef13136c4e2dee91bfb9a44e236fff78fc2cd9f838eddfc470c3d7d90afe"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:716bbba09611b4663ecbb7cd022f640759af8259e12a6ca939c0a6acd49eedba"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7b8c4dc422c1a3ffc550b465090e53b0bf4839047f3e436a34172ac67c45d595"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b11534872256e1666116f6587a1592ef395a98b54476addb5e8d352925cb5d4a"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c6eceb88aaf7221f75be6ab498dc622a151f5f88d536661af3ffc486245a626"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62c828a5b195570eb4b37369fcbbd58e96c905768d53a44d13044355647838ff"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c6bd2c6399920c9622362ce95a7d74e7f9af9bfec05fff91b8ce4b9647845a"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49ba23817781e22fcbd45fd9ff2b9b8cdb7b16a42a4851ab8025cae7b22e96d0"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:431ef6937ae0f853143e2ca67d6da76c083e8b1fe3df0e96f3802fd37626e606"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9d124b38b3c299ca68433597ee26b7819209cb8a3a9ea761dfe9db3a04bba584"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:118e97556306402e2b010da1ef21ea70cb6d6122e580da64c056b96f524fbd6a"}, + {file 
= "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb3b309f1d4086b5533cf7bbcf3f956f0ae6469664522f1bde4feed26fba60f1"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54dece0d21dce4fdb188a1ffc555926adf1d1c516e493c2914d7c370e454bc9e"}, + {file = "frozenlist-1.6.0-cp311-cp311-win32.whl", hash = "sha256:654e4ba1d0b2154ca2f096bed27461cf6160bc7f504a7f9a9ef447c293caf860"}, + {file = "frozenlist-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e911391bffdb806001002c1f860787542f45916c3baf764264a52765d5a5603"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770"}, + {file = "frozenlist-1.6.0-cp312-cp312-win32.whl", hash = "sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc"}, + {file = "frozenlist-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e"}, + {file = "frozenlist-1.6.0-cp313-cp313-win32.whl", hash = "sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4"}, + {file = "frozenlist-1.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0"}, + {file = 
"frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win32.whl", hash = "sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:536a1236065c29980c15c7229fbb830dedf809708c10e159b8136534233545f0"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ed5e3a4462ff25ca84fb09e0fada8ea267df98a450340ead4c91b44857267d70"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e19c0fc9f4f030fcae43b4cdec9e8ab83ffe30ec10c79a4a43a04d1af6c5e1ad"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c608f833897501dac548585312d73a7dca028bf3b8688f0d712b7acfaf7fb3"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0dbae96c225d584f834b8d3cc688825911960f003a85cb0fd20b6e5512468c42"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:625170a91dd7261a1d1c2a0c1a353c9e55d21cd67d0852185a5fef86587e6f5f"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1db8b2fc7ee8a940b547a14c10e56560ad3ea6499dc6875c354e2335812f739d"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4da6fc43048b648275a220e3a61c33b7fff65d11bdd6dcb9d9c145ff708b804c"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef8e7e8f2f3820c5f175d70fdd199b79e417acf6c72c5d0aa8f63c9f721646f"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa733d123cc78245e9bb15f29b44ed9e5780dc6867cfc4e544717b91f980af3b"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = 
"sha256:ba7f8d97152b61f22d7f59491a781ba9b177dd9f318486c5fbc52cde2db12189"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:56a0b8dd6d0d3d971c91f1df75e824986667ccce91e20dca2023683814344791"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5c9e89bf19ca148efcc9e3c44fd4c09d5af85c8a7dd3dbd0da1cb83425ef4983"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1330f0a4376587face7637dfd245380a57fe21ae8f9d360c1c2ef8746c4195fa"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2187248203b59625566cac53572ec8c2647a140ee2738b4e36772930377a533c"}, + {file = "frozenlist-1.6.0-cp39-cp39-win32.whl", hash = "sha256:2b8cf4cfea847d6c12af06091561a89740f1f67f331c3fa8623391905e878530"}, + {file = "frozenlist-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:1255d5d64328c5a0d066ecb0f02034d086537925f1f04b50b1ae60d37afbf572"}, + {file = "frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191"}, + {file = "frozenlist-1.6.0.tar.gz", hash = "sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68"}, +] + +[[package]] +name = "genson" +version = "1.3.0" +description = "GenSON is a powerful, user-friendly JSON Schema generator." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7"}, + {file = "genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37"}, +] + +[[package]] +name = "google-auth" +version = "2.39.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_auth-2.39.0-py2.py3-none-any.whl", hash = "sha256:0150b6711e97fb9f52fe599f55648950cc4540015565d8fbb31be2ad6e1548a2"}, + {file = "google_auth-2.39.0.tar.gz", hash = "sha256:73222d43cdc35a3aeacbfdcaf73142a97839f10de930550d89ebfe1d0a00cde7"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0)"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +urllib3 = ["packaging", "urllib3"] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, + {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, 
+] + +[package.dependencies] +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0)"] + +[[package]] +name = "greenlet" +version = "3.2.1" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" +files = [ + {file = "greenlet-3.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:777c1281aa7c786738683e302db0f55eb4b0077c20f1dc53db8852ffaea0a6b0"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3059c6f286b53ea4711745146ffe5a5c5ff801f62f6c56949446e0f6461f8157"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e1a40a17e2c7348f5eee5d8e1b4fa6a937f0587eba89411885a36a8e1fc29bd2"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5193135b3a8d0017cb438de0d49e92bf2f6c1c770331d24aa7500866f4db4017"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:639a94d001fe874675b553f28a9d44faed90f9864dc57ba0afef3f8d76a18b04"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8fe303381e7e909e42fb23e191fc69659910909fdcd056b92f6473f80ef18543"}, + {file = "greenlet-3.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:72c9b668454e816b5ece25daac1a42c94d1c116d5401399a11b77ce8d883110c"}, + {file = "greenlet-3.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6079ae990bbf944cf66bea64a09dcb56085815630955109ffa98984810d71565"}, + {file = "greenlet-3.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:e63cd2035f49376a23611fbb1643f78f8246e9d4dfd607534ec81b175ce582c2"}, + {file = "greenlet-3.2.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:aa30066fd6862e1153eaae9b51b449a6356dcdb505169647f69e6ce315b9468b"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b0f3a0a67786facf3b907a25db80efe74310f9d63cc30869e49c79ee3fcef7e"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64a4d0052de53ab3ad83ba86de5ada6aeea8f099b4e6c9ccce70fb29bc02c6a2"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:852ef432919830022f71a040ff7ba3f25ceb9fe8f3ab784befd747856ee58530"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4818116e75a0dd52cdcf40ca4b419e8ce5cb6669630cb4f13a6c384307c9543f"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9afa05fe6557bce1642d8131f87ae9462e2a8e8c46f7ed7929360616088a3975"}, + {file = "greenlet-3.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5c12f0d17a88664757e81a6e3fc7c2452568cf460a2f8fb44f90536b2614000b"}, + {file = "greenlet-3.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dbb4e1aa2000852937dd8f4357fb73e3911da426df8ca9b8df5db231922da474"}, + {file = "greenlet-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:cb5ee928ce5fedf9a4b0ccdc547f7887136c4af6109d8f2fe8e00f90c0db47f5"}, 
+ {file = "greenlet-3.2.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:0ba2811509a30e5f943be048895a983a8daf0b9aa0ac0ead526dfb5d987d80ea"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4245246e72352b150a1588d43ddc8ab5e306bef924c26571aafafa5d1aaae4e8"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7abc0545d8e880779f0c7ce665a1afc3f72f0ca0d5815e2b006cafc4c1cc5840"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6dcc6d604a6575c6225ac0da39df9335cc0c6ac50725063fa90f104f3dbdb2c9"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2273586879affca2d1f414709bb1f61f0770adcabf9eda8ef48fd90b36f15d12"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ff38c869ed30fff07f1452d9a204ece1ec6d3c0870e0ba6e478ce7c1515acf22"}, + {file = "greenlet-3.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e934591a7a4084fa10ee5ef50eb9d2ac8c4075d5c9cf91128116b5dca49d43b1"}, + {file = "greenlet-3.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:063bcf7f8ee28eb91e7f7a8148c65a43b73fbdc0064ab693e024b5a940070145"}, + {file = "greenlet-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7132e024ebeeeabbe661cf8878aac5d2e643975c4feae833142592ec2f03263d"}, + {file = "greenlet-3.2.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:e1967882f0c42eaf42282a87579685c8673c51153b845fde1ee81be720ae27ac"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e77ae69032a95640a5fe8c857ec7bee569a0997e809570f4c92048691ce4b437"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3227c6ec1149d4520bc99edac3b9bc8358d0034825f3ca7572165cb502d8f29a"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ddda0197c5b46eedb5628d33dad034c455ae77708c7bf192686e760e26d6a0c"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de62b542e5dcf0b6116c310dec17b82bb06ef2ceb696156ff7bf74a7a498d982"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c07a0c01010df42f1f058b3973decc69c4d82e036a951c3deaf89ab114054c07"}, + {file = "greenlet-3.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2530bfb0abcd451ea81068e6d0a1aac6dabf3f4c23c8bd8e2a8f579c2dd60d95"}, + {file = "greenlet-3.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1c472adfca310f849903295c351d297559462067f618944ce2650a1878b84123"}, + {file = "greenlet-3.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:24a496479bc8bd01c39aa6516a43c717b4cee7196573c47b1f8e1011f7c12495"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:175d583f7d5ee57845591fc30d852b75b144eb44b05f38b67966ed6df05c8526"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ecc9d33ca9428e4536ea53e79d781792cee114d2fa2695b173092bdbd8cd6d5"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f56382ac4df3860ebed8ed838f268f03ddf4e459b954415534130062b16bc32"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:cc45a7189c91c0f89aaf9d69da428ce8301b0fd66c914a499199cfb0c28420fc"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51a2f49da08cff79ee42eb22f1658a2aed60c72792f0a0a95f5f0ca6d101b1fb"}, + {file = "greenlet-3.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:0c68bbc639359493420282d2f34fa114e992a8724481d700da0b10d10a7611b8"}, + {file = "greenlet-3.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:e775176b5c203a1fa4be19f91da00fd3bff536868b77b237da3f4daa5971ae5d"}, + {file = "greenlet-3.2.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:d6668caf15f181c1b82fb6406f3911696975cc4c37d782e19cb7ba499e556189"}, + {file = "greenlet-3.2.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:17964c246d4f6e1327edd95e2008988a8995ae3a7732be2f9fc1efed1f1cdf8c"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04b4ec7f65f0e4a1500ac475c9343f6cc022b2363ebfb6e94f416085e40dea15"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b38d53cf268da963869aa25a6e4cc84c1c69afc1ae3391738b2603d110749d01"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a7490f74e8aabc5f29256765a99577ffde979920a2db1f3676d265a3adba41"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4339b202ac20a89ccd5bde0663b4d00dc62dd25cb3fb14f7f3034dec1b0d9ece"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a750f1046994b9e038b45ae237d68153c29a3a783075211fb1414a180c8324b"}, + {file = "greenlet-3.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:374ffebaa5fbd10919cd599e5cf8ee18bae70c11f9d61e73db79826c8c93d6f9"}, + {file = "greenlet-3.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b89e5d44f55372efc6072f59ced5ed1efb7b44213dab5ad7e0caba0232c6545"}, + {file = "greenlet-3.2.1-cp39-cp39-win32.whl", hash = "sha256:b7503d6b8bbdac6bbacf5a8c094f18eab7553481a1830975799042f26c9e101b"}, + {file = "greenlet-3.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:e98328b8b8f160925d6b1c5b1879d8e64f6bd8cf11472b7127d579da575b77d9"}, + {file = "greenlet-3.2.1.tar.gz", hash = "sha256:9f4dd4b4946b14bb3bf038f81e1d2e535b7d94f1b2a59fdba1293cd9c1a0a4d7"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpc-interceptor" +version = "0.15.4" +description = "Simplifies gRPC interceptors" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "grpc-interceptor-0.15.4.tar.gz", hash = "sha256:1f45c0bcb58b6f332f37c637632247c9b02bc6af0fdceb7ba7ce8d2ebbfb0926"}, + {file = "grpc_interceptor-0.15.4-py3-none-any.whl", hash = "sha256:0035f33228693ed3767ee49d937bac424318db173fef4d2d0170b3215f254d9d"}, +] + +[package.dependencies] +grpcio = ">=1.49.1,<2.0.0" + +[package.extras] +testing = ["protobuf (>=4.21.9)"] + +[[package]] +name = "grpcio" +version = "1.71.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, + {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = 
"sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"}, + {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"}, + {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"}, + {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"}, + {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"}, + {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"}, + {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"}, + {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"}, + {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"}, + {file = 
"grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"}, + {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"}, + {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"}, + {file = "grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"}, + {file = "grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"}, + {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"}, + {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"}, + {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"}, + {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"}, + {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"}, + {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"}, + {file = "grpcio-1.71.0.tar.gz", hash = 
"sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.71.0)"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "html5tagger" +version = "1.3.0" +description = "Pythonic HTML generation/templating (no template files)" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "html5tagger-1.3.0-py3-none-any.whl", hash = "sha256:ce14313515edffec8ed8a36c5890d023922641171b4e6e5774ad1a74998f5351"}, + {file = "html5tagger-1.3.0.tar.gz", hash = "sha256:84fa3dfb49e5c83b79bbd856ab7b1de8e2311c3bb46a8be925f119e3880a8da9"}, +] + +[[package]] +name = "httpcore" +version = "1.0.8" +description = "A minimal low-level HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be"}, + {file = "httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httptools" +version = "0.6.4" +description = "A collection of framework independent HTTP protocol utils." 
+optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = 
"sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, +] + +[package.extras] +test = ["Cython (>=0.29.24)"] + +[[package]] +name = "httpx" +version = "0.28.1" +description = "The next generation HTTP client." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" + +[package.extras] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "httpx-ws" +version = "0.7.2" +description = "WebSockets support for HTTPX" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "httpx_ws-0.7.2-py3-none-any.whl", hash = "sha256:dd7bf9dbaa96dcd5cef1af3a7e1130cfac068bebecce25a74145022f5a8427a3"}, + {file = "httpx_ws-0.7.2.tar.gz", hash = "sha256:93edea6c8fc313464fc287bff7d2ad20e6196b7754c76f946f73b4af79886d4e"}, +] + +[package.dependencies] +anyio = ">=4" +httpcore = ">=1.0.4" +httpx = ">=0.23.1" +wsproto = "*" + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "inflect" +version = "5.6.2" +description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "inflect-5.6.2-py3-none-any.whl", hash = "sha256:b45d91a4a28a4e617ff1821117439b06eaa86e2a4573154af0149e9be6687238"}, + {file = "inflect-5.6.2.tar.gz", hash = "sha256:aadc7ed73928f5e014129794bbac03058cca35d0a973a5fc4eb45c7fa26005f9"}, +] + +[package.extras] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\""] + +[[package]] +name = "inflection" +version = "0.5.1" +description = "A port of Ruby on Rails inflector to Python" +optional = false +python-versions = ">=3.5" +groups = ["main"] +files = [ + {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, + {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, +] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jinja2" +version = "3.1.6" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "kr8s" +version = "0.20.7" +description = "A Kubernetes API library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "kr8s-0.20.7-py3-none-any.whl", hash = "sha256:e489b97ff513c167f427f479ad5420c78adffd1a6ce5033b079109374200c0c6"}, + {file = "kr8s-0.20.7.tar.gz", hash = "sha256:ac45e966beea0f6f92f635b3e61e64b8e27962b4825d77b814a663e819a8ec16"}, +] + +[package.dependencies] +anyio = ">=3.7.0" +asyncache = ">=0.3.1" +cryptography = ">=35" +httpx = ">=0.24.1" +httpx-ws = ">=0.7.0" +python-box = ">=7.0.1" +python-jsonpath = ">=0.7.1" +pyyaml = ">=6.0" +typing-extensions = ">=4.12.2" + +[package.extras] +docs = ["furo (>=2023.3.27)", "myst-parser (>=1.0.0)", "sphinx (>=5.3.0)", "sphinx-autoapi (>=2.1.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-copybutton (>=0.5.1)", "sphinx-design (>=0.3.0)", "sphinxcontrib-mermaid (>=0.8.1)"] +test = ["kubernetes (>=26.1.0)", "kubernetes-asyncio (>=24.2.3)", "kubernetes-validate (>=1.28.0)", "lightkube (>=0.13.0)", "pykube-ng (>=23.6.0)", "pytest (>=7.2.2)", "pytest-asyncio (>=0.20.3)", "pytest-cov (>=4.0.0)", "pytest-kind (>=22.11.1)", "pytest-rerunfailures (>=11.1.2)", "pytest-timeout (>=2.1.0)", "trio (>=0.22.0)", "types-pyyaml (>=6.0)"] + +[[package]] +name = "kubernetes" +version = "31.0.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, + {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +durationpy = ">=0.7" +google-auth = ">=1.0.1" +oauthlib = ">=3.2.2" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +six = ">=1.9.0" +urllib3 = ">=1.24.2" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + +[[package]] +name = "kubernetes-asyncio" +version = "32.3.0" +description = "Kubernetes asynchronous python client" +optional = false +python-versions = "*" +groups = ["main"] 
+files = [ + {file = "kubernetes_asyncio-32.3.0-py3-none-any.whl", hash = "sha256:3a0769d4bf39c638e474c76cd22f4aa81903db5ebd14573c1e3b3b7ebbf86fbc"}, + {file = "kubernetes_asyncio-32.3.0.tar.gz", hash = "sha256:3efdc39776f4e1c892ce08b74364e67be6c1d6870cba01ab27bb296fdc6fc485"}, +] + +[package.dependencies] +aiohttp = ">=3.9.0,<4.0.0" +certifi = ">=14.05.14" +python-dateutil = ">=2.5.3" +pyyaml = ">=3.12" +six = ">=1.9.0" +urllib3 = ">=1.24.2" + +[[package]] +name = "lark" +version = "0.12.0" +description = "a modern parsing library" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "lark-0.12.0-py2.py3-none-any.whl", hash = "sha256:ed1d891cbcf5151ead1c1d14663bf542443e579e63a76ae175b01b899bd854ca"}, + {file = "lark-0.12.0.tar.gz", hash = "sha256:7da76fcfddadabbbbfd949bbae221efd33938451d90b1fefbbc423c3cccf48ef"}, +] + +[package.extras] +atomic-cache = ["atomicwrites"] +nearley = ["js2py"] +regex = ["regex"] + +[[package]] +name = "mako" +version = "1.3.10" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"}, + {file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"}, +] + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[package.extras] +babel = ["Babel"] +lingua = ["lingua"] +testing = ["pytest"] + +[[package]] +name = "markdown-code-runner" +version = "2.2.0" +description = "Automatically execute code blocks within a Markdown file and update the output in-place" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "markdown_code_runner-2.2.0-py3-none-any.whl", hash = "sha256:d8812c48ad3fd4a3f3725dfcd5a1b7e5baf7216855eeea8a92c7fd9120717ac6"}, + {file = "markdown_code_runner-2.2.0.tar.gz", hash = "sha256:3c495998a437bc7d7a4b1a5ce518bce10cf5ba0fa69c569fee1e32c5238603c4"}, +] + +[package.extras] +test = ["coverage", "pre-commit", "pytest", "pytest-cov"] + +[[package]] +name = "markupsafe" +version = "3.0.2" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] + +[[package]] +name = "marshmallow" +version = "3.26.1" +description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c"}, + {file = "marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6"}, +] + +[package.dependencies] +packaging = ">=17.0" + +[package.extras] +dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] +docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"] +tests = ["pytest", "simplejson"] + +[[package]] +name = "multidict" +version = "6.4.3" +description = "multidict implementation" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5"}, + {file = "multidict-6.4.3-cp310-cp310-win32.whl", hash = "sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e"}, + {file = "multidict-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887"}, + {file = 
"multidict-6.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7"}, + {file = "multidict-6.4.3-cp311-cp311-win32.whl", hash = "sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378"}, + {file = "multidict-6.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc"}, + {file = 
"multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a"}, + {file = "multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124"}, + {file = "multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8"}, + {file 
= "multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8"}, + {file = "multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3"}, + {file = "multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4"}, + {file = "multidict-6.4.3-cp313-cp313t-win32.whl", hash = 
"sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5"}, + {file = "multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df"}, + {file = "multidict-6.4.3-cp39-cp39-win32.whl", hash = "sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f"}, + {file = "multidict-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897"}, + {file = "multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9"}, + {file = "multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "parsy" +version = "2.1" +description = "Easy-to-use parser combinators, for parsing in pure Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "parsy-2.1-py3-none-any.whl", hash = "sha256:8f18e7b11985e7802e7e3ecbd8291c6ca243d29820b1186e4c84605db4efffa0"}, + {file = "parsy-2.1.tar.gz", hash = "sha256:fd5dd18d7b0b61f8275ee88665f430a20c02cf5a82d88557f35330530186d7ac"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.7" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, + {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "prometheus-client" +version = "0.7.1" +description = "Python client for the Prometheus monitoring system." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "prometheus_client-0.7.1.tar.gz", hash = "sha256:71cd24a2b3eb335cb800c7159f423df1bd4dcd5171b234be15e3f31ec9f622da"}, +] + +[package.extras] +twisted = ["twisted"] + +[[package]] +name = "prometheus-sanic" +version = "3.0.0" +description = "Exposes Prometheus monitoring metrics of Sanic apps." +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "prometheus-sanic-3.0.0.tar.gz", hash = "sha256:06cfe8f9c843a1324fa801b9092f26470a63196b9e08fad0c0f12b49ddbf6c3c"}, + {file = "prometheus_sanic-3.0.0-py3-none-any.whl", hash = "sha256:499110bf2a86f921b229083e0bcea4d489420abf6737e0d838cd234394fd91aa"}, +] + +[package.dependencies] +prometheus-client = ">=0.7.1,<0.8.0" +sanic = ">=22.0.0" + +[[package]] +name = "propcache" +version = "0.3.1" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136"}, + {file = "propcache-0.3.1-cp310-cp310-win32.whl", hash = "sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42"}, + {file = "propcache-0.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9"}, + {file = "propcache-0.3.1-cp311-cp311-win32.whl", hash = "sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005"}, + {file = "propcache-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25"}, 
+ {file = "propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7"}, + {file = "propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b"}, + {file = "propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef"}, + {file = "propcache-0.3.1-cp313-cp313-win32.whl", hash = "sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24"}, + {file = "propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a"}, + {file = "propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d"}, + {file = "propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894"}, + {file = 
"propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe"}, + {file = "propcache-0.3.1-cp39-cp39-win32.whl", hash = "sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64"}, + {file = "propcache-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566"}, + {file = "propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40"}, + {file = "propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf"}, +] + +[[package]] +name = "protobuf" +version = "5.29.4" +description = "" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"}, + {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"}, + {file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"}, + {file = "protobuf-5.29.4-cp38-cp38-win32.whl", hash = "sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de"}, + {file = "protobuf-5.29.4-cp38-cp38-win_amd64.whl", hash = "sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68"}, + {file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"}, + {file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"}, + {file = "protobuf-5.29.4-py3-none-any.whl", hash = 
"sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"}, + {file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"}, +] + +[[package]] +name = "protovalidate" +version = "0.7.1" +description = "Protocol Buffer Validation for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "protovalidate-0.7.1-py3-none-any.whl", hash = "sha256:6788b1baa10c2e9453c3a3eef5f87a3e9c871bc9a7110b506aefd764269c8b3e"}, + {file = "protovalidate-0.7.1.tar.gz", hash = "sha256:12bd7c126fc000c5cbee5bf0f4cd01e0ba0e353f585b0aaa68df03e788939412"}, +] + +[package.dependencies] +cel-python = "*" +protobuf = "*" + +[[package]] +name = "psycopg" +version = "3.2.6" +description = "PostgreSQL database adapter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "psycopg-3.2.6-py3-none-any.whl", hash = "sha256:f3ff5488525890abb0566c429146add66b329e20d6d4835662b920cbbf90ac58"}, + {file = "psycopg-3.2.6.tar.gz", hash = "sha256:16fa094efa2698f260f2af74f3710f781e4a6f226efe9d1fd0c37f384639ed8a"}, +] + +[package.dependencies] +psycopg-binary = {version = "3.2.6", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +binary = ["psycopg-binary (==3.2.6) ; implementation_name != \"pypy\""] +c = ["psycopg-c (==3.2.6) ; implementation_name != \"pypy\""] +dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] +pool = ["psycopg-pool"] +test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] + +[[package]] +name = "psycopg-binary" +version = "3.2.6" +description = "PostgreSQL database adapter for Python -- C optimisation distribution" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "implementation_name != \"pypy\"" +files = [ + {file = "psycopg_binary-3.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1b639acb3e24243c23f75700bf6e3af7b76da92523ec7c3196a13aaf0b578453"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1b5c359173726b38d7acbb9f73270f269591d8031d099c1a70dd3f3d22b0e8a8"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3434efe7c00f505f4c1e531519dac6c701df738ba7a1328eac81118d80019132"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bca8d9643191b13193940bbf84d51ac5a747e965c230177258fb02b8043fb7a"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55fa40f11d37e6e5149a282a5fd7e0734ce55c623673bfba638480914fd1414c"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0690ac1061c655b1bcbe9284d07bf5276bc9c0d788a6c74aaf3b042e64984b83"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e9a4a9967ff650d2821d5fad6bec7b15f4c2072603e9fa3f89a39f351ade1fd3"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:d6f2894cc7aee8a15fe591e8536911d9c015cb404432cf7bdac2797e54cb2ba8"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:05560c81312d7c2bee95a9860cd25198677f2320fb4a3527bc04e8cae7fcfb64"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4269cd23a485d6dd6eb6b10841c94551a53091cf0b1b6d5247a6a341f53f0d95"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:7942f35a6f314608720116bcd9de240110ceadffd2ac5c34f68f74a31e52e46a"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7afe181f6b3eb714362e9b6a2dc2a589bff60471a1d8639fd231a4e426e01523"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34bb0fceba0773dc0bfb53224bb2c0b19dc97ea0a997a223615484cf02cae55c"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54120122d2779dcd307f49e1f921d757fe5dacdced27deab37f277eef0c52a5b"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:816aa556f63b2303e66ba6c8888a8b3f3e6e4e47049ec7a4d62c84ac60b091ca"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19a0ba351eda9a59babf8c7c9d89c7bbc5b26bf096bc349b096bd0dd2482088"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e197e01290ef818a092c877025fc28096adbb6d0743e313491a21aab31bd96"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:274794b4b29ef426e09086404446b61a146f5e756da71366c5a6d57abec31f7d"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:69845bdc0db519e1dfc27932cd3d5b1ecb3f72950af52a1987508ab0b52b3b55"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:66c3bed2caf0d1cabcb9365064de183b5209a7cbeaa131e79e68f350c9c963c2"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e3ae3201fe85c7f901349a2cf52f02ceca4cb97a5e2e2ac8b8a1c9a6eb747bed"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:58f443b4df2adb59937c96775fadf4967f93d952fbcc82394446985faec11041"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f27a46ff0497e882e8c0286e8833c785b4d1a80f23e1bf606f4c90e5f9f3ce75"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b30ee4821ded7de48b8048b14952512588e7c5477b0a5965221e1798afba61a1"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e57edf3b1f5427f39660225b01f8e7b97f5cfab132092f014bf1638bc85d81d2"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c5172ce3e4ae7a4fd450070210f801e2ce6bc0f11d1208d29268deb0cda34de"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcfab3804c43571a6615e559cdc4c4115785d258a4dd71a721be033f5f5f378d"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fa1c920cce16f1205f37b20c685c58b9656b170b8b4c93629100d342d0d118e"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e118d818101c1608c6b5ba52a6c977614d8f05aa89467501172ba4d10588e11"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:763319a8bfeca77d31512da71f5a33459b9568a7621c481c3828c62f9c38f351"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2fbc05819560389dbece046966bc88e0f2ea77673497e274c4293b8b4c1d0703"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a57f99bb953b4bd6f32d0a9844664e7f6ca5ead9ba40e96635be3cd30794813"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:5de6809e19a465dcb9c269675bded46a135f2d600cd99f0735afbb21ddad2af4"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54af3fbf871baa2eb19df96fd7dc0cbd88e628a692063c3d1ab5cdd00aa04322"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ad5da1e4636776c21eaeacdec42f25fa4612631a12f25cd9ab34ddf2c346ffb9"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7956b9ea56f79cd86eddcfbfc65ae2af1e4fe7932fa400755005d903c709370"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e2efb763188008cf2914820dcb9fb23c10fe2be0d2c97ef0fac7cec28e281d8"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b3aab3451679f1e7932270e950259ed48c3b79390022d3f660491c0e65e4838"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:849a370ac4e125f55f2ad37f928e588291a67ccf91fa33d0b1e042bb3ee1f986"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:566d4ace928419d91f1eb3227fc9ef7b41cf0ad22e93dd2c3368d693cf144408"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f1981f13b10de2f11cfa2f99a8738b35b3f0a0f3075861446894a8d3042430c0"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:36f598300b55b3c983ae8df06473ad27333d2fd9f3e2cfdb913b3a5aaa3a8bcf"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0f4699fa5fe1fffb0d6b2d14b31fd8c29b7ea7375f89d5989f002aaf21728b21"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:afe697b8b0071f497c5d4c0f41df9e038391534f5614f7fb3a8c1ca32d66e860"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da5554553b8d9fb7ab6bb1a37cc53f20ada9024916c60f40c09ab1a675323f2f"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b7e3ccc43c395edba8039c9e407b01ed1844304c7f2f4aa99d34d04ed067c83"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d55405efc8a96aa0ecb2d5d6af552d35c744f160b133fa690814a68d9a952c8"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:58d5cfb1687b69b3484a034d1aa6e5c11f0c1d46757e978ed59fab59ce83fd37"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3761c4107dab218c32ce4b10b1ae5ed686d41b882bfcb05f5bebc2be9488442f"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:45f1526e12cb480586c74670f46563d3090fc2a93e859ccf71efae61f04cef4b"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b4d4fd4415d5219785fb082e28d84be4fbd90c3bff3d861877db0aa6b0edd70b"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = 
"sha256:eb8a1e6b8130fee0b48107739e09553d50c6f031d0b3fcc33f885bb64fa01105"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7adf1460c05f7366f0fe9cf2d24e46abca9eb621705322bbd0c3f3e3a5edb2b4"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:28505f52ceef60554b5ab3289bf5aed2e7e57fa8e9a59a979d82db944e256a6c"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:260c43c329e668606388cee78ec0dab083a25c2c6e6f9cf74a130fd5a27b0f87"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9870e51fad4684dbdec057fa757d65e61cb2acb16236836e9360044c2a1ec880"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030e9c3082a931e972b029b3cef085784a3bf7f8e18367ae50d5b809aa6e1d87"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60c9ed291fbd5e777c2c630dcfd10b7a87d68512b0757d5e7406d9c4895a82a"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e0f4a17a9c376c195e403b4826c18f325bd28f425231d36d1036258bf893e23"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac46da609624b16d961f604b3cbc3233ef43211ef1456a188f8c427109c9c3e1"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e77949b8e7014b85cee0bf6e9e041bcae7719b2693ebf59236368fb0b2a08814"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:532322d9ef6e7d178a4f344970b017110633bcc3dc1c3403efcef55aad612517"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:880c5fd76dcb50bdcc8f87359e5a6c7eb416697cc9aa02854c91223bd999c045"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3c0cddc7458b8416d77cd8829d0192466502f31d1fb853d58613cf13ac64f41c"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:ea158665676f42b19585dfe948071d3c5f28276f84a97522fb2e82c1d9194563"}, +] + +[[package]] +name = "pyasn1" +version = "0.6.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, +] + +[package.dependencies] +pyasn1 = ">=0.6.1,<0.7.0" + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, 
+] + +[[package]] +name = "pydantic" +version = "2.11.3" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"}, + {file = "pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} +pydantic-core = "2.33.1" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3077cfdb6125cc8dab61b155fdd714663e401f0e6883f9632118ec12cf42df26"}, + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ffab8b2908d152e74862d276cf5017c81a2f3719f14e8e3e8d6b83fda863927"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5183e4f6a2d468787243ebcd70cf4098c247e60d73fb7d68d5bc1e1beaa0c4db"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:398a38d323f37714023be1e0285765f0a27243a8b1506b7b7de87b647b517e48"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d3776f0001b43acebfa86f8c64019c043b55cc5a6a2e313d728b5c95b46969"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c566dd9c5f63d22226409553531f89de0cac55397f2ab8d97d6f06cfce6d947e"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d5f3acc81452c56895e90643a625302bd6be351e7010664151cc55b7b97f89"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3a07fadec2a13274a8d861d3d37c61e97a816beae717efccaa4b36dfcaadcde"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f99aeda58dce827f76963ee87a0ebe75e648c72ff9ba1174a253f6744f518f65"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:902dbc832141aa0ec374f4310f1e4e7febeebc3256f00dc359a9ac3f264a45dc"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fe44d56aa0b00d66640aa84a3cbe80b7a3ccdc6f0b1ca71090696a6d4777c091"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win32.whl", hash = "sha256:ed3eb16d51257c763539bde21e011092f127a2202692afaeaccb50db55a31383"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win_amd64.whl", hash = "sha256:694ad99a7f6718c1a498dc170ca430687a39894a60327f548e02a9c7ee4b6504"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win32.whl", hash = "sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_arm64.whl", hash = "sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a"}, + {file = 
"pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5ab77f45d33d264de66e1884fca158bc920cb5e27fd0764a72f72f5756ae8bdb"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:e7aaba1b4b03aaea7bb59e1b5856d734be011d3e6d98f5bcaa98cb30f375f2ad"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fb66263e9ba8fea2aa85e1e5578980d127fb37d7f2e292773e7bc3a38fb0c7b"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f2648b9262607a7fb41d782cc263b48032ff7a03a835581abbf7a3bec62bcf5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:723c5630c4259400818b4ad096735a829074601805d07f8cafc366d95786d331"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d100e3ae783d2167782391e0c1c7a20a31f55f8015f3293647544df3f9c67824"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177d50460bc976a0369920b6c744d927b0ecb8606fb56858ff542560251b19e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3edde68d1a1f9af1273b2fe798997b33f90308fb6d44d8550c89fc6a3647cf6"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a62c3c3ef6a7e2c45f7853b10b5bc4ddefd6ee3cd31024754a1a5842da7d598d"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:c91dbb0ab683fa0cd64a6e81907c8ff41d6497c346890e26b23de7ee55353f96"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f466e8bf0a62dc43e068c12166281c2eca72121dd2adc1040f3aa1e21ef8599"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win32.whl", hash = "sha256:ab0277cedb698749caada82e5d099dc9fed3f906a30d4c382d1a21725777a1e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win_amd64.whl", hash = "sha256:5773da0ee2d17136b1f1c6fbde543398d452a6ad2a7b54ea1033e2daa739b8d2"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c834f54f8f4640fd7e4b193f80eb25a0602bba9e19b3cd2fc7ffe8199f5ae02"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:049e0de24cf23766f12cc5cc71d8abc07d4a9deb9061b334b62093dedc7cb068"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a28239037b3d6f16916a4c831a5a0eadf856bdd6d2e92c10a0da3a59eadcf3e"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d3da303ab5f378a268fa7d45f37d7d85c3ec19769f28d2cc0c61826a8de21fe"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25626fb37b3c543818c14821afe0fd3830bc327a43953bc88db924b68c5723f1"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3ab2d36e20fbfcce8f02d73c33a8a7362980cff717926bbae030b93ae46b56c7"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:2f9284e11c751b003fd4215ad92d325d92c9cb19ee6729ebd87e3250072cdcde"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:048c01eee07d37cbd066fc512b9d8b5ea88ceeb4e629ab94b3e56965ad655add"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5ccd429694cf26af7997595d627dd2637e7932214486f55b8a357edaac9dae8c"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a"}, + {file = 
"pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7edbc454a29fc6aeae1e1eecba4f07b63b8d76e76a748532233c4c167b4cb9ea"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ad05b683963f69a1d5d2c2bdab1274a31221ca737dbbceaa32bcb67359453cdd"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df6a94bf9452c6da9b5d76ed229a5683d0306ccb91cca8e1eea883189780d568"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7965c13b3967909a09ecc91f21d09cfc4576bf78140b988904e94f130f188396"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3f1fdb790440a34f6ecf7679e1863b825cb5ffde858a9197f851168ed08371e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5277aec8d879f8d05168fdd17ae811dd313b8ff894aeeaf7cd34ad28b4d77e33"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8ab581d3530611897d863d1a649fb0644b860286b4718db919bfd51ece41f10b"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0483847fa9ad5e3412265c1bd72aad35235512d9ce9d27d81a56d935ef489672"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:de9e06abe3cc5ec6a2d5f75bc99b0bdca4f5c719a5b34026f8c57efbdecd2ee3"}, + {file = "pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyjwt" +version = "2.10.1" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + 
+[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "python-box" +version = "7.3.2" +description = "Advanced Python dictionaries with dot notation access" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_box-7.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d136163294fd61a1554db7dd203f2e3035064798d30c17d67d948f0de5c572de"}, + {file = "python_box-7.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d72e96547d8e2c2c333909826e9fae338d9a7e4cde07d5c6058cdd468432c0"}, + {file = "python_box-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:3aa52e3b5cc50c80bb7ef4be3e41e81d095310f619454a7ffd61eef3209a6225"}, + {file = "python_box-7.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:32163b1cb151883de0da62b0cd3572610dc72ccf0762f2447baf1d2562e25bea"}, + {file = "python_box-7.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:064cb59b41e25aaf7dbd39efe53151a5f6797cc1cb3c68610f0f21a9d406d67e"}, + {file = "python_box-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:488f0fba9a6416c3334b602366dcd92825adb0811e07e03753dfcf0ed79cd6f7"}, + {file = "python_box-7.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:39009a2da5c20133718b24891a206592adbe09169856aedc450ad1600fc2e511"}, + {file = "python_box-7.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2a72e2f6fb97c7e472ff3272da207ecc615aa222e52e98352391428527c469"}, + {file = "python_box-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9eead914b9fb7d98a1473f5027dcfe27d26b3a10ffa33b9ba22cf948a23cd280"}, + {file = "python_box-7.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1dfc3b9b073f3d7cad1fa90de98eaaa684a494d0574bbc0666f74fa8307fd6b6"}, + {file = "python_box-7.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca4685a7f764b5a71b6e08535ce2a96b7964bb63d8cb4df10f6bb7147b6c54b"}, + {file = "python_box-7.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e143295f74d47a9ab24562ead2375c9be10629599b57f2e86717d3fff60f82a9"}, + {file = "python_box-7.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f3118ab3076b645c76133b8fac51deee30237cecdcafc3af664c4b9000f04db9"}, + {file = "python_box-7.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a760074ba12ccc247796f43b6c61f686ada4b8349ab59e2a6303b27f3ae082"}, + {file = "python_box-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:ea436e7ff5f87bd728472f1e31a9e6e95572c81028c44a8e00097e0968955638"}, + {file = "python_box-7.3.2-py3-none-any.whl", hash = "sha256:fd7d74d5a848623f93b5221fd9fb00b8c00ff0e130fa87f396277aa188659c92"}, + {file = "python_box-7.3.2.tar.gz", hash = "sha256:028b9917129e67f311932d93347b8a4f1b500d7a5a2870ee3c035f4e7b19403b"}, +] + +[package.extras] +all = ["msgpack", "ruamel.yaml (>=0.17)", "toml"] +msgpack = ["msgpack"] +pyyaml = ["PyYAML"] +ruamel-yaml = ["ruamel.yaml (>=0.17)"] +toml = ["toml"] +tomli = ["tomli ; python_version < \"3.11\"", "tomli-w"] +yaml = ["ruamel.yaml (>=0.17)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = 
"!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-gitlab" +version = "5.6.0" +description = "The python wrapper for the GitLab REST and GraphQL APIs." +optional = false +python-versions = ">=3.9.0" +groups = ["main"] +files = [ + {file = "python_gitlab-5.6.0-py3-none-any.whl", hash = "sha256:68980cd70929fc7f8f06d8a7b09bd046a6b79e1995c19d61249f046005099100"}, + {file = "python_gitlab-5.6.0.tar.gz", hash = "sha256:bc531e8ba3e5641b60409445d4919ace68a2c18cb0ec6d48fbced6616b954166"}, +] + +[package.dependencies] +requests = ">=2.32.0" +requests-toolbelt = ">=1.0.0" + +[package.extras] +autocompletion = ["argcomplete (>=1.10.0,<3)"] +graphql = ["gql[httpx] (>=3.5.0,<4)"] +yaml = ["PyYaml (>=6.0.1)"] + +[[package]] +name = "python-jsonpath" +version = "1.3.0" +description = "JSONPath, JSON Pointer and JSON Patch for Python." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "python_jsonpath-1.3.0-py3-none-any.whl", hash = "sha256:ce586ec5bd934ce97bc2f06600b00437d9684138b77273ced5b70694a8ef3a76"}, + {file = "python_jsonpath-1.3.0.tar.gz", hash = "sha256:ea5eb4d9b1296c8c19cc53538eb0f20fc54128f84571559ee63539e57875fefe"}, +] + +[[package]] +name = "python-ulid" +version = "3.0.0" +description = "Universally unique lexicographically sortable identifier" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, + {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, +] + +[package.extras] +pydantic = ["pydantic (>=2.0)"] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." +optional = false +python-versions = ">=3.4" +groups = ["main"] +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "rsa" +version = "4.9.1" +description = "Pure-Python RSA implementation" +optional = false +python-versions = "<4,>=3.6" +groups = ["main"] +files = [ + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "sanic" +version = "24.12.0" +description = "A web server and web framework that's written to go fast. Build fast. Run fast." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "sanic-24.12.0-py3-none-any.whl", hash = "sha256:3c2a01ec0b6c5926e3efe34eac1b497d31ed989038fe213eb25ad0c98687d388"}, + {file = "sanic-24.12.0.tar.gz", hash = "sha256:09c23aa917616c1e60e44c66dfd7582cb9fd6503f78298c309945909f5839836"}, +] + +[package.dependencies] +aiofiles = ">=0.6.0" +html5tagger = ">=1.2.1" +httptools = ">=0.0.10" +multidict = ">=5.0,<7.0" +sanic-ext = {version = "*", optional = true, markers = "extra == \"ext\""} +sanic-routing = ">=23.12.0" +setuptools = ">=70.1.0" +tracerite = ">=1.0.0" +typing-extensions = ">=4.4.0" +ujson = {version = ">=1.35", markers = "sys_platform != \"win32\" and implementation_name == \"cpython\""} +uvloop = {version = ">=0.15.0", markers = "sys_platform != \"win32\" and implementation_name == \"cpython\""} +websockets = ">=10.0" + +[package.extras] +all = ["autodocsumm (>=0.2.11)", "bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "sphinx (>=2.1.2)", "sphinx_rtd_theme (>=0.4.3)", "towncrier", "tox", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] +dev = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "towncrier", "tox", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] +docs = ["autodocsumm (>=0.2.11)", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "pygments", "sphinx (>=2.1.2)", "sphinx_rtd_theme (>=0.4.3)"] +ext = ["sanic-ext"] +http3 = ["aioquic"] +test = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] + +[[package]] +name = "sanic-ext" +version = "24.12.0" +description = "Extend your Sanic installation with some core functionality." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "sanic_ext-24.12.0-py3-none-any.whl", hash = "sha256:861f809f071770cf28acd5f13e97ed59985e07361b13b4b4540da1333730c83e"}, + {file = "sanic_ext-24.12.0.tar.gz", hash = "sha256:8f912f4c29f242bc638346d09b79f0c8896ff64e79bd0e7fa09eac4b6c0e23c8"}, +] + +[package.dependencies] +pyyaml = ">=3.0.0" + +[package.extras] +dev = ["Jinja2", "black (>=21.4b2)", "coverage", "flake8 (>=3.7.7)", "isort (>=5.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "sanic_testing (>=22.9.0)", "tox"] +test = ["Jinja2", "coverage", "pytest", "pytest-asyncio", "pytest-cov", "sanic_testing (>=22.9.0)", "tox"] + +[[package]] +name = "sanic-routing" +version = "23.12.0" +description = "Core routing component for Sanic" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "sanic-routing-23.12.0.tar.gz", hash = "sha256:1dcadc62c443e48c852392dba03603f9862b6197fc4cba5bbefeb1ace0848b04"}, + {file = "sanic_routing-23.12.0-py3-none-any.whl", hash = "sha256:1558a72afcb9046ed3134a5edae02fc1552cff08f0fff2e8d5de0877ea43ed73"}, +] + +[[package]] +name = "sentry-sdk" +version = "2.26.1" +description = "Python client for Sentry (https://sentry.io)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "sentry_sdk-2.26.1-py2.py3-none-any.whl", hash = "sha256:e99390e3f217d13ddcbaeaed08789f1ca614d663b345b9da42e35ad6b60d696a"}, + {file = "sentry_sdk-2.26.1.tar.gz", hash = "sha256:759e019c41551a21519a95e6cef6d91fb4af1054761923dadaee2e6eca9c02c7"}, +] + +[package.dependencies] +certifi = "*" +sanic = {version = ">=0.8", optional = true, markers = "extra == \"sanic\""} +urllib3 = ">=1.26.11" + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +anthropic = ["anthropic (>=0.16)"] +arq = ["arq (>=0.23)"] +asyncpg = ["asyncpg (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +celery-redbeat = ["celery-redbeat (>=2)"] +chalice = ["chalice (>=1.16.0)"] +clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] +http2 = ["httpcore[http2] (==1.*)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +huggingface-hub = ["huggingface_hub (>=0.22)"] +langchain = ["langchain (>=0.0.210)"] +launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"] +litestar = ["litestar (>=2.0.0)"] +loguru = ["loguru (>=0.5)"] +openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] +openfeature = ["openfeature-sdk (>=0.7.1)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro"] +pure-eval = ["asttokens", "executing", "pure_eval"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite (>=1.48)"] +statsig = ["statsig (>=0.55.3)"] +tornado = ["tornado (>=6)"] +unleash = ["UnleashClient (>=6.0.1)"] + +[[package]] +name = "setuptools" +version = "75.9.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "setuptools-75.9.1-py3-none-any.whl", hash = "sha256:0a6f876d62f4d978ca1a11ab4daf728d1357731f978543ff18ecdbf9fd071f73"}, 
+ {file = "setuptools-75.9.1.tar.gz", hash = "sha256:b6eca2c3070cdc82f71b4cb4bb2946bc0760a210d11362278cf1ff394e6ea32c"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.40" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "SQLAlchemy-2.0.40-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47"}, + {file = 
"SQLAlchemy-2.0.40-cp37-cp37m-win32.whl", hash = "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win_amd64.whl", hash = "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win32.whl", hash = "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win_amd64.whl", hash = "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win32.whl", hash = "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win_amd64.whl", hash = "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2"}, + {file = 
"sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win32.whl", hash = "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win_amd64.whl", hash = "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win32.whl", hash = "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win_amd64.whl", hash = "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win32.whl", hash = "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win_amd64.whl", hash = "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_aarch64.whl", 
hash = "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win32.whl", hash = "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win_amd64.whl", hash = "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870"}, + {file = "sqlalchemy-2.0.40-py3-none-any.whl", hash = "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a"}, + {file = "sqlalchemy-2.0.40.tar.gz", hash = "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00"}, +] + +[package.dependencies] +greenlet = {version = ">=1", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "tenacity" +version = "9.1.2" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"}, + {file = "tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tracerite" +version = "1.1.1" +description = "Human-readable HTML tracebacks for Python exceptions" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "tracerite-1.1.1-py3-none-any.whl", hash = "sha256:3a787a9ecb1a136ea9ce17e6328e414ec414a4f644130af4e1e330bec2dece29"}, + {file = "tracerite-1.1.1.tar.gz", hash = 
"sha256:6400a35a187747189e4bb8d4a8e471bd86d14dbdcc94bcad23f4eda023f41356"}, +] + +[package.dependencies] +html5tagger = ">=1.2.1" + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20241206" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, + {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250402" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "types_pyyaml-6.0.12.20250402-py3-none-any.whl", hash = "sha256:652348fa9e7a203d4b0d21066dfb00760d3cbd5a15ebb7cf8d33c88a49546681"}, + {file = "types_pyyaml-6.0.12.20250402.tar.gz", hash = "sha256:d7c13c3e6d335b6af4b0122a01ff1d270aba84ab96d1a1a1063ecba3e13ec075"}, +] + +[[package]] +name = "typing-extensions" +version = "4.13.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "tzdata" +version = "2025.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, +] + +[[package]] +name = "ujson" +version = "5.10.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "sys_platform != \"win32\" and implementation_name == \"cpython\"" +files = [ + {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, + {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, + {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, + {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, + {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, + {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, + {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, + {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, + {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, + {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, + {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, + {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, + {file = 
"ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, + {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, + {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, + {file = 
"ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, + {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, +] + +[[package]] +name = "undictify" +version = "0.11.3" +description = "Type-checked function calls at runtime" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "undictify-0.11.3-py3-none-any.whl", hash = "sha256:4bfdc075b2f06ee027b05e241434c8efcbebf6c83fcc5b8d9d8def56dab4b5ff"}, + {file = "undictify-0.11.3.tar.gz", hash = "sha256:1481170ed8b9862c033e7549d817b90cead6002677c602d1bbdbf8ea15100098"}, +] + +[[package]] +name = "urllib3" +version = "2.4.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvloop" +version = "0.21.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +markers = "sys_platform != \"win32\" and implementation_name == \"cpython\"" +files = [ + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, + {file = 
"uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, + 
{file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, + {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, +] + +[package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "websockets" +version = "15.0.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, +] + +[[package]] +name = "werkzeug" +version = "3.1.3" +description = "The comprehensive WSGI web application library." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +groups = ["main"] +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + +[[package]] +name = "yarl" +version = "1.20.0" +description = "Yet another URL library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f1f6670b9ae3daedb325fa55fbe31c22c8228f6e0b513772c2e1c623caa6ab22"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85a231fa250dfa3308f3c7896cc007a47bc76e9e8e8595c20b7426cac4884c62"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a06701b647c9939d7019acdfa7ebbfbb78ba6aa05985bb195ad716ea759a569"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7595498d085becc8fb9203aa314b136ab0516c7abd97e7d74f7bb4eb95042abe"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af5607159085dcdb055d5678fc2d34949bd75ae6ea6b4381e784bbab1c3aa195"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95b50910e496567434cb77a577493c26bce0f31c8a305135f3bda6a2483b8e10"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b594113a301ad537766b4e16a5a6750fcbb1497dcc1bc8a4daae889e6402a634"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:083ce0393ea173cd37834eb84df15b6853b555d20c52703e21fbababa8c129d2"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1a350a652bbbe12f666109fbddfdf049b3ff43696d18c9ab1531fbba1c977a"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fb0caeac4a164aadce342f1597297ec0ce261ec4532bbc5a9ca8da5622f53867"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d88cc43e923f324203f6ec14434fa33b85c06d18d59c167a0637164863b8e995"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e52d6ed9ea8fd3abf4031325dc714aed5afcbfa19ee4a89898d663c9976eb487"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ce360ae48a5e9961d0c730cf891d40698a82804e85f6e74658fb175207a77cb2"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:06d06c9d5b5bc3eb56542ceeba6658d31f54cf401e8468512447834856fb0e61"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c27d98f4e5c4060582f44e58309c1e55134880558f1add7a87c1bc36ecfade19"}, + {file = "yarl-1.20.0-cp310-cp310-win32.whl", hash = 
"sha256:f4d3fa9b9f013f7050326e165c3279e22850d02ae544ace285674cb6174b5d6d"}, + {file = "yarl-1.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:bc906b636239631d42eb8a07df8359905da02704a868983265603887ed68c076"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fdb5204d17cb32b2de2d1e21c7461cabfacf17f3645e4b9039f210c5d3378bf3"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eaddd7804d8e77d67c28d154ae5fab203163bd0998769569861258e525039d2a"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:634b7ba6b4a85cf67e9df7c13a7fb2e44fa37b5d34501038d174a63eaac25ee2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d409e321e4addf7d97ee84162538c7258e53792eb7c6defd0c33647d754172e"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ea52f7328a36960ba3231c6677380fa67811b414798a6e071c7085c57b6d20a9"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8703517b924463994c344dcdf99a2d5ce9eca2b6882bb640aa555fb5efc706a"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:077989b09ffd2f48fb2d8f6a86c5fef02f63ffe6b1dd4824c76de7bb01e4f2e2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0acfaf1da020253f3533526e8b7dd212838fdc4109959a2c53cafc6db611bff2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4230ac0b97ec5eeb91d96b324d66060a43fd0d2a9b603e3327ed65f084e41f8"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a6a1e6ae21cdd84011c24c78d7a126425148b24d437b5702328e4ba640a8902"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:86de313371ec04dd2531f30bc41a5a1a96f25a02823558ee0f2af0beaa7ca791"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dd59c9dd58ae16eaa0f48c3d0cbe6be8ab4dc7247c3ff7db678edecbaf59327f"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a0bc5e05f457b7c1994cc29e83b58f540b76234ba6b9648a4971ddc7f6aa52da"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c9471ca18e6aeb0e03276b5e9b27b14a54c052d370a9c0c04a68cefbd1455eb4"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40ed574b4df723583a26c04b298b283ff171bcc387bc34c2683235e2487a65a5"}, + {file = "yarl-1.20.0-cp311-cp311-win32.whl", hash = "sha256:db243357c6c2bf3cd7e17080034ade668d54ce304d820c2a58514a4e51d0cfd6"}, + {file = "yarl-1.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c12cd754d9dbd14204c328915e23b0c361b88f3cffd124129955e60a4fbfcfb"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e06b9f6cdd772f9b665e5ba8161968e11e403774114420737f7884b5bd7bdf6f"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b9ae2fbe54d859b3ade40290f60fe40e7f969d83d482e84d2c31b9bff03e359e"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d12b8945250d80c67688602c891237994d203d42427cb14e36d1a732eda480e"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b"}, + {file = "yarl-1.20.0-cp312-cp312-win32.whl", hash = "sha256:839de4c574169b6598d47ad61534e6981979ca2c820ccb77bf70f4311dd2cc64"}, + {file = "yarl-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d7dbbe44b443b0c4aa0971cb07dcb2c2060e4a9bf8d1301140a33a93c98e18c"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2"}, + {file = 
"yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384"}, + {file = "yarl-1.20.0-cp313-cp313-win32.whl", hash = "sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62"}, + {file = "yarl-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f"}, + {file = "yarl-1.20.0-cp313-cp313t-win32.whl", hash = "sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac"}, + {file = "yarl-1.20.0-cp313-cp313t-win_amd64.whl", hash = 
"sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:119bca25e63a7725b0c9d20ac67ca6d98fa40e5a894bd5d4686010ff73397914"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35d20fb919546995f1d8c9e41f485febd266f60e55383090010f272aca93edcc"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:484e7a08f72683c0f160270566b4395ea5412b4359772b98659921411d32ad26"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d8a3d54a090e0fff5837cd3cc305dd8a07d3435a088ddb1f65e33b322f66a94"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f0cf05ae2d3d87a8c9022f3885ac6dea2b751aefd66a4f200e408a61ae9b7f0d"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a884b8974729e3899d9287df46f015ce53f7282d8d3340fa0ed57536b440621c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8d8aa8dd89ffb9a831fedbcb27d00ffd9f4842107d52dc9d57e64cb34073d5c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4e88d6c3c8672f45a30867817e4537df1bbc6f882a91581faf1f6d9f0f1b5a"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdb77efde644d6f1ad27be8a5d67c10b7f769804fff7a966ccb1da5a4de4b656"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4ba5e59f14bfe8d261a654278a0f6364feef64a794bd456a8c9e823071e5061c"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d0bf955b96ea44ad914bc792c26a0edcd71b4668b93cbcd60f5b0aeaaed06c64"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:27359776bc359ee6eaefe40cb19060238f31228799e43ebd3884e9c589e63b20"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:04d9c7a1dc0a26efb33e1acb56c8849bd57a693b85f44774356c92d610369efa"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:faa709b66ae0e24c8e5134033187a972d849d87ed0a12a0366bedcc6b5dc14a5"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44869ee8538208fe5d9342ed62c11cc6a7a1af1b3d0bb79bb795101b6e77f6e0"}, + {file = "yarl-1.20.0-cp39-cp39-win32.whl", hash = "sha256:b7fa0cb9fd27ffb1211cde944b41f5c67ab1c13a13ebafe470b1e206b8459da8"}, + {file = "yarl-1.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:d4fad6e5189c847820288286732075f213eabf81be4d08d6cc309912e62be5b7"}, + {file = "yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124"}, + {file = "yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.1" + +[metadata] +lock-version = "2.1" +python-versions = "^3.13" +content-hash = "7938b536cdef40647ae41faeedb2f265396d84d411553f3f09244775f90f61b7" diff --git a/projects/k8s_watcher/pyproject.toml b/projects/k8s_watcher/pyproject.toml new file mode 100644 index 000000000..05d127a62 --- /dev/null +++ b/projects/k8s_watcher/pyproject.toml @@ -0,0 +1,93 @@ +[project] +name = "k8s_cache" +version = "0.1.0" +description = "" +authors = [ + { name = "Swiss Data Science Center", email = "contact@datascience.ch" }, +] +license = "" +requires-python = ">=3.13" 
+dynamic = ["dependencies"] + +[tool.poetry] +packages = [ + { include = "renku_data_services/k8s_cache", from = "../../bases" }, + { include = "renku_data_services/app_config", from = "../../components" }, + { include = "renku_data_services/authn", from = "../../components" }, + { include = "renku_data_services/authz", from = "../../components" }, + { include = "renku_data_services/base_api", from = "../../components" }, + { include = "renku_data_services/base_models", from = "../../components" }, + { include = "renku_data_services/base_orm", from = "../../components" }, + { include = "renku_data_services/crc", from = "../../components" }, + { include = "renku_data_services/connected_services", from = "../../components" }, + { include = "renku_data_services/db_config", from = "../../components" }, + { include = "renku_data_services/errors", from = "../../components" }, + { include = "renku_data_services/git", from = "../../components" }, + { include = "renku_data_services/k8s", from = "../../components" }, + { include = "renku_data_services/k8s_watcher", from = "../../components" }, + { include = "renku_data_services/message_queue", from = "../../components" }, + { include = "renku_data_services/namespace", from = "../../components" }, + { include = "renku_data_services/platform", from = "../../components" }, + { include = "renku_data_services/project", from = "../../components" }, + { include = "renku_data_services/repositories", from = "../../components" }, + { include = "renku_data_services/secrets", from = "../../components" }, + { include = "renku_data_services/session", from = "../../components" }, + { include = "renku_data_services/storage", from = "../../components" }, + { include = "renku_data_services/users", from = "../../components" }, + { include = "renku_data_services/utils", from = "../../components" }, + { include = "renku_data_services/data_connectors", from = "../../components" }, + { include = "renku_data_services/notebooks", from = "../../components" }, + # Note: poetry poly does not detect the migrations as dependencies, but they are. Don't remove these! + { include = "renku_data_services/migrations", from = "../../components" }, + { include = "renku_data_services/solr", from = "../../components" }, + { include = "renku_data_services/search", from = "../../components" }, + { include = "renku_data_services/metrics", from = "../../components" }, +] + +[tool.poetry.dependencies] +python = "^3.13" +sanic = { extras = ["ext"], version = "^24.12.0" } +pydantic = { extras = ["email"], version = "^2.10.6" } +datamodel-code-generator = "^0.24.2" +sqlalchemy = { extras = ["asyncio"], version = "^2.0.38" } +alembic = "^1.14.1" +asyncpg = "^0.30.0" +pyjwt = { extras = ["crypto"], version = "^2.10.1" } +tenacity = "^9.0.0" +httpx = "<0.29" +kubernetes = "^31.0.0" +python-ulid = "^3.0.0" +python-gitlab = "^5.6.0" +psycopg = { version = "^3.2.3", extras = ["binary"] } +urllib3 = "^2.3.0" +deepmerge = "^2.0" +authlib = "^1.5.0" +dataclasses-avroschema = "^0.65.8" +undictify = "^0.11.3" +prometheus-sanic = "^3.0.0" +sentry-sdk = { version = "^2.22.0", extras = ["sanic"] } +authzed = "^1.20.0" +# see https://github.com/sanic-org/sanic/issues/2828 for setuptools dependency, remove when not needed anymore +setuptools = { version = "^75.8.2" } +aiofile = "^3.9.0" +# Not a direct dependency, it is needed by authzed. 
Was causing things to crash at startup because of
+# google.protobuf.runtime_version.VersionError:
+# Detected incompatible Protobuf Gencode/Runtime versions when loading authzed/api/v1/core.proto: gencode 5.28.2 runtime 5.27.3.
+protobuf = "^5.29.3"
+cryptography = "^44.0.1"
+kubernetes-asyncio = "^32.0.0"
+marshmallow = "^3.26.1"
+escapism = "^1.0.1"
+kr8s = "^0.20.7"
+python-box = "^7.0.1"
+werkzeug = "^3.1.3"
+toml = "^0.10.2"
+parsy = "^2.1"
+sanic-ext = "^24.12.0"
+markdown-code-runner = "^2.2.0"
+
+[tool.poetry.group.dev.dependencies]
+
+[build-system]
+requires = ["poetry-core>=1.0.0"]
+build-backend = "poetry.core.masonry.api"
diff --git a/projects/renku_data_service/Dockerfile b/projects/renku_data_service/Dockerfile
index 59e684286..3d191d2e3 100644
--- a/projects/renku_data_service/Dockerfile
+++ b/projects/renku_data_service/Dockerfile
@@ -1,20 +1,15 @@
-FROM python:3.12-bookworm as builder
+ARG RCLONE_IMAGE_REPOSITORY="ghcr.io/swissdatasciencecenter/rclone"
+ARG RCLONE_IMAGE_TAG="sha-66b7f3e"
+FROM ${RCLONE_IMAGE_REPOSITORY}:${RCLONE_IMAGE_TAG} AS rclone
+
+FROM python:3.13-bookworm AS builder
 ARG DEV_BUILD=false
 ARG USER_UID=1000
-ARG RCLONE_VERSION=v1.65.2
-ARG RCLONE_ARCH=amd64
-ARG RCLONE_OS=linux
 ARG USER_GID=$USER_UID
 RUN groupadd --gid $USER_GID renku && \
     DEBIAN_FRONTEND=noninteractive adduser --gid $USER_GID --uid $USER_UID renku
-RUN cd /tmp \
-    && wget -q https://downloads.rclone.org/${RCLONE_VERSION}/rclone-${RCLONE_VERSION}-${RCLONE_OS}-${RCLONE_ARCH}.zip \
-    && unzip /tmp/rclone-${RCLONE_VERSION}-${RCLONE_OS}-${RCLONE_ARCH}.zip \
-    && mv /tmp/rclone-${RCLONE_VERSION}-${RCLONE_OS}-${RCLONE_ARCH}/rclone /usr/bin \
-    && chmod 755 /usr/bin/rclone \
-    && chown root:root /usr/bin/rclone \
-    && rm -r /tmp/rclone*
+COPY --from=rclone --chown=root:root --chmod=755 /rclone /usr/bin/rclone
 USER $USER_UID:$USER_GID
 WORKDIR /app
 RUN python3 -m pip install --user pipx && \
@@ -23,7 +18,9 @@ RUN python3 -m pip install --user pipx && \
     /home/renku/.local/bin/pipx install virtualenv && \
     /home/renku/.local/bin/virtualenv env && \
     /home/renku/.local/bin/poetry self add poetry-multiproject-plugin && \
-    /home/renku/.local/bin/poetry self add poetry-polylith-plugin
+    /home/renku/.local/bin/poetry self add poetry-polylith-plugin && \
+    /home/renku/.local/bin/poetry self add poetry-plugin-export
+
 COPY --chown=$USER_UID:$USER_GID . .
RUN if $DEV_BUILD ; then \ /home/renku/.local/bin/poetry export -o requirements.txt --with dev; \ @@ -31,10 +28,10 @@ RUN if $DEV_BUILD ; then \ /home/renku/.local/bin/poetry export -o requirements.txt; \ fi && \ env/bin/pip install -r requirements.txt -RUN /home/renku/.local/bin/poetry build-project -f wheel -C projects/renku_data_service +RUN /home/renku/.local/bin/poetry -C projects/renku_data_service build-project -f wheel --custom-temp-path=/tmp RUN env/bin/pip --no-cache-dir install projects/renku_data_service/dist/*.whl -FROM python:3.12-slim-bookworm +FROM python:3.13-slim-bookworm ARG USER_UID=1000 ARG USER_GID=$USER_UID ENV prometheus_multiproc_dir=/prometheus @@ -49,4 +46,5 @@ COPY --from=builder /usr/bin/rclone /usr/bin USER $USER_UID:$USER_GID WORKDIR /app COPY --from=builder /app/env ./env +ENV DB_POOL_SIZE=10 ENTRYPOINT ["tini", "-g", "--", "env/bin/python", "-m", "renku_data_services.data_api.main"] diff --git a/projects/renku_data_service/poetry.lock b/projects/renku_data_service/poetry.lock index e0b8d67ca..cd62d2d9a 100644 --- a/projects/renku_data_service/poetry.lock +++ b/projects/renku_data_service/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. [[package]] name = "aiofile" @@ -6,6 +6,7 @@ version = "3.9.0" description = "Asynchronous file operations." optional = false python-versions = "<4,>=3.8" +groups = ["main"] files = [ {file = "aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa"}, {file = "aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b"}, @@ -20,115 +21,137 @@ version = "24.1.0" description = "File support for asyncio." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, ] +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, +] + [[package]] name = "aiohttp" -version = "3.9.5" +version = "3.11.18" description = "Async http client/server framework (asyncio)" optional = false -python-versions = ">=3.8" -files = [ - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fcde4c397f673fdec23e6b05ebf8d4751314fa7c24f93334bf1f1364c1c69ac7"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d6b3f1fabe465e819aed2c421a6743d8debbde79b6a8600739300630a01bf2c"}, - {file = "aiohttp-3.9.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6ae79c1bc12c34082d92bf9422764f799aee4746fd7a392db46b7fd357d4a17a"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d3ebb9e1316ec74277d19c5f482f98cc65a73ccd5430540d6d11682cd857430"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84dabd95154f43a2ea80deffec9cb44d2e301e38a0c9d331cc4aa0166fe28ae3"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c8a02fbeca6f63cb1f0475c799679057fc9268b77075ab7cf3f1c600e81dd46b"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c26959ca7b75ff768e2776d8055bf9582a6267e24556bb7f7bd29e677932be72"}, - {file = "aiohttp-3.9.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:714d4e5231fed4ba2762ed489b4aec07b2b9953cf4ee31e9871caac895a839c0"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e7a6a8354f1b62e15d48e04350f13e726fa08b62c3d7b8401c0a1314f02e3558"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c413016880e03e69d166efb5a1a95d40f83d5a3a648d16486592c49ffb76d0db"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ff84aeb864e0fac81f676be9f4685f0527b660f1efdc40dcede3c251ef1e867f"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ad7f2919d7dac062f24d6f5fe95d401597fbb015a25771f85e692d043c9d7832"}, - {file = "aiohttp-3.9.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:702e2c7c187c1a498a4e2b03155d52658fdd6fda882d3d7fbb891a5cf108bb10"}, - {file = "aiohttp-3.9.5-cp310-cp310-win32.whl", hash = "sha256:67c3119f5ddc7261d47163ed86d760ddf0e625cd6246b4ed852e82159617b5fb"}, - {file = "aiohttp-3.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:471f0ef53ccedec9995287f02caf0c068732f026455f07db3f01a46e49d76bbb"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ae53e33ee7476dd3d1132f932eeb39bf6125083820049d06edcdca4381f342"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c088c4d70d21f8ca5c0b8b5403fe84a7bc8e024161febdd4ef04575ef35d474d"}, - {file = "aiohttp-3.9.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:639d0042b7670222f33b0028de6b4e2fad6451462ce7df2af8aee37dcac55424"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f26383adb94da5e7fb388d441bf09c61e5e35f455a3217bfd790c6b6bc64b2ee"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66331d00fb28dc90aa606d9a54304af76b335ae204d1836f65797d6fe27f1ca2"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ff550491f5492ab5ed3533e76b8567f4b37bd2995e780a1f46bca2024223233"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f22eb3a6c1080d862befa0a89c380b4dafce29dc6cd56083f630073d102eb595"}, - {file = "aiohttp-3.9.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a81b1143d42b66ffc40a441379387076243ef7b51019204fd3ec36b9f69e77d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f64fd07515dad67f24b6ea4a66ae2876c01031de91c93075b8093f07c0a2d93d"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:93e22add827447d2e26d67c9ac0161756007f152fdc5210277d00a85f6c92323"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:55b39c8684a46e56ef8c8d24faf02de4a2b2ac60d26cee93bc595651ff545de9"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4715a9b778f4293b9f8ae7a0a7cef9829f02ff8d6277a39d7f40565c737d3771"}, - {file = "aiohttp-3.9.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:afc52b8d969eff14e069a710057d15ab9ac17cd4b6753042c407dcea0e40bf75"}, - {file = "aiohttp-3.9.5-cp311-cp311-win32.whl", hash = "sha256:b3df71da99c98534be076196791adca8819761f0bf6e08e07fd7da25127150d6"}, - {file = "aiohttp-3.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:88e311d98cc0bf45b62fc46c66753a83445f5ab20038bcc1b8a1cc05666f428a"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:c7a4b7a6cf5b6eb11e109a9755fd4fda7d57395f8c575e166d363b9fc3ec4678"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:0a158704edf0abcac8ac371fbb54044f3270bdbc93e254a82b6c82be1ef08f3c"}, - {file = "aiohttp-3.9.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d153f652a687a8e95ad367a86a61e8d53d528b0530ef382ec5aaf533140ed00f"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82a6a97d9771cb48ae16979c3a3a9a18b600a8505b1115cfe354dfb2054468b4"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:60cdbd56f4cad9f69c35eaac0fbbdf1f77b0ff9456cebd4902f3dd1cf096464c"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8676e8fd73141ded15ea586de0b7cda1542960a7b9ad89b2b06428e97125d4fa"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da00da442a0e31f1c69d26d224e1efd3a1ca5bcbf210978a2ca7426dfcae9f58"}, - {file = "aiohttp-3.9.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18f634d540dd099c262e9f887c8bbacc959847cfe5da7a0e2e1cf3f14dbf2daf"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:320e8618eda64e19d11bdb3bd04ccc0a816c17eaecb7e4945d01deee2a22f95f"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2faa61a904b83142747fc6a6d7ad8fccff898c849123030f8e75d5d967fd4a81"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:8c64a6dc3fe5db7b1b4d2b5cb84c4f677768bdc340611eca673afb7cf416ef5a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:393c7aba2b55559ef7ab791c94b44f7482a07bf7640d17b341b79081f5e5cd1a"}, - {file = "aiohttp-3.9.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c671dc117c2c21a1ca10c116cfcd6e3e44da7fcde37bf83b2be485ab377b25da"}, - {file = "aiohttp-3.9.5-cp312-cp312-win32.whl", hash = "sha256:5a7ee16aab26e76add4afc45e8f8206c95d1d75540f1039b84a03c3b3800dd59"}, - {file = "aiohttp-3.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:5ca51eadbd67045396bc92a4345d1790b7301c14d1848feaac1d6a6c9289e888"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:694d828b5c41255e54bc2dddb51a9f5150b4eefa9886e38b52605a05d96566e8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0605cc2c0088fcaae79f01c913a38611ad09ba68ff482402d3410bf59039bfb8"}, - {file = "aiohttp-3.9.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4558e5012ee03d2638c681e156461d37b7a113fe13970d438d95d10173d25f78"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dbc053ac75ccc63dc3a3cc547b98c7258ec35a215a92bd9f983e0aac95d3d5b"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4109adee842b90671f1b689901b948f347325045c15f46b39797ae1bf17019de"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6ea1a5b409a85477fd8e5ee6ad8f0e40bf2844c270955e09360418cfd09abac"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3c2890ca8c59ee683fd09adf32321a40fe1cf164e3387799efb2acebf090c11"}, - {file = "aiohttp-3.9.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3916c8692dbd9d55c523374a3b8213e628424d19116ac4308e434dbf6d95bbdd"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8d1964eb7617907c792ca00b341b5ec3e01ae8c280825deadbbd678447b127e1"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5ab8e1f6bee051a4bf6195e38a5c13e5e161cb7bad83d8854524798bd9fcd6e"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:52c27110f3862a1afbcb2af4281fc9fdc40327fa286c4625dfee247c3ba90156"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:7f64cbd44443e80094309875d4f9c71d0401e966d191c3d469cde4642bc2e031"}, - {file = "aiohttp-3.9.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8b4f72fbb66279624bfe83fd5eb6aea0022dad8eec62b71e7bf63ee1caadeafe"}, - {file = "aiohttp-3.9.5-cp38-cp38-win32.whl", hash = "sha256:6380c039ec52866c06d69b5c7aad5478b24ed11696f0e72f6b807cfb261453da"}, - {file = "aiohttp-3.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:da22dab31d7180f8c3ac7c7635f3bcd53808f374f6aa333fe0b0b9e14b01f91a"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1732102949ff6087589408d76cd6dea656b93c896b011ecafff418c9661dc4ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c6021d296318cb6f9414b48e6a439a7f5d1f665464da507e8ff640848ee2a58a"}, - {file = 
"aiohttp-3.9.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:239f975589a944eeb1bad26b8b140a59a3a320067fb3cd10b75c3092405a1372"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b7b30258348082826d274504fbc7c849959f1989d86c29bc355107accec6cfb"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd2adf5c87ff6d8b277814a28a535b59e20bfea40a101db6b3bdca7e9926bc24"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9a3d838441bebcf5cf442700e3963f58b5c33f015341f9ea86dcd7d503c07e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3a1ae66e3d0c17cf65c08968a5ee3180c5a95920ec2731f53343fac9bad106"}, - {file = "aiohttp-3.9.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9c69e77370cce2d6df5d12b4e12bdcca60c47ba13d1cbbc8645dd005a20b738b"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf56238f4bbf49dab8c2dc2e6b1b68502b1e88d335bea59b3f5b9f4c001475"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d1469f228cd9ffddd396d9948b8c9cd8022b6d1bf1e40c6f25b0fb90b4f893ed"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:45731330e754f5811c314901cebdf19dd776a44b31927fa4b4dbecab9e457b0c"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:3fcb4046d2904378e3aeea1df51f697b0467f2aac55d232c87ba162709478c46"}, - {file = "aiohttp-3.9.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cf142aa6c1a751fcb364158fd710b8a9be874b81889c2bd13aa8893197455e2"}, - {file = "aiohttp-3.9.5-cp39-cp39-win32.whl", hash = "sha256:7b179eea70833c8dee51ec42f3b4097bd6370892fa93f510f76762105568cf09"}, - {file = "aiohttp-3.9.5-cp39-cp39-win_amd64.whl", hash = "sha256:38d80498e2e169bc61418ff36170e0aad0cd268da8b38a17c4cf29d254a8b3f1"}, - {file = "aiohttp-3.9.5.tar.gz", hash = "sha256:edea7d15772ceeb29db4aff55e482d4bcfb6ae160ce144f2682de02f6d693551"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:96264854fedbea933a9ca4b7e0c745728f01380691687b7365d18d9e977179c4"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9602044ff047043430452bc3a2089743fa85da829e6fc9ee0025351d66c332b6"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5691dc38750fcb96a33ceef89642f139aa315c8a193bbd42a0c33476fd4a1609"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554c918ec43f8480b47a5ca758e10e793bd7410b83701676a4782672d670da55"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a4076a2b3ba5b004b8cffca6afe18a3b2c5c9ef679b4d1e9859cf76295f8d4f"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:767a97e6900edd11c762be96d82d13a1d7c4fc4b329f054e88b57cdc21fded94"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0ddc9337a0fb0e727785ad4f41163cc314376e82b31846d3835673786420ef1"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f414f37b244f2a97e79b98d48c5ff0789a0b4b4609b17d64fa81771ad780e415"}, + {file = 
"aiohttp-3.11.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fdb239f47328581e2ec7744ab5911f97afb10752332a6dd3d98e14e429e1a9e7"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:f2c50bad73ed629cc326cc0f75aed8ecfb013f88c5af116f33df556ed47143eb"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a8d8f20c39d3fa84d1c28cdb97f3111387e48209e224408e75f29c6f8e0861d"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:106032eaf9e62fd6bc6578c8b9e6dc4f5ed9a5c1c7fb2231010a1b4304393421"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b491e42183e8fcc9901d8dcd8ae644ff785590f1727f76ca86e731c61bfe6643"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad8c745ff9460a16b710e58e06a9dec11ebc0d8f4dd82091cefb579844d69868"}, + {file = "aiohttp-3.11.18-cp310-cp310-win32.whl", hash = "sha256:8e57da93e24303a883146510a434f0faf2f1e7e659f3041abc4e3fb3f6702a9f"}, + {file = "aiohttp-3.11.18-cp310-cp310-win_amd64.whl", hash = "sha256:cc93a4121d87d9f12739fc8fab0a95f78444e571ed63e40bfc78cd5abe700ac9"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:427fdc56ccb6901ff8088544bde47084845ea81591deb16f957897f0f0ba1be9"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c828b6d23b984255b85b9b04a5b963a74278b7356a7de84fda5e3b76866597b"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c2eaa145bb36b33af1ff2860820ba0589e165be4ab63a49aebfd0981c173b66"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d518ce32179f7e2096bf4e3e8438cf445f05fedd597f252de9f54c728574756"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0700055a6e05c2f4711011a44364020d7a10fbbcd02fbf3e30e8f7e7fddc8717"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8bd1cde83e4684324e6ee19adfc25fd649d04078179890be7b29f76b501de8e4"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73b8870fe1c9a201b8c0d12c94fe781b918664766728783241a79e0468427e4f"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25557982dd36b9e32c0a3357f30804e80790ec2c4d20ac6bcc598533e04c6361"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e889c9df381a2433802991288a61e5a19ceb4f61bd14f5c9fa165655dcb1fd1"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9ea345fda05bae217b6cce2acf3682ce3b13d0d16dd47d0de7080e5e21362421"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9f26545b9940c4b46f0a9388fd04ee3ad7064c4017b5a334dd450f616396590e"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3a621d85e85dccabd700294494d7179ed1590b6d07a35709bb9bd608c7f5dd1d"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9c23fd8d08eb9c2af3faeedc8c56e134acdaf36e2117ee059d7defa655130e5f"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9e6b0e519067caa4fd7fb72e3e8002d16a68e84e62e7291092a5433763dc0dd"}, + {file = "aiohttp-3.11.18-cp311-cp311-win32.whl", hash = "sha256:122f3e739f6607e5e4c6a2f8562a6f476192a682a52bda8b4c6d4254e1138f4d"}, + {file = 
"aiohttp-3.11.18-cp311-cp311-win_amd64.whl", hash = "sha256:e6f3c0a3a1e73e88af384b2e8a0b9f4fb73245afd47589df2afcab6b638fa0e6"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:63d71eceb9cad35d47d71f78edac41fcd01ff10cacaa64e473d1aec13fa02df2"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d1929da615840969929e8878d7951b31afe0bac883d84418f92e5755d7b49508"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d0aebeb2392f19b184e3fdd9e651b0e39cd0f195cdb93328bd124a1d455cd0e"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3849ead845e8444f7331c284132ab314b4dac43bfae1e3cf350906d4fff4620f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e8452ad6b2863709f8b3d615955aa0807bc093c34b8e25b3b52097fe421cb7f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b8d2b42073611c860a37f718b3d61ae8b4c2b124b2e776e2c10619d920350ec"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fbf91f6a0ac317c0a07eb328a1384941872f6761f2e6f7208b63c4cc0a7ff6"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ff5625413fec55216da5eaa011cf6b0a2ed67a565914a212a51aa3755b0009"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7f33a92a2fde08e8c6b0c61815521324fc1612f397abf96eed86b8e31618fdb4"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:11d5391946605f445ddafda5eab11caf310f90cdda1fd99865564e3164f5cff9"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3cc314245deb311364884e44242e00c18b5896e4fe6d5f942e7ad7e4cb640adb"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f421843b0f70740772228b9e8093289924359d306530bcd3926f39acbe1adda"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e220e7562467dc8d589e31c1acd13438d82c03d7f385c9cd41a3f6d1d15807c1"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ab2ef72f8605046115bc9aa8e9d14fd49086d405855f40b79ed9e5c1f9f4faea"}, + {file = "aiohttp-3.11.18-cp312-cp312-win32.whl", hash = "sha256:12a62691eb5aac58d65200c7ae94d73e8a65c331c3a86a2e9670927e94339ee8"}, + {file = "aiohttp-3.11.18-cp312-cp312-win_amd64.whl", hash = "sha256:364329f319c499128fd5cd2d1c31c44f234c58f9b96cc57f743d16ec4f3238c8"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:474215ec618974054cf5dc465497ae9708543cbfc312c65212325d4212525811"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ced70adf03920d4e67c373fd692123e34d3ac81dfa1c27e45904a628567d804"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2d9f6c0152f8d71361905aaf9ed979259537981f47ad099c8b3d81e0319814bd"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a35197013ed929c0aed5c9096de1fc5a9d336914d73ab3f9df14741668c0616c"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:540b8a1f3a424f1af63e0af2d2853a759242a1769f9f1ab053996a392bd70118"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f9e6710ebebfce2ba21cee6d91e7452d1125100f41b906fb5af3da8c78b764c1"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8af2ef3b4b652ff109f98087242e2ab974b2b2b496304063585e3d78de0b000"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28c3f975e5ae3dbcbe95b7e3dcd30e51da561a0a0f2cfbcdea30fc1308d72137"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c28875e316c7b4c3e745172d882d8a5c835b11018e33432d281211af35794a93"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:13cd38515568ae230e1ef6919e2e33da5d0f46862943fcda74e7e915096815f3"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0e2a92101efb9f4c2942252c69c63ddb26d20f46f540c239ccfa5af865197bb8"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e6d3e32b8753c8d45ac550b11a1090dd66d110d4ef805ffe60fa61495360b3b2"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ea4cf2488156e0f281f93cc2fd365025efcba3e2d217cbe3df2840f8c73db261"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d4df95ad522c53f2b9ebc07f12ccd2cb15550941e11a5bbc5ddca2ca56316d7"}, + {file = "aiohttp-3.11.18-cp313-cp313-win32.whl", hash = "sha256:cdd1bbaf1e61f0d94aced116d6e95fe25942f7a5f42382195fd9501089db5d78"}, + {file = "aiohttp-3.11.18-cp313-cp313-win_amd64.whl", hash = "sha256:bdd619c27e44382cf642223f11cfd4d795161362a5a1fc1fa3940397bc89db01"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:469ac32375d9a716da49817cd26f1916ec787fc82b151c1c832f58420e6d3533"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3cec21dd68924179258ae14af9f5418c1ebdbba60b98c667815891293902e5e0"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b426495fb9140e75719b3ae70a5e8dd3a79def0ae3c6c27e012fc59f16544a4a"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2f41203e2808616292db5d7170cccf0c9f9c982d02544443c7eb0296e8b0c7"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc0ae0a5e9939e423e065a3e5b00b24b8379f1db46046d7ab71753dfc7dd0e1"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe7cdd3f7d1df43200e1c80f1aed86bb36033bf65e3c7cf46a2b97a253ef8798"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5199be2a2f01ffdfa8c3a6f5981205242986b9e63eb8ae03fd18f736e4840721"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ccec9e72660b10f8e283e91aa0295975c7bd85c204011d9f5eb69310555cf30"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1596ebf17e42e293cbacc7a24c3e0dc0f8f755b40aff0402cb74c1ff6baec1d3"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:eab7b040a8a873020113ba814b7db7fa935235e4cbaf8f3da17671baa1024863"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5d61df4a05476ff891cff0030329fee4088d40e4dc9b013fac01bc3c745542c2"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:46533e6792e1410f9801d09fd40cbbff3f3518d1b501d6c3c5b218f427f6ff08"}, + {file = 
"aiohttp-3.11.18-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c1b90407ced992331dd6d4f1355819ea1c274cc1ee4d5b7046c6761f9ec11829"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a2fd04ae4971b914e54fe459dd7edbbd3f2ba875d69e057d5e3c8e8cac094935"}, + {file = "aiohttp-3.11.18-cp39-cp39-win32.whl", hash = "sha256:b2f317d1678002eee6fe85670039fb34a757972284614638f82b903a03feacdc"}, + {file = "aiohttp-3.11.18-cp39-cp39-win_amd64.whl", hash = "sha256:5e7007b8d1d09bce37b54111f593d173691c530b80f27c6493b928dabed9e6ef"}, + {file = "aiohttp-3.11.18.tar.gz", hash = "sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a"}, ] [package.dependencies] +aiohappyeyeballs = ">=2.3.0" aiosignal = ">=1.1.2" attrs = ">=17.3.0" frozenlist = ">=1.1.1" multidict = ">=4.5,<7.0" -yarl = ">=1.0,<2.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" [package.extras] -speedups = ["Brotli", "aiodns", "brotlicffi"] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] name = "aiosignal" -version = "1.3.1" +version = "1.3.2" description = "aiosignal: a list of registered asynchronous callbacks" optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, - {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, ] [package.dependencies] @@ -136,22 +159,23 @@ frozenlist = ">=1.1.0" [[package]] name = "alembic" -version = "1.14.0" +version = "1.15.2" description = "A database migration tool for SQLAlchemy." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"}, - {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"}, + {file = "alembic-1.15.2-py3-none-any.whl", hash = "sha256:2e76bd916d547f6900ec4bb5a90aeac1485d2c92536923d0b138c02b126edc53"}, + {file = "alembic-1.15.2.tar.gz", hash = "sha256:1c72391bbdeffccfe317eefba686cb9a3c078005478885413b95c3b26c57a8a7"}, ] [package.dependencies] Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" +SQLAlchemy = ">=1.4.0" +typing-extensions = ">=4.12" [package.extras] -tz = ["backports.zoneinfo"] +tz = ["tzdata"] [[package]] name = "annotated-types" @@ -159,6 +183,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -166,13 +191,14 @@ files = [ [[package]] name = "anyio" -version = "4.4.0" +version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, ] [package.dependencies] @@ -180,19 +206,20 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +trio = ["trio (>=0.26.1)"] [[package]] name = "argcomplete" -version = "3.4.0" +version = "3.6.2" description = "Bash tab completion for argparse" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "argcomplete-3.4.0-py3-none-any.whl", hash = "sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5"}, - {file = "argcomplete-3.4.0.tar.gz", hash = "sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f"}, + {file = "argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591"}, + {file = "argcomplete-3.6.2.tar.gz", 
hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf"}, ] [package.extras] @@ -204,6 +231,7 @@ version = "0.3.1" description = "Helpers to use cachetools with async code." optional = false python-versions = ">=3.8,<4.0" +groups = ["main"] files = [ {file = "asyncache-0.3.1-py3-none-any.whl", hash = "sha256:ef20a1024d265090dd1e0785c961cf98b9c32cc7d9478973dcf25ac1b80011f5"}, {file = "asyncache-0.3.1.tar.gz", hash = "sha256:9a1e60a75668e794657489bdea6540ee7e3259c483517b934670db7600bf5035"}, @@ -218,6 +246,7 @@ version = "0.30.0" description = "An asyncio PostgreSQL driver" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, @@ -272,37 +301,39 @@ files = [ [package.extras] docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] -gssauth = ["gssapi", "sspilib"] -test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi", "k5test", "mypy (>=1.8.0,<1.9.0)", "sspilib", "uvloop (>=0.15.3)"] +gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] [[package]] name = "attrs" -version = "23.2.0" +version = "25.3.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, - {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, ] [package.extras] -cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] -dev = ["attrs[tests]", "pre-commit"] -docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] -tests = ["attrs[tests-no-zope]", "zope-interface"] -tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] -tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and 
python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] [[package]] name = "authlib" -version = "1.3.2" +version = "1.6.0" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc"}, - {file = "authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2"}, + {file = "authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d"}, + {file = "authlib-1.6.0.tar.gz", hash = "sha256:4367d32031b7af175ad3a323d571dc7257b7099d55978087ceae4a0d88cd3210"}, ] [package.dependencies] @@ -310,13 +341,14 @@ cryptography = "*" [[package]] name = "authzed" -version = "1.1.0" +version = "1.21.1" description = "Client library for SpiceDB." 
optional = false -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.9" +groups = ["main"] files = [ - {file = "authzed-1.1.0-py3-none-any.whl", hash = "sha256:1c37038655c55d054b5caf918d60d680262fda4bc2787dc83576b4424e358214"}, - {file = "authzed-1.1.0.tar.gz", hash = "sha256:6e1300ff75af1840acdb3e0b2bc0dec31a8cf631c4ac6fc1ac674b9ea02d043a"}, + {file = "authzed-1.21.1-py3-none-any.whl", hash = "sha256:9a830c0e9eefc506181f0d82c9a9f73405db46d50e8ecaedd4488486a2792959"}, + {file = "authzed-1.21.1.tar.gz", hash = "sha256:c354d19af5ef1a393381d5be670dd946916742573ae2bf3ac87becdbf44f093b"}, ] [package.dependencies] @@ -324,55 +356,50 @@ googleapis-common-protos = ">=1.65.0,<2.0.0" grpc-interceptor = ">=0.15.4,<0.16.0" grpcio = ">=1.63,<2.0" protobuf = ">=5.26,<6" +protovalidate = ">=0.7.1,<0.8.0" [[package]] -name = "avro-preprocessor" -version = "0.5.1" -description = "A preprocessor for Avro Schemata" +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" optional = false -python-versions = "*" +python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ - {file = "avro-preprocessor-0.5.1.tar.gz", hash = "sha256:d878ea5134223580ba5f6636d812c844cb0f7552025755306b1fa483f015bd5f"}, - {file = "avro_preprocessor-0.5.1-py3-none-any.whl", hash = "sha256:04c62a47d97bf3f5b025d9e0c25d6ae3f4cac40cef89c3530bd7a3ed8affe73a"}, + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, ] -[package.dependencies] -json5 = ">=0.9.21" -networkx = ">=2.8.7" -pygments = ">=2.13.0" -requests = ">=2.28.1" -"ruamel.yaml" = ">=0.17.21" -"ruamel.yaml.clib" = ">=0.2.6" - [[package]] name = "black" -version = "24.4.2" +version = "25.1.0" description = "The uncompromising code formatter." 
optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, ] [package.dependencies] @@ -384,126 +411,159 @@ platformdirs = ">=2" [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cachetools" -version = "5.3.3" +version = "5.5.2" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" 
+groups = ["main"] files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] [[package]] name = "caio" -version = "0.9.17" +version = "0.9.24" description = "Asynchronous file IO for Linux MacOS or Windows." optional = false -python-versions = "<4,>=3.7" -files = [ - {file = "caio-0.9.17-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3f69395fdd45c115b2ef59732e3c8664722a2b51de2d6eedb3d354b2f5f3be3c"}, - {file = "caio-0.9.17-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3028b746e9ec7f6d6ebb386a7fd8caf0eebed5d6e6b4f18c8ef25861934b1673"}, - {file = "caio-0.9.17-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:079730a353bbde03796fab681e969472eace09ffbe5000e584868a7fe389ba6f"}, - {file = "caio-0.9.17-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:549caa51b475877fe32856a26fe937366ae7a1c23a9727005b441db9abb12bcc"}, - {file = "caio-0.9.17-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0ddb253b145a53ecca76381677ce465bc5efeaecb6aaf493fac43ae79659f0fb"}, - {file = "caio-0.9.17-cp312-cp312-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e320b0ea371c810359934f8e8fe81777c493cc5fb4d41de44277cbe7336e74"}, - {file = "caio-0.9.17-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a39a49e279f82aa022f0786339d45d9550b5aa3e46eec7d08e0f351c503df0a5"}, - {file = "caio-0.9.17-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e96925b9f15f43e6ef1d42a83edfd937eb11a984cb6ef7c10527e963595497"}, - {file = "caio-0.9.17-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fca916240597005d2b734f1442fa3c3cfb612bf46e0978b5232e5492a371de38"}, - {file = "caio-0.9.17-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40bd0afbd3491d1e407bcf74e3a9e9cc67a7f290ed29518325194184d63cc2b6"}, - {file = "caio-0.9.17-py3-none-any.whl", hash = "sha256:c55d4dc6b3a36f93237ecd6360e1c131c3808bc47d4191a130148a99b80bb311"}, - {file = "caio-0.9.17.tar.gz", hash = "sha256:8f30511526814d961aeef389ea6885273abe6c655f1e08abbadb95d12fdd9b4f"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "caio-0.9.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d80322126a97ba572412b17b2f086ff95195de2c4261deb19db6bfcdc9ef7540"}, + {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:37bc172349686139e8dc97fff7662c67b1837e18a67b99e8ef25585f2893d013"}, + {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:ad7f0902bf952237e120606252c14ab3cb05995c9f79f39154b5248744864832"}, + {file = "caio-0.9.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:925b9e3748ce1a79386dfb921c0aee450e43225534551abd1398b1c08f9ba29f"}, + {file = "caio-0.9.24-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:3b4dc0a8fb9a58ab40f967ad5a8a858cc0bfb2348a580b4142595849457f9c9a"}, + {file = 
"caio-0.9.24-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fa74d111b3b165bfad2e333367976bdf118bcf505a1cb44d3bcddea2849e3297"}, + {file = "caio-0.9.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae3566228383175265a7583107f21a7cb044a752ea29ba84fce7c1a49a05903"}, + {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:a306b0dda91cb4ca3170f066c114597f8ea41b3da578574a9d2b54f86963de68"}, + {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:8ee158e56128d865fb7d57a9c9c22fca4e8aa8d8664859c977a36fff3ccb3609"}, + {file = "caio-0.9.24-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d47ef8d76aca74c17cb07339a441c5530fc4b8dd9222dfb1e1abd7f9f9b814f"}, + {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:d15fc746c4bf0077d75df05939d1e97c07ccaa8e580681a77021d6929f65d9f4"}, + {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:9368eae0a9badd5f31264896c51b47431d96c0d46f1979018fb1d20c49f56156"}, + {file = "caio-0.9.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f0e5a645ef4e7bb7a81e10ae2a7aef14988cb2cb4354588c6bf6f6f3f6de72a"}, + {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_aarch64.whl", hash = "sha256:08304fa80af7771c78a5bcc923449c7ec8134d589b50d48c66320f85552c7ae2"}, + {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:5339ced0764e10242a50ccb21db7f0d9c359881db0f72fa2c5e45ed828ffacf7"}, + {file = "caio-0.9.24.tar.gz", hash = "sha256:5bcdecaea02a9aa8e3acf0364eff8ad9903d57d70cdb274a42270126290a77f1"}, ] [package.extras] -develop = ["aiomisc-pytest", "pytest", "pytest-cov"] +develop = ["aiomisc-pytest", "coveralls", "pylama[toml]", "pytest", "pytest-cov", "setuptools"] [[package]] -name = "casefy" -version = "0.1.7" -description = "Utilities for string case conversion." +name = "cel-python" +version = "0.2.0" +description = "Pure Python implementation of Google Common Expression Language" optional = false -python-versions = ">=3.6" +python-versions = "<4.0,>=3.8" +groups = ["main"] files = [ - {file = "casefy-0.1.7-py3-none-any.whl", hash = "sha256:ab05ff1c67f2a8e62d9f8986fa9a849416d61ac5413ec57d1f827b4f36589cf6"}, - {file = "casefy-0.1.7.tar.gz", hash = "sha256:6accce985a64b9edb2a610a29ac489d78fac80e52ff8f2d137e294f2f92b8027"}, + {file = "cel_python-0.2.0-py3-none-any.whl", hash = "sha256:478ff73def7b39d51e6982f95d937a57c2b088c491c578fe5cecdbd79f476f60"}, + {file = "cel_python-0.2.0.tar.gz", hash = "sha256:75de72a5cf223ec690b236f0cc24da267219e667bd3e7f8f4f20595fcc1c0c0f"}, ] +[package.dependencies] +jmespath = ">=1.0.1,<2.0.0" +lark = ">=0.12.0,<0.13.0" +python-dateutil = ">=2.9.0.post0,<3.0.0" +pyyaml = ">=6.0.1,<7.0.0" +types-python-dateutil = ">=2.9.0.20240316,<3.0.0.0" +types-pyyaml = ">=6.0.12.20240311,<7.0.0.0" + [[package]] name = "certifi" -version = "2024.7.4" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = 
"cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = 
"sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -511,112 +571,116 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = 
"sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = 
"charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -628,6 +692,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "platform_system == \"Windows\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -635,99 +701,69 @@ files = [ [[package]] name = "cryptography" -version = "44.0.0" +version = "44.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = "!=3.9.0,!=3.9.1,>=3.7" -files = [ - {file = "cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831c3c4d0774e488fdc83a1923b49b9957d33287de923d58ebd3cec47a0ae43f"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"}, - {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385"}, - {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"}, - {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"}, - {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"}, - {file = "cryptography-44.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:abc998e0c0eee3c8a1904221d3f67dcfa76422b23620173e28c11d3e626c21bd"}, - {file = "cryptography-44.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:660cb7312a08bc38be15b696462fa7cc7cd85c3ed9c576e81f4dc4d8b2b31591"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1923cb251c04be85eec9fda837661c67c1049063305d6be5721643c22dd4e2b7"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:404fdc66ee5f83a1388be54300ae978b2efd538018de18556dde92575e05defc"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"}, - {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba"}, - {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"}, - {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"}, - {file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"}, - {file = "cryptography-44.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:708ee5f1bafe76d041b53a4f95eb28cdeb8d18da17e597d46d7833ee59b97ede"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:37d76e6863da3774cd9db5b409a9ecfd2c71c981c38788d3fcfaf177f447b731"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f677e1268c4e23420c3acade68fac427fffcb8d19d7df95ed7ad17cdef8404f4"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5e7cb1e5e56ca0933b4873c0220a78b773b24d40d186b6738080b73d3d0a756"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:8b3e6eae66cf54701ee7d9c83c30ac0a1e3fa17be486033000f2a73a12ab507c"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:be4ce505894d15d5c5037167ffb7f0ae90b7be6f2a98f9a5c3442395501c32fa"}, - {file = "cryptography-44.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c"}, - {file = "cryptography-44.0.0.tar.gz", hash = "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02"}, +groups = ["main"] +files = [ + {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"}, + {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"}, + {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"}, + {file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"}, + {file = 
"cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"}, + {file = "cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"}, + {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"}, + {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"}, ] [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] -pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= 
\"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==44.0.0)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] -[[package]] -name = "dacite" -version = "1.8.1" -description = "Simple creation of data classes from dictionaries." -optional = false -python-versions = ">=3.6" -files = [ - {file = "dacite-1.8.1-py3-none-any.whl", hash = "sha256:cc31ad6fdea1f49962ea42db9421772afe01ac5442380d9a99fcf3d188c61afe"}, -] - -[package.extras] -dev = ["black", "coveralls", "mypy", "pre-commit", "pylint", "pytest (>=5)", "pytest-benchmark", "pytest-cov"] - -[[package]] -name = "dataclasses-avroschema" -version = "0.65.4" -description = "Generate Avro Schemas from Python classes. Serialize/Deserialize python instances with avro schemas" -optional = false -python-versions = "<4.0,>=3.9" -files = [ - {file = "dataclasses_avroschema-0.65.4-py3-none-any.whl", hash = "sha256:f9a12541c73dfd79d68be4e873b0045b38fc03f31457e76102c91c0df75958d9"}, - {file = "dataclasses_avroschema-0.65.4.tar.gz", hash = "sha256:d91c63b854b397595fb90946840fe02f29c1ca8cec000f3aa79f8f757aae0528"}, -] - -[package.dependencies] -casefy = ">=0.1.7,<0.2.0" -dacite = ">=1.8.0,<2.0.0" -fastavro = ">=1.7.3,<2.0.0" -inflection = ">=0.5.1,<0.6.0" -python-dateutil = ">=2.7,<3.0" -typing-extensions = ">=4.2.0,<5.0.0" - -[package.extras] -cli = ["dc-avro (>=0.6.4)"] -faker = ["faker (>=26.0.0,<31.0.0)"] -faust = ["faust-streaming (>=0.10.11,<0.12.0)"] -pydantic = ["pydantic[email] (>=2.4.2,<3.0.0)"] - [[package]] name = "datamodel-code-generator" version = "0.24.2" description = "Datamodel Code Generator" optional = false python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ {file = "datamodel_code_generator-0.24.2-py3-none-any.whl", hash = "sha256:582c30466def12600d7165c5f624bb63a7e944eeaf8320f282518daf9ccb566c"}, {file = "datamodel_code_generator-0.24.2.tar.gz", hash = "sha256:d278c751038c8911efc82856ec549ac1e3e13134567387a4bb5ab7ddc6543162"}, @@ -755,6 +791,7 @@ version = "2.0" description = "A toolset for deeply merging Python dictionaries." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "deepmerge-2.0-py3-none-any.whl", hash = "sha256:6de9ce507115cff0bed95ff0ce9ecc31088ef50cbdf09bc90a09349a318b3d00"}, {file = "deepmerge-2.0.tar.gz", hash = "sha256:5c3d86081fbebd04dd5de03626a0607b809a98fb6ccba5770b62466fe940ff20"}, @@ -763,34 +800,49 @@ files = [ [package.extras] dev = ["black", "build", "mypy", "pytest", "pyupgrade", "twine", "validate-pyproject[all]"] +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + [[package]] name = "dnspython" -version = "2.6.1" +version = "2.7.0" description = "DNS toolkit" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, ] [package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] trio = ["trio (>=0.23)"] wmi = ["wmi (>=1.5.1)"] [[package]] name = "durationpy" -version = "0.7" +version = "0.9" description = "Module for converting between datetime.timedelta and Go's Duration strings." optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "durationpy-0.7.tar.gz", hash = "sha256:8447c43df4f1a0b434e70c15a38d77f5c9bd17284bfc1ff1d430f233d5083732"}, + {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"}, + {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, ] [[package]] @@ -799,6 +851,7 @@ version = "2.2.0" description = "A robust email address syntax and deliverability validation library." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, @@ -814,195 +867,124 @@ version = "1.0.1" description = "Simple, generic API for escaping strings." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "escapism-1.0.1-py2.py3-none-any.whl", hash = "sha256:d28f19edc3cb1ffc36fa238956ecc068695477e748f57157c6dde00a6b77f229"}, {file = "escapism-1.0.1.tar.gz", hash = "sha256:73256bdfb4f22230f0428fc6efecee61cdc4fad531b6f98b849cb9c80711e4ec"}, ] -[[package]] -name = "factory-boy" -version = "3.3.0" -description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." -optional = false -python-versions = ">=3.7" -files = [ - {file = "factory_boy-3.3.0-py2.py3-none-any.whl", hash = "sha256:a2cdbdb63228177aa4f1c52f4b6d83fab2b8623bf602c7dedd7eb83c0f69c04c"}, - {file = "factory_boy-3.3.0.tar.gz", hash = "sha256:bc76d97d1a65bbd9842a6d722882098eb549ec8ee1081f9fb2e8ff29f0c300f1"}, -] - -[package.dependencies] -Faker = ">=0.7.0" - -[package.extras] -dev = ["Django", "Pillow", "SQLAlchemy", "coverage", "flake8", "isort", "mongoengine", "sqlalchemy-utils", "tox", "wheel (>=0.32.0)", "zest.releaser[recommended]"] -doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] - -[[package]] -name = "faker" -version = "26.0.0" -description = "Faker is a Python package that generates fake data for you." -optional = false -python-versions = ">=3.8" -files = [ - {file = "Faker-26.0.0-py3-none-any.whl", hash = "sha256:886ee28219be96949cd21ecc96c4c742ee1680e77f687b095202c8def1a08f06"}, - {file = "Faker-26.0.0.tar.gz", hash = "sha256:0f60978314973de02c00474c2ae899785a42b2cf4f41b7987e93c132a2b8a4a9"}, -] - -[package.dependencies] -python-dateutil = ">=2.4" - -[[package]] -name = "fakeredis" -version = "2.26.1" -description = "Python implementation of redis API, can be used for testing purposes." -optional = false -python-versions = "<4.0,>=3.7" -files = [ - {file = "fakeredis-2.26.1-py3-none-any.whl", hash = "sha256:68a5615d7ef2529094d6958677e30a6d30d544e203a5ab852985c19d7ad57e32"}, - {file = "fakeredis-2.26.1.tar.gz", hash = "sha256:69f4daafe763c8014a6dbf44a17559c46643c95447b3594b3975251a171b806d"}, -] - -[package.dependencies] -redis = {version = ">=4.3", markers = "python_full_version > \"3.8.0\""} -sortedcontainers = ">=2,<3" - -[package.extras] -bf = ["pyprobables (>=0.6,<0.7)"] -cf = ["pyprobables (>=0.6,<0.7)"] -json = ["jsonpath-ng (>=1.6,<2.0)"] -lua = ["lupa (>=2.1,<3.0)"] -probabilistic = ["pyprobables (>=0.6,<0.7)"] - -[[package]] -name = "fastavro" -version = "1.9.5" -description = "Fast read/write of AVRO files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "fastavro-1.9.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:61253148e95dd2b6457247b441b7555074a55de17aef85f5165bfd5facf600fc"}, - {file = "fastavro-1.9.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b604935d671ad47d888efc92a106f98e9440874108b444ac10e28d643109c937"}, - {file = "fastavro-1.9.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0adbf4956fd53bd74c41e7855bb45ccce953e0eb0e44f5836d8d54ad843f9944"}, - {file = "fastavro-1.9.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:53d838e31457db8bf44460c244543f75ed307935d5fc1d93bc631cc7caef2082"}, - {file = "fastavro-1.9.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:07b6288e8681eede16ff077632c47395d4925c2f51545cd7a60f194454db2211"}, - {file = "fastavro-1.9.5-cp310-cp310-win_amd64.whl", hash = "sha256:ef08cf247fdfd61286ac0c41854f7194f2ad05088066a756423d7299b688d975"}, - {file = "fastavro-1.9.5-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:c52d7bb69f617c90935a3e56feb2c34d4276819a5c477c466c6c08c224a10409"}, - {file = "fastavro-1.9.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85e05969956003df8fa4491614bc62fe40cec59e94d06e8aaa8d8256ee3aab82"}, - {file = "fastavro-1.9.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06e6df8527493a9f0d9a8778df82bab8b1aa6d80d1b004e5aec0a31dc4dc501c"}, - {file = "fastavro-1.9.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:27820da3b17bc01cebb6d1687c9d7254b16d149ef458871aaa207ed8950f3ae6"}, - {file = "fastavro-1.9.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:195a5b8e33eb89a1a9b63fa9dce7a77d41b3b0cd785bac6044df619f120361a2"}, - {file = "fastavro-1.9.5-cp311-cp311-win_amd64.whl", hash = "sha256:be612c109efb727bfd36d4d7ed28eb8e0506617b7dbe746463ebbf81e85eaa6b"}, - {file = "fastavro-1.9.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b133456c8975ec7d2a99e16a7e68e896e45c821b852675eac4ee25364b999c14"}, - {file = "fastavro-1.9.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf586373c3d1748cac849395aad70c198ee39295f92e7c22c75757b5c0300fbe"}, - {file = "fastavro-1.9.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:724ef192bc9c55d5b4c7df007f56a46a21809463499856349d4580a55e2b914c"}, - {file = "fastavro-1.9.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bfd11fe355a8f9c0416803afac298960eb4c603a23b1c74ff9c1d3e673ea7185"}, - {file = "fastavro-1.9.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9827d1654d7bcb118ef5efd3e5b2c9ab2a48d44dac5e8c6a2327bc3ac3caa828"}, - {file = "fastavro-1.9.5-cp312-cp312-win_amd64.whl", hash = "sha256:d84b69dca296667e6137ae7c9a96d060123adbc0c00532cc47012b64d38b47e9"}, - {file = "fastavro-1.9.5-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:fb744e9de40fb1dc75354098c8db7da7636cba50a40f7bef3b3fb20f8d189d88"}, - {file = "fastavro-1.9.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:240df8bacd13ff5487f2465604c007d686a566df5cbc01d0550684eaf8ff014a"}, - {file = "fastavro-1.9.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3bb35c25bbc3904e1c02333bc1ae0173e0a44aa37a8e95d07e681601246e1f1"}, - {file = "fastavro-1.9.5-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:b47a54a9700de3eabefd36dabfb237808acae47bc873cada6be6990ef6b165aa"}, - {file = "fastavro-1.9.5-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:48c7b5e6d2f3bf7917af301c275b05c5be3dd40bb04e80979c9e7a2ab31a00d1"}, - {file = "fastavro-1.9.5-cp38-cp38-win_amd64.whl", hash = "sha256:05d13f98d4e325be40387e27da9bd60239968862fe12769258225c62ec906f04"}, - {file = "fastavro-1.9.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5b47948eb196263f6111bf34e1cd08d55529d4ed46eb50c1bc8c7c30a8d18868"}, - {file = "fastavro-1.9.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85b7a66ad521298ad9373dfe1897a6ccfc38feab54a47b97922e213ae5ad8870"}, - {file = "fastavro-1.9.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44cb154f863ad80e41aea72a709b12e1533b8728c89b9b1348af91a6154ab2f5"}, - {file = "fastavro-1.9.5-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5f7f2b1fe21231fd01f1a2a90e714ae267fe633cd7ce930c0aea33d1c9f4901"}, - {file = "fastavro-1.9.5-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88fbbe16c61d90a89d78baeb5a34dc1c63a27b115adccdbd6b1fb6f787deacf2"}, - {file = "fastavro-1.9.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:753f5eedeb5ca86004e23a9ce9b41c5f25eb64a876f95edcc33558090a7f3e4b"}, - {file = "fastavro-1.9.5.tar.gz", hash = "sha256:6419ebf45f88132a9945c51fe555d4f10bb97c236288ed01894f957c6f914553"}, -] - -[package.extras] -codecs = ["cramjam", "lz4", "zstandard"] -lz4 = ["lz4"] -snappy = ["cramjam"] -zstandard = ["zstandard"] - [[package]] name = "frozenlist" -version = "1.4.1" +version = "1.6.0" description = "A list-like structure which implements collections.abc.MutableSequence" optional = false -python-versions = ">=3.8" -files = [ - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f9aa1878d1083b276b0196f2dfbe00c9b7e752475ed3b682025ff20c1c1f51ac"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:29acab3f66f0f24674b7dc4736477bcd4bc3ad4b896f5f45379a67bce8b96868"}, - {file = "frozenlist-1.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:74fb4bee6880b529a0c6560885fce4dc95936920f9f20f53d99a213f7bf66776"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:590344787a90ae57d62511dd7c736ed56b428f04cd8c161fcc5e7232c130c69a"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:068b63f23b17df8569b7fdca5517edef76171cf3897eb68beb01341131fbd2ad"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c849d495bf5154cd8da18a9eb15db127d4dba2968d88831aff6f0331ea9bd4c"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9750cc7fe1ae3b1611bb8cfc3f9ec11d532244235d75901fb6b8e42ce9229dfe"}, - {file = "frozenlist-1.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9b2de4cf0cdd5bd2dee4c4f63a653c61d2408055ab77b151c1957f221cabf2a"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0633c8d5337cb5c77acbccc6357ac49a1770b8c487e5b3505c57b949b4b82e98"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:27657df69e8801be6c3638054e202a135c7f299267f1a55ed3a598934f6c0d75"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:f9a3ea26252bd92f570600098783d1371354d89d5f6b7dfd87359d669f2109b5"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4f57dab5fe3407b6c0c1cc907ac98e8a189f9e418f3b6e54d65a718aaafe3950"}, - {file = "frozenlist-1.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e02a0e11cf6597299b9f3bbd3f93d79217cb90cfd1411aec33848b13f5c656cc"}, - {file = "frozenlist-1.4.1-cp310-cp310-win32.whl", hash = "sha256:a828c57f00f729620a442881cc60e57cfcec6842ba38e1b19fd3e47ac0ff8dc1"}, - {file = "frozenlist-1.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:f56e2333dda1fe0f909e7cc59f021eba0d2307bc6f012a1ccf2beca6ba362439"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a0cb6f11204443f27a1628b0e460f37fb30f624be6051d490fa7d7e26d4af3d0"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b46c8ae3a8f1f41a0d2ef350c0b6e65822d80772fe46b653ab6b6274f61d4a49"}, - {file = "frozenlist-1.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fde5bd59ab5357e3853313127f4d3565fc7dad314a74d7b5d43c22c6a5ed2ced"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:722e1124aec435320ae01ee3ac7bec11a5d47f25d0ed6328f2273d287bc3abb0"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2471c201b70d58a0f0c1f91261542a03d9a5e088ed3dc6c160d614c01649c106"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c757a9dd70d72b076d6f68efdbb9bc943665ae954dad2801b874c8c69e185068"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f146e0911cb2f1da549fc58fc7bcd2b836a44b79ef871980d605ec392ff6b0d2"}, - {file = "frozenlist-1.4.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f9c515e7914626b2a2e1e311794b4c35720a0be87af52b79ff8e1429fc25f19"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c302220494f5c1ebeb0912ea782bcd5e2f8308037b3c7553fad0e48ebad6ad82"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:442acde1e068288a4ba7acfe05f5f343e19fac87bfc96d89eb886b0363e977ec"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:1b280e6507ea8a4fa0c0a7150b4e526a8d113989e28eaaef946cc77ffd7efc0a"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:fe1a06da377e3a1062ae5fe0926e12b84eceb8a50b350ddca72dc85015873f74"}, - {file = "frozenlist-1.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db9e724bebd621d9beca794f2a4ff1d26eed5965b004a97f1f1685a173b869c2"}, - {file = "frozenlist-1.4.1-cp311-cp311-win32.whl", hash = "sha256:e774d53b1a477a67838a904131c4b0eef6b3d8a651f8b138b04f748fccfefe17"}, - {file = "frozenlist-1.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb3c2db03683b5767dedb5769b8a40ebb47d6f7f45b1b3e3b4b51ec8ad9d9825"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1979bc0aeb89b33b588c51c54ab0161791149f2461ea7c7c946d95d5f93b56ae"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cc7b01b3754ea68a62bd77ce6020afaffb44a590c2289089289363472d13aedb"}, - {file = "frozenlist-1.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c9c92be9fd329ac801cc420e08452b70e7aeab94ea4233a4804f0915c14eba9b"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c3894db91f5a489fc8fa6a9991820f368f0b3cbdb9cd8849547ccfab3392d86"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ba60bb19387e13597fb059f32cd4d59445d7b18b69a745b8f8e5db0346f33480"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aefbba5f69d42246543407ed2461db31006b0f76c4e32dfd6f42215a2c41d09"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780d3a35680ced9ce682fbcf4cb9c2bad3136eeff760ab33707b71db84664e3a"}, - {file = "frozenlist-1.4.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9acbb16f06fe7f52f441bb6f413ebae6c37baa6ef9edd49cdd567216da8600cd"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:23b701e65c7b36e4bf15546a89279bd4d8675faabc287d06bbcfac7d3c33e1e6"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3e0153a805a98f5ada7e09826255ba99fb4f7524bb81bf6b47fb702666484ae1"}, - {file = 
"frozenlist-1.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:dd9b1baec094d91bf36ec729445f7769d0d0cf6b64d04d86e45baf89e2b9059b"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a4471094e146b6790f61b98616ab8e44f72661879cc63fa1049d13ef711e71e"}, - {file = "frozenlist-1.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5667ed53d68d91920defdf4035d1cdaa3c3121dc0b113255124bcfada1cfa1b8"}, - {file = "frozenlist-1.4.1-cp312-cp312-win32.whl", hash = "sha256:beee944ae828747fd7cb216a70f120767fc9f4f00bacae8543c14a6831673f89"}, - {file = "frozenlist-1.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:64536573d0a2cb6e625cf309984e2d873979709f2cf22839bf2d61790b448ad5"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:20b51fa3f588ff2fe658663db52a41a4f7aa6c04f6201449c6c7c476bd255c0d"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:410478a0c562d1a5bcc2f7ea448359fcb050ed48b3c6f6f4f18c313a9bdb1826"}, - {file = "frozenlist-1.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6321c9efe29975232da3bd0af0ad216800a47e93d763ce64f291917a381b8eb"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48f6a4533887e189dae092f1cf981f2e3885175f7a0f33c91fb5b7b682b6bab6"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6eb73fa5426ea69ee0e012fb59cdc76a15b1283d6e32e4f8dc4482ec67d1194d"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fbeb989b5cc29e8daf7f976b421c220f1b8c731cbf22b9130d8815418ea45887"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32453c1de775c889eb4e22f1197fe3bdfe457d16476ea407472b9442e6295f7a"}, - {file = "frozenlist-1.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693945278a31f2086d9bf3df0fe8254bbeaef1fe71e1351c3bd730aa7d31c41b"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1d0ce09d36d53bbbe566fe296965b23b961764c0bcf3ce2fa45f463745c04701"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3a670dc61eb0d0eb7080890c13de3066790f9049b47b0de04007090807c776b0"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:dca69045298ce5c11fd539682cff879cc1e664c245d1c64da929813e54241d11"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a06339f38e9ed3a64e4c4e43aec7f59084033647f908e4259d279a52d3757d09"}, - {file = "frozenlist-1.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b7f2f9f912dca3934c1baec2e4585a674ef16fe00218d833856408c48d5beee7"}, - {file = "frozenlist-1.4.1-cp38-cp38-win32.whl", hash = "sha256:e7004be74cbb7d9f34553a5ce5fb08be14fb33bc86f332fb71cbe5216362a497"}, - {file = "frozenlist-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:5a7d70357e7cee13f470c7883a063aae5fe209a493c57d86eb7f5a6f910fae09"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bfa4a17e17ce9abf47a74ae02f32d014c5e9404b6d9ac7f729e01562bbee601e"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b7e3ed87d4138356775346e6845cccbe66cd9e207f3cd11d2f0b9fd13681359d"}, - {file = "frozenlist-1.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c99169d4ff810155ca50b4da3b075cbde79752443117d89429595c2e8e37fed8"}, - {file = 
"frozenlist-1.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb678da49d9f72c9f6c609fbe41a5dfb9a9282f9e6a2253d5a91e0fc382d7c0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6db4667b187a6742b33afbbaf05a7bc551ffcf1ced0000a571aedbb4aa42fc7b"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55fdc093b5a3cb41d420884cdaf37a1e74c3c37a31f46e66286d9145d2063bd0"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82e8211d69a4f4bc360ea22cd6555f8e61a1bd211d1d5d39d3d228b48c83a897"}, - {file = "frozenlist-1.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89aa2c2eeb20957be2d950b85974b30a01a762f3308cd02bb15e1ad632e22dc7"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d3e0c25a2350080e9319724dede4f31f43a6c9779be48021a7f4ebde8b2d742"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7268252af60904bf52c26173cbadc3a071cece75f873705419c8681f24d3edea"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0c250a29735d4f15321007fb02865f0e6b6a41a6b88f1f523ca1596ab5f50bd5"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:96ec70beabbd3b10e8bfe52616a13561e58fe84c0101dd031dc78f250d5128b9"}, - {file = "frozenlist-1.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:23b2d7679b73fe0e5a4560b672a39f98dfc6f60df63823b0a9970525325b95f6"}, - {file = "frozenlist-1.4.1-cp39-cp39-win32.whl", hash = "sha256:a7496bfe1da7fb1a4e1cc23bb67c58fab69311cc7d32b5a99c2007b4b2a0e932"}, - {file = "frozenlist-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:e6a20a581f9ce92d389a8c7d7c3dd47c81fd5d6e655c8dddf341e14aa48659d0"}, - {file = "frozenlist-1.4.1-py3-none-any.whl", hash = "sha256:04ced3e6a46b4cfffe20f9ae482818e34eba9b5fb0ce4056e4cc9b6e212d09b7"}, - {file = "frozenlist-1.4.1.tar.gz", hash = "sha256:c037a86e8513059a2613aaba4d817bb90b9d9b6b69aace3ce9c877e8c8ed402b"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e6e558ea1e47fd6fa8ac9ccdad403e5dd5ecc6ed8dda94343056fa4277d5c65e"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4b3cd7334a4bbc0c472164f3744562cb72d05002cc6fcf58adb104630bbc352"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9799257237d0479736e2b4c01ff26b5c7f7694ac9692a426cb717f3dc02fff9b"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a7bb0fe1f7a70fb5c6f497dc32619db7d2cdd53164af30ade2f34673f8b1fc"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:36d2fc099229f1e4237f563b2a3e0ff7ccebc3999f729067ce4e64a97a7f2869"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f27a9f9a86dcf00708be82359db8de86b80d029814e6693259befe82bb58a106"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75ecee69073312951244f11b8627e3700ec2bfe07ed24e3a685a5979f0412d24"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f2c7d5aa19714b1b01a0f515d078a629e445e667b9da869a3cd0e6fe7dec78bd"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69bbd454f0fb23b51cadc9bdba616c9678e4114b6f9fa372d462ff2ed9323ec8"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7daa508e75613809c7a57136dec4871a21bca3080b3a8fc347c50b187df4f00c"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:89ffdb799154fd4d7b85c56d5fa9d9ad48946619e0eb95755723fffa11022d75"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:920b6bd77d209931e4c263223381d63f76828bec574440f29eb497cf3394c249"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d3ceb265249fb401702fce3792e6b44c1166b9319737d21495d3611028d95769"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:52021b528f1571f98a7d4258c58aa8d4b1a96d4f01d00d51f1089f2e0323cb02"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0f2ca7810b809ed0f1917293050163c7654cefc57a49f337d5cd9de717b8fad3"}, + {file = "frozenlist-1.6.0-cp310-cp310-win32.whl", hash = "sha256:0e6f8653acb82e15e5443dba415fb62a8732b68fe09936bb6d388c725b57f812"}, + {file = "frozenlist-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a39819a5a3e84304cd286e3dc62a549fe60985415851b3337b6f5cc91907f1"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae8337990e7a45683548ffb2fee1af2f1ed08169284cd829cdd9a7fa7470530d"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c952f69dd524558694818a461855f35d36cc7f5c0adddce37e962c85d06eac0"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f5fef13136c4e2dee91bfb9a44e236fff78fc2cd9f838eddfc470c3d7d90afe"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:716bbba09611b4663ecbb7cd022f640759af8259e12a6ca939c0a6acd49eedba"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7b8c4dc422c1a3ffc550b465090e53b0bf4839047f3e436a34172ac67c45d595"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b11534872256e1666116f6587a1592ef395a98b54476addb5e8d352925cb5d4a"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c6eceb88aaf7221f75be6ab498dc622a151f5f88d536661af3ffc486245a626"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62c828a5b195570eb4b37369fcbbd58e96c905768d53a44d13044355647838ff"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c6bd2c6399920c9622362ce95a7d74e7f9af9bfec05fff91b8ce4b9647845a"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49ba23817781e22fcbd45fd9ff2b9b8cdb7b16a42a4851ab8025cae7b22e96d0"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:431ef6937ae0f853143e2ca67d6da76c083e8b1fe3df0e96f3802fd37626e606"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9d124b38b3c299ca68433597ee26b7819209cb8a3a9ea761dfe9db3a04bba584"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash 
= "sha256:118e97556306402e2b010da1ef21ea70cb6d6122e580da64c056b96f524fbd6a"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb3b309f1d4086b5533cf7bbcf3f956f0ae6469664522f1bde4feed26fba60f1"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54dece0d21dce4fdb188a1ffc555926adf1d1c516e493c2914d7c370e454bc9e"}, + {file = "frozenlist-1.6.0-cp311-cp311-win32.whl", hash = "sha256:654e4ba1d0b2154ca2f096bed27461cf6160bc7f504a7f9a9ef447c293caf860"}, + {file = "frozenlist-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e911391bffdb806001002c1f860787542f45916c3baf764264a52765d5a5603"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770"}, + {file = "frozenlist-1.6.0-cp312-cp312-win32.whl", hash = "sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc"}, + {file = "frozenlist-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e"}, + 
{file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e"}, + {file = "frozenlist-1.6.0-cp313-cp313-win32.whl", hash = "sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4"}, + {file = "frozenlist-1.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win32.whl", hash = "sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:536a1236065c29980c15c7229fbb830dedf809708c10e159b8136534233545f0"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ed5e3a4462ff25ca84fb09e0fada8ea267df98a450340ead4c91b44857267d70"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e19c0fc9f4f030fcae43b4cdec9e8ab83ffe30ec10c79a4a43a04d1af6c5e1ad"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c608f833897501dac548585312d73a7dca028bf3b8688f0d712b7acfaf7fb3"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0dbae96c225d584f834b8d3cc688825911960f003a85cb0fd20b6e5512468c42"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:625170a91dd7261a1d1c2a0c1a353c9e55d21cd67d0852185a5fef86587e6f5f"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1db8b2fc7ee8a940b547a14c10e56560ad3ea6499dc6875c354e2335812f739d"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4da6fc43048b648275a220e3a61c33b7fff65d11bdd6dcb9d9c145ff708b804c"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef8e7e8f2f3820c5f175d70fdd199b79e417acf6c72c5d0aa8f63c9f721646f"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa733d123cc78245e9bb15f29b44ed9e5780dc6867cfc4e544717b91f980af3b"}, + 
{file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ba7f8d97152b61f22d7f59491a781ba9b177dd9f318486c5fbc52cde2db12189"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:56a0b8dd6d0d3d971c91f1df75e824986667ccce91e20dca2023683814344791"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5c9e89bf19ca148efcc9e3c44fd4c09d5af85c8a7dd3dbd0da1cb83425ef4983"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1330f0a4376587face7637dfd245380a57fe21ae8f9d360c1c2ef8746c4195fa"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2187248203b59625566cac53572ec8c2647a140ee2738b4e36772930377a533c"}, + {file = "frozenlist-1.6.0-cp39-cp39-win32.whl", hash = "sha256:2b8cf4cfea847d6c12af06091561a89740f1f67f331c3fa8623391905e878530"}, + {file = "frozenlist-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:1255d5d64328c5a0d066ecb0f02034d086537925f1f04b50b1ae60d37afbf572"}, + {file = "frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191"}, + {file = "frozenlist-1.6.0.tar.gz", hash = "sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68"}, ] [[package]] @@ -1011,6 +993,7 @@ version = "1.3.0" description = "GenSON is a powerful, user-friendly JSON Schema generator." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7"}, {file = "genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37"}, @@ -1018,13 +1001,14 @@ files = [ [[package]] name = "google-auth" -version = "2.32.0" +version = "2.39.0" description = "Google Authentication Library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "google_auth-2.32.0-py2.py3-none-any.whl", hash = "sha256:53326ea2ebec768070a94bee4e1b9194c9646ea0c2bd72422785bd0f9abfad7b"}, - {file = "google_auth-2.32.0.tar.gz", hash = "sha256:49315be72c55a6a37d62819e3573f6b416aca00721f7e3e31a008d928bf64022"}, + {file = "google_auth-2.39.0-py2.py3-none-any.whl", hash = "sha256:0150b6711e97fb9f52fe599f55648950cc4540015565d8fbb31be2ad6e1548a2"}, + {file = "google_auth-2.39.0.tar.gz", hash = "sha256:73222d43cdc35a3aeacbfdcaf73142a97839f10de930550d89ebfe1d0a00cde7"}, ] [package.dependencies] @@ -1033,94 +1017,97 @@ pyasn1-modules = ">=0.2.1" rsa = ">=3.1.4,<5" [package.extras] -aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] -enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] -pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] reauth = ["pyu2f (>=0.1.5)"] -requests = ["requests (>=2.20.0,<3.0.0.dev0)"] +requests = ["requests (>=2.20.0,<3.0.0)"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", 
"pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +urllib3 = ["packaging", "urllib3"] [[package]] name = "googleapis-common-protos" -version = "1.65.0" +version = "1.70.0" description = "Common protobufs used in Google APIs" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"}, - {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"}, + {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, + {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, ] [package.dependencies] -protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0" +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" [package.extras] -grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] +grpc = ["grpcio (>=1.44.0,<2.0.0)"] [[package]] name = "greenlet" -version = "3.0.3" +version = "3.2.1" description = "Lightweight in-process concurrent programming" optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dca1e2f3ca00b84a396bc1bce13dd21f680f035314d2379c4160c98153b2059b"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ed7fb269f15dc662787f4119ec300ad0702fa1b19d2135a37c2c4de6fadfd4a"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd4f49ae60e10adbc94b45c0b5e6a179acc1736cf7a90160b404076ee283cf83"}, - {file = "greenlet-3.0.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:73a411ef564e0e097dbe7e866bb2dda0f027e072b04da387282b02c308807405"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7f362975f2d179f9e26928c5b517524e89dd48530a0202570d55ad6ca5d8a56f"}, - {file = "greenlet-3.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:649dde7de1a5eceb258f9cb00bdf50e978c9db1b996964cd80703614c86495eb"}, - {file = "greenlet-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:68834da854554926fbedd38c76e60c4a2e3198c6fbed520b106a8986445caaf9"}, - {file = "greenlet-3.0.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b1b5667cced97081bf57b8fa1d6bfca67814b0afd38208d52538316e9422fc61"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:52f59dd9c96ad2fc0d5724107444f76eb20aaccb675bf825df6435acb7703559"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:afaff6cf5200befd5cec055b07d1c0a5a06c040fe5ad148abcd11ba6ab9b114e"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2797aa5aedac23af156bbb5a6aa2cd3427ada2972c828244eb7d1b9255846379"}, - {file = "greenlet-3.0.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b7f009caad047246ed379e1c4dbcb8b020f0a390667ea74d2387be2998f58a22"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c5e1536de2aad7bf62e27baf79225d0d64360d4168cf2e6becb91baf1ed074f3"}, - {file = "greenlet-3.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:894393ce10ceac937e56ec00bb71c4c2f8209ad516e96033e4b3b1de270e200d"}, - {file = "greenlet-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:1ea188d4f49089fc6fb283845ab18a2518d279c7cd9da1065d7a84e991748728"}, - {file = "greenlet-3.0.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:70fb482fdf2c707765ab5f0b6655e9cfcf3780d8d87355a063547b41177599be"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4d1ac74f5c0c0524e4a24335350edad7e5f03b9532da7ea4d3c54d527784f2e"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:149e94a2dd82d19838fe4b2259f1b6b9957d5ba1b25640d2380bea9c5df37676"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15d79dd26056573940fcb8c7413d84118086f2ec1a8acdfa854631084393efcc"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b7db1ebff4ba09aaaeae6aa491daeb226c8150fc20e836ad00041bcb11230"}, - {file = "greenlet-3.0.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1f672519db1796ca0d8753f9e78ec02355e862d0998193038c7073045899f305"}, - {file = "greenlet-3.0.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2516a9957eed41dd8f1ec0c604f1cdc86758b587d964668b5b196a9db5bfcde6"}, - {file = "greenlet-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:bba5387a6975598857d86de9eac14210a49d554a77eb8261cc68b7d082f78ce2"}, - {file = "greenlet-3.0.3-cp37-cp37m-macosx_11_0_universal2.whl", hash = "sha256:5b51e85cb5ceda94e79d019ed36b35386e8c37d22f07d6a751cb659b180d5274"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daf3cb43b7cf2ba96d614252ce1684c1bccee6b2183a01328c98d36fcd7d5cb0"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99bf650dc5d69546e076f413a87481ee1d2d09aaaaaca058c9251b6d8c14783f"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2dd6e660effd852586b6a8478a1d244b8dc90ab5b1321751d2ea15deb49ed414"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3391d1e16e2a5a1507d83e4a8b100f4ee626e8eca43cf2cadb543de69827c4c"}, - {file = "greenlet-3.0.3-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1f145462f1fa6e4a4ae3c0f782e580ce44d57c8f2c7aae1b6fa88c0b2efdb41"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1a7191e42732df52cb5f39d3527217e7ab73cae2cb3694d241e18f53d84ea9a7"}, - {file = "greenlet-3.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:0448abc479fab28b00cb472d278828b3ccca164531daab4e970a0458786055d6"}, - {file = "greenlet-3.0.3-cp37-cp37m-win32.whl", hash = "sha256:b542be2440edc2d48547b5923c408cbe0fc94afb9f18741faa6ae970dbcb9b6d"}, - {file = "greenlet-3.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:01bc7ea167cf943b4c802068e178bbf70ae2e8c080467070d01bfa02f337ee67"}, - {file = "greenlet-3.0.3-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:1996cb9306c8595335bb157d133daf5cf9f693ef413e7673cb07e3e5871379ca"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ddc0f794e6ad661e321caa8d2f0a55ce01213c74722587256fb6566049a8b04"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c9db1c18f0eaad2f804728c67d6c610778456e3e1cc4ab4bbd5eeb8e6053c6fc"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7170375bcc99f1a2fbd9c306f5be8764eaf3ac6b5cb968862cad4c7057756506"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b66c9c1e7ccabad3a7d037b2bcb740122a7b17a53734b7d72a344ce39882a1b"}, - {file = "greenlet-3.0.3-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:098d86f528c855ead3479afe84b49242e174ed262456c342d70fc7f972bc13c4"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:81bb9c6d52e8321f09c3d165b2a78c680506d9af285bfccbad9fb7ad5a5da3e5"}, - {file = "greenlet-3.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da"}, - {file = "greenlet-3.0.3-cp38-cp38-win32.whl", hash = "sha256:d46677c85c5ba00a9cb6f7a00b2bfa6f812192d2c9f7d9c4f6a55b60216712f3"}, - {file = "greenlet-3.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:419b386f84949bf0e7c73e6032e3457b82a787c1ab4a0e43732898a761cc9dbf"}, - {file = "greenlet-3.0.3-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:da70d4d51c8b306bb7a031d5cff6cc25ad253affe89b70352af5f1cb68e74b53"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086152f8fbc5955df88382e8a75984e2bb1c892ad2e3c80a2508954e52295257"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d73a9fe764d77f87f8ec26a0c85144d6a951a6c438dfe50487df5595c6373eac"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7dcbe92cc99f08c8dd11f930de4d99ef756c3591a5377d1d9cd7dd5e896da71"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1551a8195c0d4a68fac7a4325efac0d541b48def35feb49d803674ac32582f61"}, - {file = "greenlet-3.0.3-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:64d7675ad83578e3fc149b617a444fab8efdafc9385471f868eb5ff83e446b8b"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b37eef18ea55f2ffd8f00ff8fe7c8d3818abd3e25fb73fae2ca3b672e333a7a6"}, - {file = "greenlet-3.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:77457465d89b8263bca14759d7c1684df840b6811b2499838cc5b040a8b5b113"}, - {file = "greenlet-3.0.3-cp39-cp39-win32.whl", hash = "sha256:57e8974f23e47dac22b83436bdcf23080ade568ce77df33159e019d161ce1d1e"}, - {file = "greenlet-3.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:c5ee858cfe08f34712f548c3c363e807e7186f03ad7a5039ebadb29e8c6be067"}, - {file = "greenlet-3.0.3.tar.gz", hash = 
"sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"}, +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" +files = [ + {file = "greenlet-3.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:777c1281aa7c786738683e302db0f55eb4b0077c20f1dc53db8852ffaea0a6b0"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3059c6f286b53ea4711745146ffe5a5c5ff801f62f6c56949446e0f6461f8157"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e1a40a17e2c7348f5eee5d8e1b4fa6a937f0587eba89411885a36a8e1fc29bd2"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5193135b3a8d0017cb438de0d49e92bf2f6c1c770331d24aa7500866f4db4017"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:639a94d001fe874675b553f28a9d44faed90f9864dc57ba0afef3f8d76a18b04"}, + {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8fe303381e7e909e42fb23e191fc69659910909fdcd056b92f6473f80ef18543"}, + {file = "greenlet-3.2.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:72c9b668454e816b5ece25daac1a42c94d1c116d5401399a11b77ce8d883110c"}, + {file = "greenlet-3.2.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6079ae990bbf944cf66bea64a09dcb56085815630955109ffa98984810d71565"}, + {file = "greenlet-3.2.1-cp310-cp310-win_amd64.whl", hash = "sha256:e63cd2035f49376a23611fbb1643f78f8246e9d4dfd607534ec81b175ce582c2"}, + {file = "greenlet-3.2.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:aa30066fd6862e1153eaae9b51b449a6356dcdb505169647f69e6ce315b9468b"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b0f3a0a67786facf3b907a25db80efe74310f9d63cc30869e49c79ee3fcef7e"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64a4d0052de53ab3ad83ba86de5ada6aeea8f099b4e6c9ccce70fb29bc02c6a2"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:852ef432919830022f71a040ff7ba3f25ceb9fe8f3ab784befd747856ee58530"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4818116e75a0dd52cdcf40ca4b419e8ce5cb6669630cb4f13a6c384307c9543f"}, + {file = "greenlet-3.2.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9afa05fe6557bce1642d8131f87ae9462e2a8e8c46f7ed7929360616088a3975"}, + {file = "greenlet-3.2.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5c12f0d17a88664757e81a6e3fc7c2452568cf460a2f8fb44f90536b2614000b"}, + {file = "greenlet-3.2.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dbb4e1aa2000852937dd8f4357fb73e3911da426df8ca9b8df5db231922da474"}, + {file = "greenlet-3.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:cb5ee928ce5fedf9a4b0ccdc547f7887136c4af6109d8f2fe8e00f90c0db47f5"}, + {file = "greenlet-3.2.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:0ba2811509a30e5f943be048895a983a8daf0b9aa0ac0ead526dfb5d987d80ea"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4245246e72352b150a1588d43ddc8ab5e306bef924c26571aafafa5d1aaae4e8"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7abc0545d8e880779f0c7ce665a1afc3f72f0ca0d5815e2b006cafc4c1cc5840"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6dcc6d604a6575c6225ac0da39df9335cc0c6ac50725063fa90f104f3dbdb2c9"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2273586879affca2d1f414709bb1f61f0770adcabf9eda8ef48fd90b36f15d12"}, + {file = "greenlet-3.2.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ff38c869ed30fff07f1452d9a204ece1ec6d3c0870e0ba6e478ce7c1515acf22"}, + {file = "greenlet-3.2.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e934591a7a4084fa10ee5ef50eb9d2ac8c4075d5c9cf91128116b5dca49d43b1"}, + {file = "greenlet-3.2.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:063bcf7f8ee28eb91e7f7a8148c65a43b73fbdc0064ab693e024b5a940070145"}, + {file = "greenlet-3.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7132e024ebeeeabbe661cf8878aac5d2e643975c4feae833142592ec2f03263d"}, + {file = "greenlet-3.2.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:e1967882f0c42eaf42282a87579685c8673c51153b845fde1ee81be720ae27ac"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e77ae69032a95640a5fe8c857ec7bee569a0997e809570f4c92048691ce4b437"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3227c6ec1149d4520bc99edac3b9bc8358d0034825f3ca7572165cb502d8f29a"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ddda0197c5b46eedb5628d33dad034c455ae77708c7bf192686e760e26d6a0c"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de62b542e5dcf0b6116c310dec17b82bb06ef2ceb696156ff7bf74a7a498d982"}, + {file = "greenlet-3.2.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c07a0c01010df42f1f058b3973decc69c4d82e036a951c3deaf89ab114054c07"}, + {file = "greenlet-3.2.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:2530bfb0abcd451ea81068e6d0a1aac6dabf3f4c23c8bd8e2a8f579c2dd60d95"}, + {file = "greenlet-3.2.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:1c472adfca310f849903295c351d297559462067f618944ce2650a1878b84123"}, + {file = "greenlet-3.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:24a496479bc8bd01c39aa6516a43c717b4cee7196573c47b1f8e1011f7c12495"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:175d583f7d5ee57845591fc30d852b75b144eb44b05f38b67966ed6df05c8526"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ecc9d33ca9428e4536ea53e79d781792cee114d2fa2695b173092bdbd8cd6d5"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3f56382ac4df3860ebed8ed838f268f03ddf4e459b954415534130062b16bc32"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc45a7189c91c0f89aaf9d69da428ce8301b0fd66c914a499199cfb0c28420fc"}, + {file = "greenlet-3.2.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51a2f49da08cff79ee42eb22f1658a2aed60c72792f0a0a95f5f0ca6d101b1fb"}, + {file = "greenlet-3.2.1-cp313-cp313t-musllinux_1_1_aarch64.whl", 
hash = "sha256:0c68bbc639359493420282d2f34fa114e992a8724481d700da0b10d10a7611b8"}, + {file = "greenlet-3.2.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:e775176b5c203a1fa4be19f91da00fd3bff536868b77b237da3f4daa5971ae5d"}, + {file = "greenlet-3.2.1-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:d6668caf15f181c1b82fb6406f3911696975cc4c37d782e19cb7ba499e556189"}, + {file = "greenlet-3.2.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:17964c246d4f6e1327edd95e2008988a8995ae3a7732be2f9fc1efed1f1cdf8c"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:04b4ec7f65f0e4a1500ac475c9343f6cc022b2363ebfb6e94f416085e40dea15"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b38d53cf268da963869aa25a6e4cc84c1c69afc1ae3391738b2603d110749d01"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a7490f74e8aabc5f29256765a99577ffde979920a2db1f3676d265a3adba41"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4339b202ac20a89ccd5bde0663b4d00dc62dd25cb3fb14f7f3034dec1b0d9ece"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a750f1046994b9e038b45ae237d68153c29a3a783075211fb1414a180c8324b"}, + {file = "greenlet-3.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:374ffebaa5fbd10919cd599e5cf8ee18bae70c11f9d61e73db79826c8c93d6f9"}, + {file = "greenlet-3.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b89e5d44f55372efc6072f59ced5ed1efb7b44213dab5ad7e0caba0232c6545"}, + {file = "greenlet-3.2.1-cp39-cp39-win32.whl", hash = "sha256:b7503d6b8bbdac6bbacf5a8c094f18eab7553481a1830975799042f26c9e101b"}, + {file = "greenlet-3.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:e98328b8b8f160925d6b1c5b1879d8e64f6bd8cf11472b7127d579da575b77d9"}, + {file = "greenlet-3.2.1.tar.gz", hash = "sha256:9f4dd4b4946b14bb3bf038f81e1d2e535b7d94f1b2a59fdba1293cd9c1a0a4d7"}, ] [package.extras] @@ -1133,6 +1120,7 @@ version = "0.15.4" description = "Simplifies gRPC interceptors" optional = false python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ {file = "grpc-interceptor-0.15.4.tar.gz", hash = "sha256:1f45c0bcb58b6f332f37c637632247c9b02bc6af0fdceb7ba7ce8d2ebbfb0926"}, {file = "grpc_interceptor-0.15.4-py3-none-any.whl", hash = "sha256:0035f33228693ed3767ee49d937bac424318db173fef4d2d0170b3215f254d9d"}, @@ -1146,61 +1134,67 @@ testing = ["protobuf (>=4.21.9)"] [[package]] name = "grpcio" -version = "1.64.1" +version = "1.71.0" description = "HTTP/2-based RPC framework" optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"}, - {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash 
= "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"}, - {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"}, - {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"}, - {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"}, - {file = "grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"}, - {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"}, - {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"}, - {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"}, - {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"}, - {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"}, - {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"}, - {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"}, - {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = 
"sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"}, - {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"}, - {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"}, - {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = "sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"}, - {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"}, - {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"}, - {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"}, - {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, + {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"}, + {file = 
"grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"}, + {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"}, + {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"}, + {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"}, + {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"}, + {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"}, + {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"}, + {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"}, + {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"}, + {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"}, + {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"}, + {file = "grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"}, + {file = 
"grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"}, + {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"}, + {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"}, + {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"}, + {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"}, + {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"}, + {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"}, + {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.64.1)"] +protobuf = ["grpcio-tools (>=1.71.0)"] [[package]] name = "h11" @@ -1208,6 +1202,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -1219,6 +1214,7 @@ version = "1.3.0" description = "Pythonic HTML generation/templating (no 
template files)" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "html5tagger-1.3.0-py3-none-any.whl", hash = "sha256:ce14313515edffec8ed8a36c5890d023922641171b4e6e5774ad1a74998f5351"}, {file = "html5tagger-1.3.0.tar.gz", hash = "sha256:84fa3dfb49e5c83b79bbd856ab7b1de8e2311c3bb46a8be925f119e3880a8da9"}, @@ -1226,13 +1222,14 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.8" description = "A minimal low-level HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be"}, + {file = "httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad"}, ] [package.dependencies] @@ -1243,65 +1240,74 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httptools" -version = "0.6.1" +version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = false python-versions = ">=3.8.0" -files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = 
"httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +groups = ["main"] +files = [ + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = 
"httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, ] [package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] +test = ["Cython (>=0.29.24)"] [[package]] name = "httpx" 
-version = "0.28.0" +version = "0.28.1" description = "The next generation HTTP client." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc"}, - {file = "httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] @@ -1311,7 +1317,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -1319,13 +1325,14 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "httpx-ws" -version = "0.6.0" +version = "0.7.2" description = "WebSockets support for HTTPX" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "httpx_ws-0.6.0-py3-none-any.whl", hash = "sha256:437cfca94519a4e6ae06eb5573192df6c0da85c22b1a19cc1ea0b02b05a51d25"}, - {file = "httpx_ws-0.6.0.tar.gz", hash = "sha256:60218f531fb474a2143af38568f4b7d94ba356780973443365c8e2c87882bb8c"}, + {file = "httpx_ws-0.7.2-py3-none-any.whl", hash = "sha256:dd7bf9dbaa96dcd5cef1af3a7e1130cfac068bebecce25a74145022f5a8427a3"}, + {file = "httpx_ws-0.7.2.tar.gz", hash = "sha256:93edea6c8fc313464fc287bff7d2ad20e6196b7754c76f946f73b4af79886d4e"}, ] [package.dependencies] @@ -1336,21 +1343,26 @@ wsproto = "*" [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "inflect" version = "5.6.2" description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "inflect-5.6.2-py3-none-any.whl", hash = "sha256:b45d91a4a28a4e617ff1821117439b06eaa86e2a4573154af0149e9be6687238"}, {file = "inflect-5.6.2.tar.gz", hash = "sha256:aadc7ed73928f5e014129794bbac03058cca35d0a973a5fc4eb45c7fa26005f9"}, @@ -1358,18 +1370,7 @@ files = [ [package.extras] docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - -[[package]] -name = "inflection" -version = "0.5.1" -description = "A port of Ruby on Rails inflector to 
Python" -optional = false -python-versions = ">=3.5" -files = [ - {file = "inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, - {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, -] +testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\""] [[package]] name = "isort" @@ -1377,6 +1378,7 @@ version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -1387,13 +1389,14 @@ colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -1403,25 +1406,27 @@ MarkupSafe = ">=2.0" i18n = ["Babel (>=2.7)"] [[package]] -name = "json5" -version = "0.9.25" -description = "A Python implementation of the JSON5 data format." 
+name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "json5-0.9.25-py3-none-any.whl", hash = "sha256:34ed7d834b1341a86987ed52f3f76cd8ee184394906b6e22a1e0deb9ab294e8f"}, - {file = "json5-0.9.25.tar.gz", hash = "sha256:548e41b9be043f9426776f05df8635a00fe06104ea51ed24b67f908856e151ae"}, + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, ] [[package]] name = "kr8s" -version = "0.18.1" +version = "0.20.7" description = "A Kubernetes API library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "kr8s-0.18.1-py3-none-any.whl", hash = "sha256:192d659c70c7650e7641c3c69a656ac16e51672118468eef8224ea60009932c4"}, - {file = "kr8s-0.18.1.tar.gz", hash = "sha256:73c864c108e2f5159faab8dba9833011d586918f4520dfc64594df7b7907493f"}, + {file = "kr8s-0.20.7-py3-none-any.whl", hash = "sha256:e489b97ff513c167f427f479ad5420c78adffd1a6ce5033b079109374200c0c6"}, + {file = "kr8s-0.20.7.tar.gz", hash = "sha256:ac45e966beea0f6f92f635b3e61e64b8e27962b4825d77b814a663e819a8ec16"}, ] [package.dependencies] @@ -1429,7 +1434,7 @@ anyio = ">=3.7.0" asyncache = ">=0.3.1" cryptography = ">=35" httpx = ">=0.24.1" -httpx-ws = ">=0.5.2" +httpx-ws = ">=0.7.0" python-box = ">=7.0.1" python-jsonpath = ">=0.7.1" pyyaml = ">=6.0" @@ -1445,6 +1450,7 @@ version = "31.0.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, @@ -1468,13 +1474,14 @@ adal = ["adal (>=1.0.2)"] [[package]] name = "kubernetes-asyncio" -version = "31.1.0" +version = "32.3.0" description = "Kubernetes asynchronous python client" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "kubernetes_asyncio-31.1.0-py3-none-any.whl", hash = "sha256:76898fea5dee601b209fefeae4ecee2fb20bfe3ebf872b5ff37c96230fbda6cc"}, - {file = "kubernetes_asyncio-31.1.0.tar.gz", hash = "sha256:00128a96eb0284de0cbee53bd2fe044593f2e1547c48d09901cddf9258adfd88"}, + {file = "kubernetes_asyncio-32.3.0-py3-none-any.whl", hash = "sha256:3a0769d4bf39c638e474c76cd22f4aa81903db5ebd14573c1e3b3b7ebbf86fbc"}, + {file = "kubernetes_asyncio-32.3.0.tar.gz", hash = "sha256:3efdc39776f4e1c892ce08b74364e67be6c1d6870cba01ab27bb296fdc6fc485"}, ] [package.dependencies] @@ -1485,15 +1492,33 @@ pyyaml = ">=3.12" six = ">=1.9.0" urllib3 = ">=1.24.2" +[[package]] +name = "lark" +version = "0.12.0" +description = "a modern parsing library" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "lark-0.12.0-py2.py3-none-any.whl", hash = "sha256:ed1d891cbcf5151ead1c1d14663bf542443e579e63a76ae175b01b899bd854ca"}, + {file = "lark-0.12.0.tar.gz", hash = "sha256:7da76fcfddadabbbbfd949bbae221efd33938451d90b1fefbbc423c3cccf48ef"}, +] + +[package.extras] +atomic-cache = ["atomicwrites"] +nearley = ["js2py"] +regex = ["regex"] + [[package]] name = "mako" -version = "1.3.5" +version = "1.3.10" description = "A super-fast templating language that borrows the best ideas from the 
existing templating languages." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, + {file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"}, + {file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"}, ] [package.dependencies] @@ -1504,84 +1529,102 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] +[[package]] +name = "markdown-code-runner" +version = "2.2.0" +description = "Automatically execute code blocks within a Markdown file and update the output in-place" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "markdown_code_runner-2.2.0-py3-none-any.whl", hash = "sha256:d8812c48ad3fd4a3f3725dfcd5a1b7e5baf7216855eeea8a92c7fd9120717ac6"}, + {file = "markdown_code_runner-2.2.0.tar.gz", hash = "sha256:3c495998a437bc7d7a4b1a5ce518bce10cf5ba0fa69c569fee1e32c5238603c4"}, +] + +[package.extras] +test = ["coverage", "pre-commit", "pytest", "pytest-cov"] + [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = 
"MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = 
"MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] name = "marshmallow" -version = "3.23.1" +version = "3.26.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." 
optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"}, - {file = "marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468"}, + {file = "marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c"}, + {file = "marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6"}, ] [package.dependencies] @@ -1589,143 +1632,154 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.14)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"] tests = ["pytest", "simplejson"] [[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" +name = "monotonic" +version = "1.6" +description = "An implementation of time.monotonic() for Python 2 & < 3.3" optional = false -python-versions = ">=3.7" +python-versions = "*" +groups = ["main"] files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = 
"sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = 
"multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, + {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, + {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, ] [[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." +name = "multidict" +version = "6.4.3" +description = "multidict implementation" optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef"}, + {file = 
"multidict-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5"}, + {file = "multidict-6.4.3-cp310-cp310-win32.whl", hash = "sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e"}, + {file = "multidict-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7"}, + {file = "multidict-6.4.3-cp311-cp311-win32.whl", hash = "sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378"}, + {file = "multidict-6.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a"}, + {file = "multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124"}, + {file = "multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", 
hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8"}, + {file = "multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3"}, + {file = "multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8"}, + {file = 
"multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4"}, + {file = "multidict-6.4.3-cp313-cp313t-win32.whl", hash = "sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5"}, + {file = "multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash 
= "sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df"}, + {file = "multidict-6.4.3-cp39-cp39-win32.whl", hash = "sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f"}, + {file = "multidict-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897"}, + {file = "multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9"}, + {file = "multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec"}, ] [[package]] -name = "networkx" -version = "3.3" -description = "Python package for creating and manipulating graphs and networks" +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." optional = false -python-versions = ">=3.10" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"}, - {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] -[package.extras] -default = ["matplotlib (>=3.6)", "numpy (>=1.23)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] - [[package]] name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, @@ -1738,13 +1792,26 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "24.1" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "parsy" +version = "2.1" 
+description = "Easy-to-use parser combinators, for parsing in pure Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "parsy-2.1-py3-none-any.whl", hash = "sha256:8f18e7b11985e7802e7e3ecbd8291c6ca243d29820b1186e4c84605db4efffa0"}, + {file = "parsy-2.1.tar.gz", hash = "sha256:fd5dd18d7b0b61f8275ee88665f430a20c02cf5a82d88557f35330530186d7ac"}, ] [[package]] @@ -1753,6 +1820,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -1760,19 +1828,46 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.7" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, + {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "posthog" +version = "3.25.0" +description = "Integrate PostHog into any python application." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "posthog-3.25.0-py2.py3-none-any.whl", hash = "sha256:85db78c13d1ecb11aed06fad53759c4e8fb3633442c2f3d0336bc0ce8a585d30"}, + {file = "posthog-3.25.0.tar.gz", hash = "sha256:9168f3e7a0a5571b6b1065c41b3c171fbc68bfe72c3ac0bfd6e3d2fcdb7df2ca"}, +] + +[package.dependencies] +backoff = ">=1.10.0" +distro = ">=1.5.0" +monotonic = ">=1.5" +python-dateutil = ">2.1" +requests = ">=2.7,<3.0" +six = ">=1.5" + +[package.extras] +dev = ["black", "django-stubs", "flake8", "flake8-print", "isort", "lxml", "mypy", "mypy-baseline", "pre-commit", "pydantic", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six"] +langchain = ["langchain (>=0.2.0)"] +sentry = ["django", "sentry-sdk"] +test = ["anthropic", "coverage", "django", "flake8", "freezegun (==1.5.1)", "langchain-anthropic (>=0.2.0)", "langchain-community (>=0.2.0)", "langchain-openai (>=0.2.0)", "langgraph", "mock (>=2.0.0)", "openai", "parameterized (>=0.8.1)", "pydantic", "pylint", "pytest", "pytest-asyncio", "pytest-timeout"] [[package]] name = "prometheus-client" @@ -1780,6 +1875,7 @@ version = "0.7.1" description = "Python client for the Prometheus monitoring system." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "prometheus_client-0.7.1.tar.gz", hash = "sha256:71cd24a2b3eb335cb800c7159f423df1bd4dcd5171b234be15e3f31ec9f622da"}, ] @@ -1793,6 +1889,7 @@ version = "3.0.0" description = "Exposes Prometheus monitoring metrics of Sanic apps." optional = false python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ {file = "prometheus-sanic-3.0.0.tar.gz", hash = "sha256:06cfe8f9c843a1324fa801b9092f26470a63196b9e08fad0c0f12b49ddbf6c3c"}, {file = "prometheus_sanic-3.0.0-py3-none-any.whl", hash = "sha256:499110bf2a86f921b229083e0bcea4d489420abf6737e0d838cd234394fd91aa"}, @@ -1802,169 +1899,277 @@ files = [ prometheus-client = ">=0.7.1,<0.8.0" sanic = ">=22.0.0" +[[package]] +name = "propcache" +version = "0.3.1" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136"}, + {file = "propcache-0.3.1-cp310-cp310-win32.whl", hash = "sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42"}, + {file = "propcache-0.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9"}, + {file = "propcache-0.3.1-cp311-cp311-win32.whl", hash = "sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005"}, + {file = "propcache-0.3.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7"}, + {file = "propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b"}, + {file = "propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7"}, + {file = 
"propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef"}, + {file = "propcache-0.3.1-cp313-cp313-win32.whl", hash = "sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24"}, + {file = "propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a"}, + {file = "propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d"}, + {file = "propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe"}, + {file = "propcache-0.3.1-cp39-cp39-win32.whl", hash = "sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64"}, + {file = "propcache-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566"}, + {file = "propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40"}, + {file = "propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf"}, +] + [[package]] name = "protobuf" -version = "5.29.0" +version = "5.29.4" description = "" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "protobuf-5.29.0-cp310-abi3-win32.whl", hash = "sha256:ea7fb379b257911c8c020688d455e8f74efd2f734b72dc1ea4b4d7e9fd1326f2"}, - {file = 
"protobuf-5.29.0-cp310-abi3-win_amd64.whl", hash = "sha256:34a90cf30c908f47f40ebea7811f743d360e202b6f10d40c02529ebd84afc069"}, - {file = "protobuf-5.29.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c931c61d0cc143a2e756b1e7f8197a508de5365efd40f83c907a9febf36e6b43"}, - {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:85286a47caf63b34fa92fdc1fd98b649a8895db595cfa746c5286eeae890a0b1"}, - {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:0d10091d6d03537c3f902279fcf11e95372bdd36a79556311da0487455791b20"}, - {file = "protobuf-5.29.0-cp38-cp38-win32.whl", hash = "sha256:0cd67a1e5c2d88930aa767f702773b2d054e29957432d7c6a18f8be02a07719a"}, - {file = "protobuf-5.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:e467f81fdd12ded9655cea3e9b83dc319d93b394ce810b556fb0f421d8613e86"}, - {file = "protobuf-5.29.0-cp39-cp39-win32.whl", hash = "sha256:17d128eebbd5d8aee80300aed7a43a48a25170af3337f6f1333d1fac2c6839ac"}, - {file = "protobuf-5.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:6c3009e22717c6cc9e6594bb11ef9f15f669b19957ad4087214d69e08a213368"}, - {file = "protobuf-5.29.0-py3-none-any.whl", hash = "sha256:88c4af76a73183e21061881360240c0cdd3c39d263b4e8fb570aaf83348d608f"}, - {file = "protobuf-5.29.0.tar.gz", hash = "sha256:445a0c02483869ed8513a585d80020d012c6dc60075f96fa0563a724987b1001"}, + {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"}, + {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"}, + {file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"}, + {file = "protobuf-5.29.4-cp38-cp38-win32.whl", hash = "sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de"}, + {file = "protobuf-5.29.4-cp38-cp38-win_amd64.whl", hash = "sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68"}, + {file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"}, + {file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"}, + {file = "protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"}, + {file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"}, ] +[[package]] +name = "protovalidate" +version = "0.7.1" +description = "Protocol Buffer Validation for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "protovalidate-0.7.1-py3-none-any.whl", hash = "sha256:6788b1baa10c2e9453c3a3eef5f87a3e9c871bc9a7110b506aefd764269c8b3e"}, + {file = "protovalidate-0.7.1.tar.gz", hash = "sha256:12bd7c126fc000c5cbee5bf0f4cd01e0ba0e353f585b0aaa68df03e788939412"}, +] + +[package.dependencies] +cel-python = "*" +protobuf = "*" + [[package]] name = "psycopg" -version = "3.2.3" +version = "3.2.6" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = 
[ - {file = "psycopg-3.2.3-py3-none-any.whl", hash = "sha256:644d3973fe26908c73d4be746074f6e5224b03c1101d302d9a53bf565ad64907"}, - {file = "psycopg-3.2.3.tar.gz", hash = "sha256:a5764f67c27bec8bfac85764d23c534af2c27b893550377e37ce59c12aac47a2"}, + {file = "psycopg-3.2.6-py3-none-any.whl", hash = "sha256:f3ff5488525890abb0566c429146add66b329e20d6d4835662b920cbbf90ac58"}, + {file = "psycopg-3.2.6.tar.gz", hash = "sha256:16fa094efa2698f260f2af74f3710f781e4a6f226efe9d1fd0c37f384639ed8a"}, ] [package.dependencies] -psycopg-binary = {version = "3.2.3", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} -typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} +psycopg-binary = {version = "3.2.6", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.2.3)"] -c = ["psycopg-c (==3.2.3)"] -dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.11)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +binary = ["psycopg-binary (==3.2.6) ; implementation_name != \"pypy\""] +c = ["psycopg-c (==3.2.6) ; implementation_name != \"pypy\""] +dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] -test = ["anyio (>=4.0)", "mypy (>=1.11)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] +test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] [[package]] name = "psycopg-binary" -version = "3.2.3" +version = "3.2.6" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.8" -files = [ - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:965455eac8547f32b3181d5ec9ad8b9be500c10fe06193543efaaebe3e4ce70c"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:71adcc8bc80a65b776510bc39992edf942ace35b153ed7a9c6c573a6849ce308"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f73adc05452fb85e7a12ed3f69c81540a8875960739082e6ea5e28c373a30774"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8630943143c6d6ca9aefc88bbe5e76c90553f4e1a3b2dc339e67dc34aa86f7e"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bffb61e198a91f712cc3d7f2d176a697cb05b284b2ad150fb8edb308eba9002"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4fa2240c9fceddaa815a58f29212826fafe43ce80ff666d38c4a03fb036955"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:192a5f8496e6e1243fdd9ac20e117e667c0712f148c5f9343483b84435854c78"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64dc6e9ec64f592f19dc01a784e87267a64a743d34f68488924251253da3c818"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_ppc64le.whl", 
hash = "sha256:79498df398970abcee3d326edd1d4655de7d77aa9aecd578154f8af35ce7bbd2"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:949551752930d5e478817e0b49956350d866b26578ced0042a61967e3fcccdea"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:80a2337e2dfb26950894c8301358961430a0304f7bfe729d34cc036474e9c9b1"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6d8f2144e0d5808c2e2aed40fbebe13869cd00c2ae745aca4b3b16a435edb056"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94253be2b57ef2fea7ffe08996067aabf56a1eb9648342c9e3bad9e10c46e045"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fda0162b0dbfa5eaed6cdc708179fa27e148cb8490c7d62e5cf30713909658ea"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c0419cdad8c70eaeb3116bb28e7b42d546f91baf5179d7556f230d40942dc78"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74fbf5dd3ef09beafd3557631e282f00f8af4e7a78fbfce8ab06d9cd5a789aae"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d784f614e4d53050cbe8abf2ae9d1aaacf8ed31ce57b42ce3bf2a48a66c3a5c"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4e76ce2475ed4885fe13b8254058be710ec0de74ebd8ef8224cf44a9a3358e5f"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5938b257b04c851c2d1e6cb2f8c18318f06017f35be9a5fe761ee1e2e344dfb7"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:257c4aea6f70a9aef39b2a77d0658a41bf05c243e2bf41895eb02220ac6306f3"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:06b5cc915e57621eebf2393f4173793ed7e3387295f07fed93ed3fb6a6ccf585"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:09baa041856b35598d335b1a74e19a49da8500acedf78164600694c0ba8ce21b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:48f8ca6ee8939bab760225b2ab82934d54330eec10afe4394a92d3f2a0c37dd6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5361ea13c241d4f0ec3f95e0bf976c15e2e451e9cc7ef2e5ccfc9d170b197a40"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb987f14af7da7c24f803111dbc7392f5070fd350146af3345103f76ea82e339"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0463a11b1cace5a6aeffaf167920707b912b8986a9c7920341c75e3686277920"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b7be9a6c06518967b641fb15032b1ed682fd3b0443f64078899c61034a0bca6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64a607e630d9f4b2797f641884e52b9f8e239d35943f51bef817a384ec1678fe"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fa33ead69ed133210d96af0c63448b1385df48b9c0247eda735c5896b9e6dbbf"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1f8b0d0e99d8e19923e6e07379fa00570be5182c201a8c0b5aaa9a4d4a4ea20b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:709447bd7203b0b2debab1acec23123eb80b386f6c29e7604a5d4326a11e5bd6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5e37d5027e297a627da3551a1e962316d0f88ee4ada74c768f6c9234e26346d9"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:261f0031ee6074765096a19b27ed0f75498a8338c3dcd7f4f0d831e38adf12d1"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:41fdec0182efac66b27478ac15ef54c9ebcecf0e26ed467eb7d6f262a913318b"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:07d019a786eb020c0f984691aa1b994cb79430061065a694cf6f94056c603d26"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c57615791a337378fe5381143259a6c432cdcbb1d3e6428bfb7ce59fff3fb5c"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8eb9a4e394926b93ad919cad1b0a918e9b4c846609e8c1cfb6b743683f64da0"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5905729668ef1418bd36fbe876322dcb0f90b46811bba96d505af89e6fbdce2f"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd65774ed7d65101b314808b6893e1a75b7664f680c3ef18d2e5c84d570fa393"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:700679c02f9348a0d0a2adcd33a0275717cd0d0aee9d4482b47d935023629505"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:96334bb64d054e36fed346c50c4190bad9d7c586376204f50bede21a913bf942"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9099e443d4cc24ac6872e6a05f93205ba1a231b1a8917317b07c9ef2b955f1f4"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1985ab05e9abebfbdf3163a16ebb37fbc5d49aff2bf5b3d7375ff0920bbb54cd"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:e90352d7b610b4693fad0feea48549d4315d10f1eba5605421c92bb834e90170"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:69320f05de8cdf4077ecd7fefdec223890eea232af0d58f2530cbda2871244a0"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4926ea5c46da30bec4a85907aa3f7e4ea6313145b2aa9469fdb861798daf1502"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c64c4cd0d50d5b2288ab1bcb26c7126c772bbdebdfadcd77225a77df01c4a57e"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05a1bdce30356e70a05428928717765f4a9229999421013f41338d9680d03a63"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad357e426b0ea5c3043b8ec905546fa44b734bf11d33b3da3959f6e4447d350"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:967b47a0fd237aa17c2748fdb7425015c394a6fb57cdad1562e46a6eb070f96d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:71db8896b942770ed7ab4efa59b22eee5203be2dfdee3c5258d60e57605d688c"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2773f850a778575dd7158a6dd072f7925b67f3ba305e2003538e8831fec77a1d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = 
"sha256:aeddf7b3b3f6e24ccf7d0edfe2d94094ea76b40e831c16eff5230e040ce3b76b"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:824c867a38521d61d62b60aca7db7ca013a2b479e428a0db47d25d8ca5067410"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:9994f7db390c17fc2bd4c09dca722fd792ff8a49bb3bdace0c50a83f22f1767d"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1303bf8347d6be7ad26d1362af2c38b3a90b8293e8d56244296488ee8591058e"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:842da42a63ecb32612bb7f5b9e9f8617eab9bc23bd58679a441f4150fcc51c96"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bb342a01c76f38a12432848e6013c57eb630103e7556cf79b705b53814c3949"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd40af959173ea0d087b6b232b855cfeaa6738f47cb2a0fd10a7f4fa8b74293f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9b60b465773a52c7d4705b0a751f7f1cdccf81dd12aee3b921b31a6e76b07b0e"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fc6d87a1c44df8d493ef44988a3ded751e284e02cdf785f746c2d357e99782a6"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f0b018e37608c3bfc6039a1dc4eb461e89334465a19916be0153c757a78ea426"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a29f5294b0b6360bfda69653697eff70aaf2908f58d1073b0acd6f6ab5b5a4f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:e56b1fd529e5dde2d1452a7d72907b37ed1b4f07fdced5d8fb1e963acfff6749"}, +groups = ["main"] +markers = "implementation_name != \"pypy\"" +files = [ + {file = "psycopg_binary-3.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1b639acb3e24243c23f75700bf6e3af7b76da92523ec7c3196a13aaf0b578453"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1b5c359173726b38d7acbb9f73270f269591d8031d099c1a70dd3f3d22b0e8a8"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3434efe7c00f505f4c1e531519dac6c701df738ba7a1328eac81118d80019132"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bca8d9643191b13193940bbf84d51ac5a747e965c230177258fb02b8043fb7a"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55fa40f11d37e6e5149a282a5fd7e0734ce55c623673bfba638480914fd1414c"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0690ac1061c655b1bcbe9284d07bf5276bc9c0d788a6c74aaf3b042e64984b83"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e9a4a9967ff650d2821d5fad6bec7b15f4c2072603e9fa3f89a39f351ade1fd3"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d6f2894cc7aee8a15fe591e8536911d9c015cb404432cf7bdac2797e54cb2ba8"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:05560c81312d7c2bee95a9860cd25198677f2320fb4a3527bc04e8cae7fcfb64"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4269cd23a485d6dd6eb6b10841c94551a53091cf0b1b6d5247a6a341f53f0d95"}, + {file = 
"psycopg_binary-3.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:7942f35a6f314608720116bcd9de240110ceadffd2ac5c34f68f74a31e52e46a"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7afe181f6b3eb714362e9b6a2dc2a589bff60471a1d8639fd231a4e426e01523"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34bb0fceba0773dc0bfb53224bb2c0b19dc97ea0a997a223615484cf02cae55c"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54120122d2779dcd307f49e1f921d757fe5dacdced27deab37f277eef0c52a5b"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:816aa556f63b2303e66ba6c8888a8b3f3e6e4e47049ec7a4d62c84ac60b091ca"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19a0ba351eda9a59babf8c7c9d89c7bbc5b26bf096bc349b096bd0dd2482088"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e197e01290ef818a092c877025fc28096adbb6d0743e313491a21aab31bd96"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:274794b4b29ef426e09086404446b61a146f5e756da71366c5a6d57abec31f7d"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:69845bdc0db519e1dfc27932cd3d5b1ecb3f72950af52a1987508ab0b52b3b55"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:66c3bed2caf0d1cabcb9365064de183b5209a7cbeaa131e79e68f350c9c963c2"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e3ae3201fe85c7f901349a2cf52f02ceca4cb97a5e2e2ac8b8a1c9a6eb747bed"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:58f443b4df2adb59937c96775fadf4967f93d952fbcc82394446985faec11041"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f27a46ff0497e882e8c0286e8833c785b4d1a80f23e1bf606f4c90e5f9f3ce75"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b30ee4821ded7de48b8048b14952512588e7c5477b0a5965221e1798afba61a1"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e57edf3b1f5427f39660225b01f8e7b97f5cfab132092f014bf1638bc85d81d2"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c5172ce3e4ae7a4fd450070210f801e2ce6bc0f11d1208d29268deb0cda34de"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcfab3804c43571a6615e559cdc4c4115785d258a4dd71a721be033f5f5f378d"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fa1c920cce16f1205f37b20c685c58b9656b170b8b4c93629100d342d0d118e"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e118d818101c1608c6b5ba52a6c977614d8f05aa89467501172ba4d10588e11"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:763319a8bfeca77d31512da71f5a33459b9568a7621c481c3828c62f9c38f351"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2fbc05819560389dbece046966bc88e0f2ea77673497e274c4293b8b4c1d0703"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a57f99bb953b4bd6f32d0a9844664e7f6ca5ead9ba40e96635be3cd30794813"}, + {file = 
"psycopg_binary-3.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:5de6809e19a465dcb9c269675bded46a135f2d600cd99f0735afbb21ddad2af4"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54af3fbf871baa2eb19df96fd7dc0cbd88e628a692063c3d1ab5cdd00aa04322"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ad5da1e4636776c21eaeacdec42f25fa4612631a12f25cd9ab34ddf2c346ffb9"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7956b9ea56f79cd86eddcfbfc65ae2af1e4fe7932fa400755005d903c709370"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e2efb763188008cf2914820dcb9fb23c10fe2be0d2c97ef0fac7cec28e281d8"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b3aab3451679f1e7932270e950259ed48c3b79390022d3f660491c0e65e4838"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:849a370ac4e125f55f2ad37f928e588291a67ccf91fa33d0b1e042bb3ee1f986"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:566d4ace928419d91f1eb3227fc9ef7b41cf0ad22e93dd2c3368d693cf144408"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f1981f13b10de2f11cfa2f99a8738b35b3f0a0f3075861446894a8d3042430c0"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:36f598300b55b3c983ae8df06473ad27333d2fd9f3e2cfdb913b3a5aaa3a8bcf"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0f4699fa5fe1fffb0d6b2d14b31fd8c29b7ea7375f89d5989f002aaf21728b21"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:afe697b8b0071f497c5d4c0f41df9e038391534f5614f7fb3a8c1ca32d66e860"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da5554553b8d9fb7ab6bb1a37cc53f20ada9024916c60f40c09ab1a675323f2f"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b7e3ccc43c395edba8039c9e407b01ed1844304c7f2f4aa99d34d04ed067c83"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d55405efc8a96aa0ecb2d5d6af552d35c744f160b133fa690814a68d9a952c8"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:58d5cfb1687b69b3484a034d1aa6e5c11f0c1d46757e978ed59fab59ce83fd37"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3761c4107dab218c32ce4b10b1ae5ed686d41b882bfcb05f5bebc2be9488442f"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:45f1526e12cb480586c74670f46563d3090fc2a93e859ccf71efae61f04cef4b"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b4d4fd4415d5219785fb082e28d84be4fbd90c3bff3d861877db0aa6b0edd70b"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:eb8a1e6b8130fee0b48107739e09553d50c6f031d0b3fcc33f885bb64fa01105"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7adf1460c05f7366f0fe9cf2d24e46abca9eb621705322bbd0c3f3e3a5edb2b4"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:28505f52ceef60554b5ab3289bf5aed2e7e57fa8e9a59a979d82db944e256a6c"}, + {file = 
"psycopg_binary-3.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:260c43c329e668606388cee78ec0dab083a25c2c6e6f9cf74a130fd5a27b0f87"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9870e51fad4684dbdec057fa757d65e61cb2acb16236836e9360044c2a1ec880"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030e9c3082a931e972b029b3cef085784a3bf7f8e18367ae50d5b809aa6e1d87"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60c9ed291fbd5e777c2c630dcfd10b7a87d68512b0757d5e7406d9c4895a82a"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e0f4a17a9c376c195e403b4826c18f325bd28f425231d36d1036258bf893e23"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac46da609624b16d961f604b3cbc3233ef43211ef1456a188f8c427109c9c3e1"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e77949b8e7014b85cee0bf6e9e041bcae7719b2693ebf59236368fb0b2a08814"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:532322d9ef6e7d178a4f344970b017110633bcc3dc1c3403efcef55aad612517"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:880c5fd76dcb50bdcc8f87359e5a6c7eb416697cc9aa02854c91223bd999c045"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3c0cddc7458b8416d77cd8829d0192466502f31d1fb853d58613cf13ac64f41c"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:ea158665676f42b19585dfe948071d3c5f28276f84a97522fb2e82c1d9194563"}, ] [[package]] name = "pyasn1" -version = "0.6.0" +version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] name = "pyasn1-modules" -version = "0.4.0" +version = "0.4.2" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = "sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, -] - -[package.dependencies] -pyasn1 = ">=0.4.6,<0.7.0" - -[[package]] -name = "pyavro-gen" -version = "0.3.3" -description = "A typed class generator for Avro Schemata" -optional = false -python-versions = "*" -files = [ - {file = "pyavro-gen-0.3.3.tar.gz", hash = "sha256:0e2b71c7c3c147326f555ecffcb6b2d5af4f1760b42a85f53a4fe85879f30a69"}, - {file = "pyavro_gen-0.3.3-py3-none-any.whl", hash = "sha256:452f6acb178bf7d7d9eb3c78d1978bfeecefdb3fa2937a4baf3542ae28b6dc49"}, + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = 
"pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, ] [package.dependencies] -avro-preprocessor = ">=0.1.12" -dataclasses-avroschema = ">=0.37.1" -factory-boy = ">=3.2.1" -faker = ">=15.1.1" -isort = ">=5.10.1" -networkx = ">=2.8.7" -pygments = ">=2.13.0" -pytz = ">=2022.5" -undictify = ">=0.11.3" +pyasn1 = ">=0.6.1,<0.7.0" [[package]] name = "pycparser" @@ -1972,6 +2177,8 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -1979,157 +2186,146 @@ files = [ [[package]] name = "pydantic" -version = "2.10.2" +version = "2.11.3" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"}, - {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, + {file = "pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"}, + {file = "pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3"}, ] [package.dependencies] annotated-types = ">=0.6.0" email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} -pydantic-core = "2.27.1" +pydantic-core = "2.33.1" typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" -version = "2.27.1" +version = "2.33.1" description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, - {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, - {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, - {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, - {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, - {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, - {file = 
"pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, - {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, - {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, - {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, - {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, - {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, - {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, - {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, - {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, - {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, - {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, - {file = 
"pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, - {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3077cfdb6125cc8dab61b155fdd714663e401f0e6883f9632118ec12cf42df26"}, + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ffab8b2908d152e74862d276cf5017c81a2f3719f14e8e3e8d6b83fda863927"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5183e4f6a2d468787243ebcd70cf4098c247e60d73fb7d68d5bc1e1beaa0c4db"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:398a38d323f37714023be1e0285765f0a27243a8b1506b7b7de87b647b517e48"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d3776f0001b43acebfa86f8c64019c043b55cc5a6a2e313d728b5c95b46969"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c566dd9c5f63d22226409553531f89de0cac55397f2ab8d97d6f06cfce6d947e"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d5f3acc81452c56895e90643a625302bd6be351e7010664151cc55b7b97f89"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3a07fadec2a13274a8d861d3d37c61e97a816beae717efccaa4b36dfcaadcde"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f99aeda58dce827f76963ee87a0ebe75e648c72ff9ba1174a253f6744f518f65"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:902dbc832141aa0ec374f4310f1e4e7febeebc3256f00dc359a9ac3f264a45dc"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fe44d56aa0b00d66640aa84a3cbe80b7a3ccdc6f0b1ca71090696a6d4777c091"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win32.whl", hash = "sha256:ed3eb16d51257c763539bde21e011092f127a2202692afaeaccb50db55a31383"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win_amd64.whl", hash = "sha256:694ad99a7f6718c1a498dc170ca430687a39894a60327f548e02a9c7ee4b6504"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e"}, 
+ {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win32.whl", hash = "sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_arm64.whl", hash = "sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda"}, + {file = 
"pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5ab77f45d33d264de66e1884fca158bc920cb5e27fd0764a72f72f5756ae8bdb"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7aaba1b4b03aaea7bb59e1b5856d734be011d3e6d98f5bcaa98cb30f375f2ad"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fb66263e9ba8fea2aa85e1e5578980d127fb37d7f2e292773e7bc3a38fb0c7b"}, + {file = 
"pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f2648b9262607a7fb41d782cc263b48032ff7a03a835581abbf7a3bec62bcf5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:723c5630c4259400818b4ad096735a829074601805d07f8cafc366d95786d331"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d100e3ae783d2167782391e0c1c7a20a31f55f8015f3293647544df3f9c67824"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177d50460bc976a0369920b6c744d927b0ecb8606fb56858ff542560251b19e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3edde68d1a1f9af1273b2fe798997b33f90308fb6d44d8550c89fc6a3647cf6"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a62c3c3ef6a7e2c45f7853b10b5bc4ddefd6ee3cd31024754a1a5842da7d598d"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:c91dbb0ab683fa0cd64a6e81907c8ff41d6497c346890e26b23de7ee55353f96"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f466e8bf0a62dc43e068c12166281c2eca72121dd2adc1040f3aa1e21ef8599"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win32.whl", hash = "sha256:ab0277cedb698749caada82e5d099dc9fed3f906a30d4c382d1a21725777a1e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win_amd64.whl", hash = "sha256:5773da0ee2d17136b1f1c6fbde543398d452a6ad2a7b54ea1033e2daa739b8d2"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c834f54f8f4640fd7e4b193f80eb25a0602bba9e19b3cd2fc7ffe8199f5ae02"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:049e0de24cf23766f12cc5cc71d8abc07d4a9deb9061b334b62093dedc7cb068"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a28239037b3d6f16916a4c831a5a0eadf856bdd6d2e92c10a0da3a59eadcf3e"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d3da303ab5f378a268fa7d45f37d7d85c3ec19769f28d2cc0c61826a8de21fe"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25626fb37b3c543818c14821afe0fd3830bc327a43953bc88db924b68c5723f1"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3ab2d36e20fbfcce8f02d73c33a8a7362980cff717926bbae030b93ae46b56c7"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:2f9284e11c751b003fd4215ad92d325d92c9cb19ee6729ebd87e3250072cdcde"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:048c01eee07d37cbd066fc512b9d8b5ea88ceeb4e629ab94b3e56965ad655add"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5ccd429694cf26af7997595d627dd2637e7932214486f55b8a357edaac9dae8c"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7edbc454a29fc6aeae1e1eecba4f07b63b8d76e76a748532233c4c167b4cb9ea"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ad05b683963f69a1d5d2c2bdab1274a31221ca737dbbceaa32bcb67359453cdd"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df6a94bf9452c6da9b5d76ed229a5683d0306ccb91cca8e1eea883189780d568"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7965c13b3967909a09ecc91f21d09cfc4576bf78140b988904e94f130f188396"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3f1fdb790440a34f6ecf7679e1863b825cb5ffde858a9197f851168ed08371e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5277aec8d879f8d05168fdd17ae811dd313b8ff894aeeaf7cd34ad28b4d77e33"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8ab581d3530611897d863d1a649fb0644b860286b4718db919bfd51ece41f10b"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0483847fa9ad5e3412265c1bd72aad35235512d9ce9d27d81a56d935ef489672"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:de9e06abe3cc5ec6a2d5f75bc99b0bdca4f5c719a5b34026f8c57efbdecd2ee3"}, + {file = "pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df"}, ] [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
-    {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
-]
-
-[package.extras]
-windows-terminal = ["colorama (>=0.4.6)"]
-
 [[package]]
 name = "pyjwt"
 version = "2.10.1"
 description = "JSON Web Token implementation in Python"
 optional = false
 python-versions = ">=3.9"
+groups = ["main"]
 files = [
     {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"},
     {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"},
@@ -2146,28 +2342,29 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"]
 
 [[package]]
 name = "python-box"
-version = "7.2.0"
+version = "7.3.2"
 description = "Advanced Python dictionaries with dot notation access"
 optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "python_box-7.2.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:6bdeec791e25258351388b3029a3ec5da302bb9ed3be175493c43cdc6c47f5e3"},
-    {file = "python_box-7.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c449f7b3756a71479fa9c61a86e344ac00ed782a66d7662590f0afa294249d18"},
-    {file = "python_box-7.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:6b0d61f182d394106d963232854e495b51edc178faa5316a797be1178212d7e0"},
-    {file = "python_box-7.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e2d752de8c1204255bf7b0c814c59ef48293c187a7e9fdcd2fefa28024b72032"},
-    {file = "python_box-7.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a6c35ea356a386077935958a5debcd5b229b9a1b3b26287a52dfe1a7e65d99"},
-    {file = "python_box-7.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:32ed58ec4d9e5475efe69f9c7d773dfea90a6a01979e776da93fd2b0a5d04429"},
-    {file = "python_box-7.2.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2a2d664c6a27f7515469b6f1e461935a2038ee130b7d194b4b4db4e85d363618"},
-    {file = "python_box-7.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a5a7365db1aaf600d3e8a2747fcf6833beb5d45439a54318548f02e302e3ec"},
-    {file = "python_box-7.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:739f827056ea148cbea3122d4617c994e829b420b1331183d968b175304e3a4f"},
-    {file = "python_box-7.2.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:2617ef3c3d199f55f63c908f540a4dc14ced9b18533a879e6171c94a6a436f23"},
-    {file = "python_box-7.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd866bed03087b1d8340014da8c3aaae19135767580641df1b4ae6fff6ac0aa"},
-    {file = "python_box-7.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:9681f059e7e92bdf20782cd9ea6e533d4711fc7b8c57a462922a025d46add4d0"},
-    {file = "python_box-7.2.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:6b59b1e2741c9ceecdf5a5bd9b90502c24650e609cd824d434fed3b6f302b7bb"},
-    {file = "python_box-7.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23fae825d809ae7520fdeac88bb52be55a3b63992120a00e381783669edf589"},
-    {file = "python_box-7.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:573b1abdcb7bd745fa404444f060ee62fc35a74f067181e55dcb43cfe92f2827"},
-    {file = "python_box-7.2.0-py3-none-any.whl", hash = "sha256:a3c90832dd772cb0197fdb5bc06123b6e1b846899a1b53d9c39450d27a584829"},
-    {file = "python_box-7.2.0.tar.gz", hash = "sha256:551af20bdab3a60a2a21e3435120453c4ca32f7393787c3a5036e1d9fc6a0ede"},
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "python_box-7.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d136163294fd61a1554db7dd203f2e3035064798d30c17d67d948f0de5c572de"},
+    {file = "python_box-7.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d72e96547d8e2c2c333909826e9fae338d9a7e4cde07d5c6058cdd468432c0"},
+    {file = "python_box-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:3aa52e3b5cc50c80bb7ef4be3e41e81d095310f619454a7ffd61eef3209a6225"},
+    {file = "python_box-7.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:32163b1cb151883de0da62b0cd3572610dc72ccf0762f2447baf1d2562e25bea"},
+    {file = "python_box-7.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:064cb59b41e25aaf7dbd39efe53151a5f6797cc1cb3c68610f0f21a9d406d67e"},
+    {file = "python_box-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:488f0fba9a6416c3334b602366dcd92825adb0811e07e03753dfcf0ed79cd6f7"},
+    {file = "python_box-7.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:39009a2da5c20133718b24891a206592adbe09169856aedc450ad1600fc2e511"},
+    {file = "python_box-7.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2a72e2f6fb97c7e472ff3272da207ecc615aa222e52e98352391428527c469"},
+    {file = "python_box-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9eead914b9fb7d98a1473f5027dcfe27d26b3a10ffa33b9ba22cf948a23cd280"},
+    {file = "python_box-7.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1dfc3b9b073f3d7cad1fa90de98eaaa684a494d0574bbc0666f74fa8307fd6b6"},
+    {file = "python_box-7.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca4685a7f764b5a71b6e08535ce2a96b7964bb63d8cb4df10f6bb7147b6c54b"},
+    {file = "python_box-7.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e143295f74d47a9ab24562ead2375c9be10629599b57f2e86717d3fff60f82a9"},
+    {file = "python_box-7.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f3118ab3076b645c76133b8fac51deee30237cecdcafc3af664c4b9000f04db9"},
+    {file = "python_box-7.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a760074ba12ccc247796f43b6c61f686ada4b8349ab59e2a6303b27f3ae082"},
+    {file = "python_box-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:ea436e7ff5f87bd728472f1e31a9e6e95572c81028c44a8e00097e0968955638"},
+    {file = "python_box-7.3.2-py3-none-any.whl", hash = "sha256:fd7d74d5a848623f93b5221fd9fb00b8c00ff0e130fa87f396277aa188659c92"},
+    {file = "python_box-7.3.2.tar.gz", hash = "sha256:028b9917129e67f311932d93347b8a4f1b500d7a5a2870ee3c035f4e7b19403b"},
 ]
 
 [package.extras]
@@ -2176,7 +2373,7 @@
 msgpack = ["msgpack"]
 pyyaml = ["PyYAML"]
 ruamel-yaml = ["ruamel.yaml (>=0.17)"]
 toml = ["toml"]
-tomli = ["tomli", "tomli-w"]
+tomli = ["tomli ; python_version < \"3.11\"", "tomli-w"]
 yaml = ["ruamel.yaml (>=0.17)"]
 
@@ -2185,6 +2382,7 @@ version = "2.9.0.post0"
 description = "Extensions to the standard Python datetime module"
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+groups = ["main"]
 files = [
     {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
     {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
@@ -2195,13 +2393,14 @@ six = ">=1.5"
 
 [[package]]
 name = "python-gitlab"
-version = "5.1.0"
-description = "A python wrapper for the GitLab API"
+version = "5.6.0"
+description = "The python wrapper for the GitLab REST and GraphQL APIs."
 optional = false
 python-versions = ">=3.9.0"
+groups = ["main"]
 files = [
-    {file = "python_gitlab-5.1.0-py3-none-any.whl", hash = "sha256:c30cf547392ce66daaaf020839cfb6c15a91b26e2e7054d1b3f1b92e8dd65e7d"},
-    {file = "python_gitlab-5.1.0.tar.gz", hash = "sha256:d5a10dae8328f32fb9214bd3f9dc199b4930cd496f81c9be42a0f8ff338aeb35"},
+    {file = "python_gitlab-5.6.0-py3-none-any.whl", hash = "sha256:68980cd70929fc7f8f06d8a7b09bd046a6b79e1995c19d61249f046005099100"},
+    {file = "python_gitlab-5.6.0.tar.gz", hash = "sha256:bc531e8ba3e5641b60409445d4919ace68a2c18cb0ec6d48fbced6616b954166"},
 ]
 
 [package.dependencies]
@@ -2215,13 +2414,14 @@ yaml = ["PyYaml (>=6.0.1)"]
 
 [[package]]
 name = "python-jsonpath"
-version = "1.1.1"
+version = "1.3.0"
 description = "JSONPath, JSON Pointer and JSON Patch for Python."
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
+groups = ["main"]
 files = [
-    {file = "python_jsonpath-1.1.1-py3-none-any.whl", hash = "sha256:43f2622b7aaaf4f45dd873e80cfd181058503e08ffdeac5218135f3a97bd0aec"},
-    {file = "python_jsonpath-1.1.1.tar.gz", hash = "sha256:d2944e1f7a1d6c8fa958724f9570b8f04a4e00ab6bf1e4733346ab8dcef1f74f"},
+    {file = "python_jsonpath-1.3.0-py3-none-any.whl", hash = "sha256:ce586ec5bd934ce97bc2f06600b00437d9684138b77273ced5b70694a8ef3a76"},
+    {file = "python_jsonpath-1.3.0.tar.gz", hash = "sha256:ea5eb4d9b1296c8c19cc53538eb0f20fc54128f84571559ee63539e57875fefe"},
 ]
 
 [[package]]
@@ -2230,6 +2430,7 @@ version = "3.0.0"
 description = "Universally unique lexicographically sortable identifier"
 optional = false
 python-versions = ">=3.9"
+groups = ["main"]
 files = [
     {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"},
     {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"},
@@ -2238,98 +2439,76 @@ files = [
 
 [package.extras]
 pydantic = ["pydantic (>=2.0)"]
 
-[[package]]
-name = "pytz"
-version = "2024.1"
-description = "World timezone definitions, modern and historical"
-optional = false
-python-versions = "*"
-files = [
-    {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"},
-    {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"},
-]
-
 [[package]]
 name = "pyyaml"
-version = "6.0.1"
+version = "6.0.2"
 description = "YAML parser and emitter for Python"
 optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"},
-    {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"},
-    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
-    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
-    {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
-    {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
-    {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
-    {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
-    {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
-    {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"},
-    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
-    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
-    {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
-    {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
-    {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
-    {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
-    {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
-    {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
-    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"},
-    {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
-    {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
-    {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
-    {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"},
-    {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"},
-    {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"},
-    {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"},
-    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
-    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
-    {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
-    {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
-    {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
-    {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
-    {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
-    {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"},
-    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
-    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
-    {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
-    {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
-    {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
-    {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
-    {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
-]
-
-[[package]]
-name = "redis"
-version = "5.2.0"
-description = "Python client for Redis database and key-value store"
-optional = false
 python-versions = ">=3.8"
-files = [
-    {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"},
-    {file = "redis-5.2.0.tar.gz", hash = "sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"},
+groups = ["main"]
+files = [
+    {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"},
+    {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"},
+    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"},
+    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"},
+    {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"},
+    {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"},
+    {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"},
+    {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"},
+    {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"},
+    {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"},
+    {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"},
+    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"},
+    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"},
+    {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"},
+    {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"},
+    {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"},
+    {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"},
+    {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"},
+    {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"},
+    {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"},
+    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"},
+    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"},
+    {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"},
+    {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"},
+    {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"},
+    {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"},
+    {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"},
+    {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"},
+    {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"},
+    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"},
+    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"},
+    {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"},
+    {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"},
+    {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"},
+    {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"},
+    {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"},
+    {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"},
+    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"},
+    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"},
+    {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"},
+    {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"},
+    {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"},
+    {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"},
+    {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"},
+    {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"},
+    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"},
+    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"},
+    {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"},
+    {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"},
+    {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"},
+    {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"},
+    {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"},
+    {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"},
 ]
 
-[package.extras]
-hiredis = ["hiredis (>=3.0.0)"]
-ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"]
-
 [[package]]
 name = "requests"
 version = "2.32.3"
 description = "Python HTTP for Humans."
 optional = false
 python-versions = ">=3.8"
+groups = ["main"]
 files = [
     {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"},
     {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"},
@@ -2351,6 +2530,7 @@ version = "2.0.0"
 description = "OAuthlib authentication support for Requests."
 optional = false
 python-versions = ">=3.4"
+groups = ["main"]
 files = [
     {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"},
     {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"},
@@ -2369,6 +2549,7 @@ version = "1.0.0"
 description = "A utility belt for advanced users of python-requests"
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+groups = ["main"]
 files = [
     {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"},
     {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"},
@@ -2379,104 +2560,29 @@ requests = ">=2.0.1,<3.0.0"
 
 [[package]]
 name = "rsa"
-version = "4.9"
+version = "4.9.1"
 description = "Pure-Python RSA implementation"
 optional = false
-python-versions = ">=3.6,<4"
+python-versions = "<4,>=3.6"
+groups = ["main"]
 files = [
-    {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"},
-    {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"},
+    {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"},
+    {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"},
 ]
 
 [package.dependencies]
 pyasn1 = ">=0.1.3"
 
-[[package]]
-name = "ruamel-yaml"
-version = "0.18.6"
-description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"},
-    {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"},
-]
-
-[package.dependencies]
-"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""}
-
-[package.extras]
-docs = ["mercurial (>5.7)", "ryd"]
-jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
-
-[[package]]
-name = "ruamel-yaml-clib"
-version = "0.2.8"
-description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"},
-    {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"},
-    {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"},
-    {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"},
-    {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"},
-    {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = 
"sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, - {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, -] - [[package]] name = "sanic" -version = "24.6.0" +version = "24.12.0" description = "A web server and web framework that's written to go fast. Build fast. Run fast." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "sanic-24.6.0-py3-none-any.whl", hash = "sha256:e2c6b392e213d85d9843cf27c64e3f2dacb3ec5c31c8c7ade4c404cd3030e994"}, - {file = "sanic-24.6.0.tar.gz", hash = "sha256:2e0841e2c8c28e68a0e6fc570c42aafbbe3b385d7141b9f96997d9d6c17d7afb"}, + {file = "sanic-24.12.0-py3-none-any.whl", hash = "sha256:3c2a01ec0b6c5926e3efe34eac1b497d31ed989038fe213eb25ad0c98687d388"}, + {file = "sanic-24.12.0.tar.gz", hash = "sha256:09c23aa917616c1e60e44c66dfd7582cb9fd6503f78298c309945909f5839836"}, ] [package.dependencies] @@ -2494,30 +2600,31 @@ uvloop = {version = ">=0.15.0", markers = "sys_platform != \"win32\" and impleme websockets = ">=10.0" [package.extras] -all = ["autodocsumm (>=0.2.11)", "bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "sphinx (>=2.1.2)", "sphinx-rtd-theme (>=0.4.3)", "towncrier", "tox", "types-ujson", "uvicorn"] -dev = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "towncrier", "tox", "types-ujson", "uvicorn"] -docs = ["autodocsumm (>=0.2.11)", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "pygments", "sphinx (>=2.1.2)", "sphinx-rtd-theme (>=0.4.3)"] +all = ["autodocsumm (>=0.2.11)", "bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "sphinx (>=2.1.2)", "sphinx_rtd_theme (>=0.4.3)", "towncrier", "tox", "types-ujson 
; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] +dev = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "towncrier", "tox", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] +docs = ["autodocsumm (>=0.2.11)", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "pygments", "sphinx (>=2.1.2)", "sphinx_rtd_theme (>=0.4.3)"] ext = ["sanic-ext"] http3 = ["aioquic"] -test = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "types-ujson", "uvicorn"] +test = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] [[package]] name = "sanic-ext" -version = "23.12.0" +version = "24.12.0" description = "Extend your Sanic installation with some core functionality." optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "sanic-ext-23.12.0.tar.gz", hash = "sha256:42fc41e7fafa58f3b790f685f3dd8a8de281460b4169d0e91f4e11b8747f845c"}, - {file = "sanic_ext-23.12.0-py3-none-any.whl", hash = "sha256:3ba2c143d7c41d89b87a11c6214b9d9b52c3994ff8ce3a03792b54ec5627e2c3"}, + {file = "sanic_ext-24.12.0-py3-none-any.whl", hash = "sha256:861f809f071770cf28acd5f13e97ed59985e07361b13b4b4540da1333730c83e"}, + {file = "sanic_ext-24.12.0.tar.gz", hash = "sha256:8f912f4c29f242bc638346d09b79f0c8896ff64e79bd0e7fa09eac4b6c0e23c8"}, ] [package.dependencies] pyyaml = ">=3.0.0" [package.extras] -dev = ["Jinja2", "black (>=21.4b2)", "coverage", "flake8 (>=3.7.7)", "isort (>=5.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "sanic-testing (>=22.9.0)", "tox"] -test = ["Jinja2", "coverage", "pytest", "pytest-asyncio", "pytest-cov", "sanic-testing (>=22.9.0)", "tox"] +dev = ["Jinja2", "black (>=21.4b2)", "coverage", "flake8 (>=3.7.7)", "isort (>=5.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "sanic_testing (>=22.9.0)", "tox"] +test = ["Jinja2", "coverage", "pytest", "pytest-asyncio", "pytest-cov", "sanic_testing (>=22.9.0)", "tox"] [[package]] name = "sanic-routing" @@ -2525,6 +2632,7 @@ version = "23.12.0" description = "Core routing component for Sanic" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "sanic-routing-23.12.0.tar.gz", hash = "sha256:1dcadc62c443e48c852392dba03603f9862b6197fc4cba5bbefeb1ace0848b04"}, {file = "sanic_routing-23.12.0-py3-none-any.whl", hash = "sha256:1558a72afcb9046ed3134a5edae02fc1552cff08f0fff2e8d5de0877ea43ed73"}, @@ -2532,13 +2640,14 @@ files = [ [[package]] name = "sentry-sdk" -version = "2.19.0" +version = "2.26.1" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "sentry_sdk-2.19.0-py2.py3-none-any.whl", hash = "sha256:7b0b3b709dee051337244a09a30dbf6e95afe0d34a1f8b430d45e0982a7c125b"}, - {file = "sentry_sdk-2.19.0.tar.gz", hash = "sha256:ee4a4d2ae8bfe3cac012dcf3e4607975904c137e1738116549fc3dbbb6ff0e36"}, + {file = "sentry_sdk-2.26.1-py2.py3-none-any.whl", hash = 
"sha256:e99390e3f217d13ddcbaeaed08789f1ca614d663b345b9da42e35ad6b60d696a"}, + {file = "sentry_sdk-2.26.1.tar.gz", hash = "sha256:759e019c41551a21519a95e6cef6d91fb4af1054761923dadaee2e6eca9c02c7"}, ] [package.dependencies] @@ -2583,37 +2692,41 @@ sanic = ["sanic (>=0.8)"] sqlalchemy = ["sqlalchemy (>=1.2)"] starlette = ["starlette (>=0.19.1)"] starlite = ["starlite (>=1.48)"] +statsig = ["statsig (>=0.55.3)"] tornado = ["tornado (>=6)"] +unleash = ["UnleashClient (>=6.0.1)"] [[package]] name = "setuptools" -version = "75.6.0" +version = "75.9.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, - {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, + {file = "setuptools-75.9.1-py3-none-any.whl", hash = "sha256:0a6f876d62f4d978ca1a11ab4daf728d1357731f978543ff18ecdbf9fd071f73"}, + {file = "setuptools-75.9.1.tar.gz", hash = "sha256:b6eca2c3070cdc82f71b4cb4bb2946bc0760a210d11362278cf1ff394e6ea32c"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", 
"jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -2622,98 +2735,89 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -optional = false -python-versions = "*" -files = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] - [[package]] name = "sqlalchemy" -version = "2.0.36" +version = "2.0.40" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, - {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, - {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, +groups = ["main"] +files = [ + {file = 
"SQLAlchemy-2.0.40-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win32.whl", hash = "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win_amd64.whl", hash = "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win32.whl", hash = "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win_amd64.whl", hash = "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win32.whl", hash = "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win_amd64.whl", hash = "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4"}, + {file = 
"sqlalchemy-2.0.40-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win32.whl", hash = "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win_amd64.whl", hash = "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win32.whl", hash = "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win_amd64.whl", hash = "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win32.whl", hash = "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff"}, + {file = 
"sqlalchemy-2.0.40-cp38-cp38-win_amd64.whl", hash = "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win32.whl", hash = "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win_amd64.whl", hash = "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870"}, + {file = "sqlalchemy-2.0.40-py3-none-any.whl", hash = "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a"}, + {file = "sqlalchemy-2.0.40.tar.gz", hash = "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +greenlet = {version = ">=1", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] @@ -2724,7 +2828,7 @@ mysql-connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] postgresql-pg8000 = ["pg8000 (>=1.29.1)"] postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] @@ -2735,13 +2839,14 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "tenacity" -version = 
"9.0.0" +version = "9.1.2" description = "Retry code until it succeeds" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, - {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, + {file = "tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"}, + {file = "tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb"}, ] [package.extras] @@ -2754,6 +2859,7 @@ version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -2765,6 +2871,7 @@ version = "1.1.1" description = "Human-readable HTML tracebacks for Python exceptions" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "tracerite-1.1.1-py3-none-any.whl", hash = "sha256:3a787a9ecb1a136ea9ce17e6328e414ec414a4f644130af4e1e330bec2dece29"}, {file = "tracerite-1.1.1.tar.gz", hash = "sha256:6400a35a187747189e4bb8d4a8e471bd86d14dbdcc94bcad23f4eda023f41356"}, @@ -2773,26 +2880,68 @@ files = [ [package.dependencies] html5tagger = ">=1.2.1" +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20241206" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, + {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250402" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "types_pyyaml-6.0.12.20250402-py3-none-any.whl", hash = "sha256:652348fa9e7a203d4b0d21066dfb00760d3cbd5a15ebb7cf8d33c88a49546681"}, + {file = "types_pyyaml-6.0.12.20250402.tar.gz", hash = "sha256:d7c13c3e6d335b6af4b0122a01ff1d270aba84ab96d1a1a1063ecba3e13ec075"}, +] + [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = 
"typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, ] +[package.dependencies] +typing-extensions = ">=4.12.0" + [[package]] name = "tzdata" -version = "2024.1" +version = "2025.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, ] [[package]] @@ -2801,6 +2950,8 @@ version = "5.10.0" description = "Ultra fast JSON encoder and decoder for Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "sys_platform != \"win32\" and implementation_name == \"cpython\"" files = [ {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, @@ -2888,6 +3039,7 @@ version = "0.11.3" description = "Type-checked function calls at runtime" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "undictify-0.11.3-py3-none-any.whl", hash = "sha256:4bfdc075b2f06ee027b05e241434c8efcbebf6c83fcc5b8d9d8def56dab4b5ff"}, {file = "undictify-0.11.3.tar.gz", hash = "sha256:1481170ed8b9862c033e7549d817b90cead6002677c602d1bbdbf8ea15100098"}, @@ -2895,17 +3047,18 @@ files = [ [[package]] name = "urllib3" -version = "2.2.3" +version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2916,6 +3069,8 @@ version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" +groups = ["main"] +markers = "sys_platform != \"win32\" and implementation_name == \"cpython\"" files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -2967,6 +3122,7 @@ version = "1.8.0" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, @@ -2979,83 +3135,81 @@ test = ["websockets"] [[package]] name = "websockets" -version = "12.0" +version = "15.0.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false -python-versions = ">=3.8" -files = [ - {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, - {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, - {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, - {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, - {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, - {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, - {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, - {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, - {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, - {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, - {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, - {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, - {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, - {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, - {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, - {file = 
"websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, - {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, - {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, - {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, - {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, - {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = 
"websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, ] [[package]] @@ -3064,6 +3218,7 @@ version = "3.1.3" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, @@ -3081,6 +3236,7 @@ version = "1.2.0" description = "WebSockets state-machine based protocol implementation" optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, @@ -3091,108 +3247,124 @@ h11 = ">=0.9.0,<1" [[package]] name = "yarl" -version = "1.9.4" +version = "1.20.0" description = "Yet another URL library" optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a8c1df72eb746f4136fe9a2e72b0c9dc1da1cbd23b5372f94b5820ff8ae30e0e"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a3a6ed1d525bfb91b3fc9b690c5a21bb52de28c018530ad85093cc488bee2dd2"}, - {file = "yarl-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c38c9ddb6103ceae4e4498f9c08fac9b590c5c71b0370f98714768e22ac6fa66"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d9e09c9d74f4566e905a0b8fa668c58109f7624db96a2171f21747abc7524234"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8477c1ee4bd47c57d49621a062121c3023609f7a13b8a46953eb6c9716ca392"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5ff2c858f5f6a42c2a8e751100f237c5e869cbde669a724f2062d4c4ef93551"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:357495293086c5b6d34ca9616a43d329317feab7917518bc97a08f9e55648455"}, - {file = "yarl-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:54525ae423d7b7a8ee81ba189f131054defdb122cde31ff17477951464c1691c"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:801e9264d19643548651b9db361ce3287176671fb0117f96b5ac0ee1c3530d53"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e516dc8baf7b380e6c1c26792610230f37147bb754d6426462ab115a02944385"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7d5aaac37d19b2904bb9dfe12cdb08c8443e7ba7d2852894ad448d4b8f442863"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:54beabb809ffcacbd9d28ac57b0db46e42a6e341a030293fb3185c409e626b8b"}, - {file = "yarl-1.9.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bac8d525a8dbc2a1507ec731d2867025d11ceadcb4dd421423a5d42c56818541"}, - {file = "yarl-1.9.4-cp310-cp310-win32.whl", hash = 
"sha256:7855426dfbddac81896b6e533ebefc0af2f132d4a47340cee6d22cac7190022d"}, - {file = "yarl-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:848cd2a1df56ddbffeb375535fb62c9d1645dde33ca4d51341378b3f5954429b"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:35a2b9396879ce32754bd457d31a51ff0a9d426fd9e0e3c33394bf4b9036b099"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c7d56b293cc071e82532f70adcbd8b61909eec973ae9d2d1f9b233f3d943f2c"}, - {file = "yarl-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d8a1c6c0be645c745a081c192e747c5de06e944a0d21245f4cf7c05e457c36e0"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b3c1ffe10069f655ea2d731808e76e0f452fc6c749bea04781daf18e6039525"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:549d19c84c55d11687ddbd47eeb348a89df9cb30e1993f1b128f4685cd0ebbf8"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a7409f968456111140c1c95301cadf071bd30a81cbd7ab829169fb9e3d72eae9"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23a6d84d9d1738dbc6e38167776107e63307dfc8ad108e580548d1f2c587f42"}, - {file = "yarl-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8b889777de69897406c9fb0b76cdf2fd0f31267861ae7501d93003d55f54fbe"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:03caa9507d3d3c83bca08650678e25364e1843b484f19986a527630ca376ecce"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4e9035df8d0880b2f1c7f5031f33f69e071dfe72ee9310cfc76f7b605958ceb9"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:c0ec0ed476f77db9fb29bca17f0a8fcc7bc97ad4c6c1d8959c507decb22e8572"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:ee04010f26d5102399bd17f8df8bc38dc7ccd7701dc77f4a68c5b8d733406958"}, - {file = "yarl-1.9.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49a180c2e0743d5d6e0b4d1a9e5f633c62eca3f8a86ba5dd3c471060e352ca98"}, - {file = "yarl-1.9.4-cp311-cp311-win32.whl", hash = "sha256:81eb57278deb6098a5b62e88ad8281b2ba09f2f1147c4767522353eaa6260b31"}, - {file = "yarl-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:d1d2532b340b692880261c15aee4dc94dd22ca5d61b9db9a8a361953d36410b1"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0d2454f0aef65ea81037759be5ca9947539667eecebca092733b2eb43c965a81"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:44d8ffbb9c06e5a7f529f38f53eda23e50d1ed33c6c869e01481d3fafa6b8142"}, - {file = "yarl-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aaaea1e536f98754a6e5c56091baa1b6ce2f2700cc4a00b0d49eca8dea471074"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3777ce5536d17989c91696db1d459574e9a9bd37660ea7ee4d3344579bb6f129"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fc5fc1eeb029757349ad26bbc5880557389a03fa6ada41703db5e068881e5f2"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea65804b5dc88dacd4a40279af0cdadcfe74b3e5b4c897aa0d81cf86927fee78"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:aa102d6d280a5455ad6a0f9e6d769989638718e938a6a0a2ff3f4a7ff8c62cc4"}, - {file = "yarl-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09efe4615ada057ba2d30df871d2f668af661e971dfeedf0c159927d48bbeff0"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:008d3e808d03ef28542372d01057fd09168419cdc8f848efe2804f894ae03e51"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6f5cb257bc2ec58f437da2b37a8cd48f666db96d47b8a3115c29f316313654ff"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:992f18e0ea248ee03b5a6e8b3b4738850ae7dbb172cc41c966462801cbf62cf7"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:0e9d124c191d5b881060a9e5060627694c3bdd1fe24c5eecc8d5d7d0eb6faabc"}, - {file = "yarl-1.9.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3986b6f41ad22988e53d5778f91855dc0399b043fc8946d4f2e68af22ee9ff10"}, - {file = "yarl-1.9.4-cp312-cp312-win32.whl", hash = "sha256:4b21516d181cd77ebd06ce160ef8cc2a5e9ad35fb1c5930882baff5ac865eee7"}, - {file = "yarl-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a9bd00dc3bc395a662900f33f74feb3e757429e545d831eef5bb280252631984"}, - {file = "yarl-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:63b20738b5aac74e239622d2fe30df4fca4942a86e31bf47a81a0e94c14df94f"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7d7f7de27b8944f1fee2c26a88b4dabc2409d2fea7a9ed3df79b67277644e17"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c74018551e31269d56fab81a728f683667e7c28c04e807ba08f8c9e3bba32f14"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ca06675212f94e7a610e85ca36948bb8fc023e458dd6c63ef71abfd482481aa5"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aef935237d60a51a62b86249839b51345f47564208c6ee615ed2a40878dccdd"}, - {file = "yarl-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b134fd795e2322b7684155b7855cc99409d10b2e408056db2b93b51a52accc7"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d25039a474c4c72a5ad4b52495056f843a7ff07b632c1b92ea9043a3d9950f6e"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f7d6b36dd2e029b6bcb8a13cf19664c7b8e19ab3a58e0fefbb5b8461447ed5ec"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:957b4774373cf6f709359e5c8c4a0af9f6d7875db657adb0feaf8d6cb3c3964c"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d7eeb6d22331e2fd42fce928a81c697c9ee2d51400bd1a28803965883e13cead"}, - {file = "yarl-1.9.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:6a962e04b8f91f8c4e5917e518d17958e3bdee71fd1d8b88cdce74dd0ebbf434"}, - {file = "yarl-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:f3bc6af6e2b8f92eced34ef6a96ffb248e863af20ef4fde9448cc8c9b858b749"}, - {file = "yarl-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4d7a90a92e528aadf4965d685c17dacff3df282db1121136c382dc0b6014d2"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ec61d826d80fc293ed46c9dd26995921e3a82146feacd952ef0757236fc137be"}, - {file = "yarl-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8be9e837ea9113676e5754b43b940b50cce76d9ed7d2461df1af39a8ee674d9f"}, - {file = 
"yarl-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bef596fdaa8f26e3d66af846bbe77057237cb6e8efff8cd7cc8dff9a62278bbf"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d47552b6e52c3319fede1b60b3de120fe83bde9b7bddad11a69fb0af7db32f1"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84fc30f71689d7fc9168b92788abc977dc8cefa806909565fc2951d02f6b7d57"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4aa9741085f635934f3a2583e16fcf62ba835719a8b2b28fb2917bb0537c1dfa"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:206a55215e6d05dbc6c98ce598a59e6fbd0c493e2de4ea6cc2f4934d5a18d130"}, - {file = "yarl-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07574b007ee20e5c375a8fe4a0789fad26db905f9813be0f9fef5a68080de559"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5a2e2433eb9344a163aced6a5f6c9222c0786e5a9e9cac2c89f0b28433f56e23"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6ad6d10ed9b67a382b45f29ea028f92d25bc0bc1daf6c5b801b90b5aa70fb9ec"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6fe79f998a4052d79e1c30eeb7d6c1c1056ad33300f682465e1b4e9b5a188b78"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a825ec844298c791fd28ed14ed1bffc56a98d15b8c58a20e0e08c1f5f2bea1be"}, - {file = "yarl-1.9.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8619d6915b3b0b34420cf9b2bb6d81ef59d984cb0fde7544e9ece32b4b3043c3"}, - {file = "yarl-1.9.4-cp38-cp38-win32.whl", hash = "sha256:686a0c2f85f83463272ddffd4deb5e591c98aac1897d65e92319f729c320eece"}, - {file = "yarl-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:a00862fb23195b6b8322f7d781b0dc1d82cb3bcac346d1e38689370cc1cc398b"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:604f31d97fa493083ea21bd9b92c419012531c4e17ea6da0f65cacdcf5d0bd27"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8a854227cf581330ffa2c4824d96e52ee621dd571078a252c25e3a3b3d94a1b1"}, - {file = "yarl-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ba6f52cbc7809cd8d74604cce9c14868306ae4aa0282016b641c661f981a6e91"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6327976c7c2f4ee6816eff196e25385ccc02cb81427952414a64811037bbc8b"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8397a3817d7dcdd14bb266283cd1d6fc7264a48c186b986f32e86d86d35fbac5"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0381b4ce23ff92f8170080c97678040fc5b08da85e9e292292aba67fdac6c34"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23d32a2594cb5d565d358a92e151315d1b2268bc10f4610d098f96b147370136"}, - {file = "yarl-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ddb2a5c08a4eaaba605340fdee8fc08e406c56617566d9643ad8bf6852778fc7"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:26a1dc6285e03f3cc9e839a2da83bcbf31dcb0d004c72d0730e755b33466c30e"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:18580f672e44ce1238b82f7fb87d727c4a131f3a9d33a5e0e82b793362bf18b4"}, - {file = 
"yarl-1.9.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:29e0f83f37610f173eb7e7b5562dd71467993495e568e708d99e9d1944f561ec"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:1f23e4fe1e8794f74b6027d7cf19dc25f8b63af1483d91d595d4a07eca1fb26c"}, - {file = "yarl-1.9.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:db8e58b9d79200c76956cefd14d5c90af54416ff5353c5bfd7cbe58818e26ef0"}, - {file = "yarl-1.9.4-cp39-cp39-win32.whl", hash = "sha256:c7224cab95645c7ab53791022ae77a4509472613e839dab722a72abe5a684575"}, - {file = "yarl-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:824d6c50492add5da9374875ce72db7a0733b29c2394890aef23d533106e2b15"}, - {file = "yarl-1.9.4-py3-none-any.whl", hash = "sha256:928cecb0ef9d5a7946eb6ff58417ad2fe9375762382f1bf5c55e61645f2c43ad"}, - {file = "yarl-1.9.4.tar.gz", hash = "sha256:566db86717cf8080b99b58b083b773a908ae40f06681e87e589a976faf8246bf"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f1f6670b9ae3daedb325fa55fbe31c22c8228f6e0b513772c2e1c623caa6ab22"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85a231fa250dfa3308f3c7896cc007a47bc76e9e8e8595c20b7426cac4884c62"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a06701b647c9939d7019acdfa7ebbfbb78ba6aa05985bb195ad716ea759a569"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7595498d085becc8fb9203aa314b136ab0516c7abd97e7d74f7bb4eb95042abe"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af5607159085dcdb055d5678fc2d34949bd75ae6ea6b4381e784bbab1c3aa195"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95b50910e496567434cb77a577493c26bce0f31c8a305135f3bda6a2483b8e10"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b594113a301ad537766b4e16a5a6750fcbb1497dcc1bc8a4daae889e6402a634"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:083ce0393ea173cd37834eb84df15b6853b555d20c52703e21fbababa8c129d2"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1a350a652bbbe12f666109fbddfdf049b3ff43696d18c9ab1531fbba1c977a"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fb0caeac4a164aadce342f1597297ec0ce261ec4532bbc5a9ca8da5622f53867"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d88cc43e923f324203f6ec14434fa33b85c06d18d59c167a0637164863b8e995"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e52d6ed9ea8fd3abf4031325dc714aed5afcbfa19ee4a89898d663c9976eb487"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ce360ae48a5e9961d0c730cf891d40698a82804e85f6e74658fb175207a77cb2"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:06d06c9d5b5bc3eb56542ceeba6658d31f54cf401e8468512447834856fb0e61"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c27d98f4e5c4060582f44e58309c1e55134880558f1add7a87c1bc36ecfade19"}, + {file = "yarl-1.20.0-cp310-cp310-win32.whl", hash = "sha256:f4d3fa9b9f013f7050326e165c3279e22850d02ae544ace285674cb6174b5d6d"}, + {file = "yarl-1.20.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:bc906b636239631d42eb8a07df8359905da02704a868983265603887ed68c076"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fdb5204d17cb32b2de2d1e21c7461cabfacf17f3645e4b9039f210c5d3378bf3"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eaddd7804d8e77d67c28d154ae5fab203163bd0998769569861258e525039d2a"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:634b7ba6b4a85cf67e9df7c13a7fb2e44fa37b5d34501038d174a63eaac25ee2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d409e321e4addf7d97ee84162538c7258e53792eb7c6defd0c33647d754172e"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ea52f7328a36960ba3231c6677380fa67811b414798a6e071c7085c57b6d20a9"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8703517b924463994c344dcdf99a2d5ce9eca2b6882bb640aa555fb5efc706a"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:077989b09ffd2f48fb2d8f6a86c5fef02f63ffe6b1dd4824c76de7bb01e4f2e2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0acfaf1da020253f3533526e8b7dd212838fdc4109959a2c53cafc6db611bff2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4230ac0b97ec5eeb91d96b324d66060a43fd0d2a9b603e3327ed65f084e41f8"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a6a1e6ae21cdd84011c24c78d7a126425148b24d437b5702328e4ba640a8902"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:86de313371ec04dd2531f30bc41a5a1a96f25a02823558ee0f2af0beaa7ca791"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dd59c9dd58ae16eaa0f48c3d0cbe6be8ab4dc7247c3ff7db678edecbaf59327f"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a0bc5e05f457b7c1994cc29e83b58f540b76234ba6b9648a4971ddc7f6aa52da"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c9471ca18e6aeb0e03276b5e9b27b14a54c052d370a9c0c04a68cefbd1455eb4"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40ed574b4df723583a26c04b298b283ff171bcc387bc34c2683235e2487a65a5"}, + {file = "yarl-1.20.0-cp311-cp311-win32.whl", hash = "sha256:db243357c6c2bf3cd7e17080034ade668d54ce304d820c2a58514a4e51d0cfd6"}, + {file = "yarl-1.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c12cd754d9dbd14204c328915e23b0c361b88f3cffd124129955e60a4fbfcfb"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e06b9f6cdd772f9b665e5ba8161968e11e403774114420737f7884b5bd7bdf6f"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b9ae2fbe54d859b3ade40290f60fe40e7f969d83d482e84d2c31b9bff03e359e"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d12b8945250d80c67688602c891237994d203d42427cb14e36d1a732eda480e"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58"}, + {file = 
"yarl-1.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b"}, + {file = "yarl-1.20.0-cp312-cp312-win32.whl", hash = "sha256:839de4c574169b6598d47ad61534e6981979ca2c820ccb77bf70f4311dd2cc64"}, + {file = "yarl-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d7dbbe44b443b0c4aa0971cb07dcb2c2060e4a9bf8d1301140a33a93c98e18c"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384"}, + {file = "yarl-1.20.0-cp313-cp313-win32.whl", hash = "sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62"}, + {file = "yarl-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f"}, + {file = "yarl-1.20.0-cp313-cp313t-win32.whl", hash = "sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac"}, + {file = "yarl-1.20.0-cp313-cp313t-win_amd64.whl", hash = 
"sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:119bca25e63a7725b0c9d20ac67ca6d98fa40e5a894bd5d4686010ff73397914"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35d20fb919546995f1d8c9e41f485febd266f60e55383090010f272aca93edcc"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:484e7a08f72683c0f160270566b4395ea5412b4359772b98659921411d32ad26"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d8a3d54a090e0fff5837cd3cc305dd8a07d3435a088ddb1f65e33b322f66a94"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f0cf05ae2d3d87a8c9022f3885ac6dea2b751aefd66a4f200e408a61ae9b7f0d"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a884b8974729e3899d9287df46f015ce53f7282d8d3340fa0ed57536b440621c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8d8aa8dd89ffb9a831fedbcb27d00ffd9f4842107d52dc9d57e64cb34073d5c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4e88d6c3c8672f45a30867817e4537df1bbc6f882a91581faf1f6d9f0f1b5a"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdb77efde644d6f1ad27be8a5d67c10b7f769804fff7a966ccb1da5a4de4b656"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4ba5e59f14bfe8d261a654278a0f6364feef64a794bd456a8c9e823071e5061c"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d0bf955b96ea44ad914bc792c26a0edcd71b4668b93cbcd60f5b0aeaaed06c64"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:27359776bc359ee6eaefe40cb19060238f31228799e43ebd3884e9c589e63b20"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:04d9c7a1dc0a26efb33e1acb56c8849bd57a693b85f44774356c92d610369efa"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:faa709b66ae0e24c8e5134033187a972d849d87ed0a12a0366bedcc6b5dc14a5"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44869ee8538208fe5d9342ed62c11cc6a7a1af1b3d0bb79bb795101b6e77f6e0"}, + {file = "yarl-1.20.0-cp39-cp39-win32.whl", hash = "sha256:b7fa0cb9fd27ffb1211cde944b41f5c67ab1c13a13ebafe470b1e206b8459da8"}, + {file = "yarl-1.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:d4fad6e5189c847820288286732075f213eabf81be4d08d6cc309912e62be5b7"}, + {file = "yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124"}, + {file = "yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307"}, ] [package.dependencies] idna = ">=2.0" multidict = ">=4.0" +propcache = ">=0.2.1" [metadata] -lock-version = "2.0" -python-versions = "^3.12" -content-hash = "176a77d0b160abe7d857c173d85846148ad3a70557c7825f7463163a50fcdbc2" +lock-version = "2.1" +python-versions = "^3.13" +content-hash = "7f5f30247d45caad40a3326283b944904847ca7433a4558e9029182620486073" diff --git a/projects/renku_data_service/pyproject.toml b/projects/renku_data_service/pyproject.toml index 06b19fe67..693811cf8 100644 --- a/projects/renku_data_service/pyproject.toml +++ b/projects/renku_data_service/pyproject.toml @@ -1,10 +1,15 @@ -[tool.poetry] +[project] name = 
"renku_data_service" version = "0.1.0" description = "" -authors = ['Swiss Data Science Center '] -license = "Apache License 2.0" +authors = [ + { name = "Swiss Data Science Center", email = "contact@datascience.ch" }, +] +license = "" +requires-python = ">=3.13" +dynamic = ["dependencies"] +[tool.poetry] packages = [ { include = "renku_data_services/data_api", from = "../../bases" }, { include = "renku_data_services/app_config", from = "../../components" }, @@ -19,6 +24,7 @@ packages = [ { include = "renku_data_services/errors", from = "../../components" }, { include = "renku_data_services/git", from = "../../components" }, { include = "renku_data_services/k8s", from = "../../components" }, + { include = "renku_data_services/k8s_watcher", from = "../../components" }, { include = "renku_data_services/message_queue", from = "../../components" }, { include = "renku_data_services/namespace", from = "../../components" }, { include = "renku_data_services/platform", from = "../../components" }, @@ -33,50 +39,54 @@ packages = [ { include = "renku_data_services/notebooks", from = "../../components" }, # Note: poetry poly does not detect the migrations as dependencies, but they are. Don't remove these! { include = "renku_data_services/migrations", from = "../../components" }, + { include = "renku_data_services/solr", from = "../../components" }, + { include = "renku_data_services/search", from = "../../components" }, + { include = "renku_data_services/metrics", from = "../../components" }, ] [tool.poetry.dependencies] -python = "^3.12" -sanic = { extras = ["ext"], version = "^24.6.0" } -pydantic = {extras = ["email"], version = "^2.10.2"} +python = "^3.13" +sanic = { extras = ["ext"], version = "^24.12.0" } +pydantic = { extras = ["email"], version = "^2.10.6" } datamodel-code-generator = "^0.24.2" -sqlalchemy = { extras = ["asyncio"], version = "^2.0.36" } -alembic = "^1.14.0" +sqlalchemy = { extras = ["asyncio"], version = "^2.0.38" } +alembic = "^1.14.1" asyncpg = "^0.30.0" pyjwt = { extras = ["crypto"], version = "^2.10.1" } tenacity = "^9.0.0" httpx = "<0.29" kubernetes = "^31.0.0" python-ulid = "^3.0.0" -python-gitlab = "^5.1.0" +python-gitlab = "^5.6.0" psycopg = { version = "^3.2.3", extras = ["binary"] } -urllib3 = "^2.2.3" +urllib3 = "^2.3.0" deepmerge = "^2.0" -authlib = "^1.3.2" -redis = "^5.2.0" -dataclasses-avroschema = "^0.65.4" +authlib = "^1.5.0" undictify = "^0.11.3" prometheus-sanic = "^3.0.0" -sentry-sdk = {version = "^2.19.0", extras = ["sanic"]} -authzed = "^1.1.0" +sentry-sdk = { version = "^2.22.0", extras = ["sanic"] } +authzed = "^1.20.0" # see https://github.com/sanic-org/sanic/issues/2828 for setuptools dependency, remove when not needed anymore -setuptools = { version = "^75.6.0" } +setuptools = { version = "^75.8.2" } aiofile = "^3.9.0" # Not a direct dependency, it is needed by authzed. Was causing things to crash at startup beacuse of # google.protobuf.runtime_version.VersionError: # Detected incompatible Protobuf Gencode/Runtime versions when loading authzed/api/v1/core.proto: gencode 5.28.2 runtime 5.27.3. 
-protobuf = "^5.29.0" -cryptography = "^44.0.0" -kubernetes-asyncio = "^31.1.0" -marshmallow = "^3.23.1" +protobuf = "^5.29.3" +cryptography = "^44.0.1" +kubernetes-asyncio = "^32.0.0" +marshmallow = "^3.26.1" escapism = "^1.0.1" -kr8s = "^0.18.1" +kr8s = "^0.20.7" +python-box = "^7.0.1" werkzeug = "^3.1.3" toml = "^0.10.2" +parsy = "^2.1" +sanic-ext = "^24.12.0" +posthog = "^3.21.0" +markdown-code-runner = "^2.2.0" [tool.poetry.group.dev.dependencies] -pyavro-gen = "^0.3.3" -fakeredis = "^2.26.1" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/projects/background_jobs/Dockerfile b/projects/renku_data_tasks/Dockerfile similarity index 77% rename from projects/background_jobs/Dockerfile rename to projects/renku_data_tasks/Dockerfile index ccf1ef77f..0862fa86e 100644 --- a/projects/background_jobs/Dockerfile +++ b/projects/renku_data_tasks/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.12-bookworm as builder +FROM python:3.13-bookworm AS builder ARG DEV_BUILD=false ARG USER_UID=1000 ARG USER_GID=$USER_UID @@ -13,7 +13,8 @@ RUN python3 -m pip install --user pipx && \ /home/renku/.local/bin/pipx install virtualenv && \ /home/renku/.local/bin/virtualenv env && \ /home/renku/.local/bin/poetry self add poetry-multiproject-plugin && \ - /home/renku/.local/bin/poetry self add poetry-polylith-plugin + /home/renku/.local/bin/poetry self add poetry-polylith-plugin && \ + /home/renku/.local/bin/poetry self add poetry-plugin-export COPY --chown=$USER_UID:$USER_GID . . RUN if $DEV_BUILD ; then \ /home/renku/.local/bin/poetry export -o requirements.txt --with dev; \ @@ -21,10 +22,10 @@ RUN if $DEV_BUILD ; then \ /home/renku/.local/bin/poetry export -o requirements.txt; \ fi && \ env/bin/pip install -r requirements.txt -RUN /home/renku/.local/bin/poetry build-project -f wheel -C projects/background_jobs -RUN env/bin/pip --no-cache-dir install projects/background_jobs/dist/*.whl +RUN /home/renku/.local/bin/poetry -C projects/renku_data_tasks build-project -f wheel --custom-temp-path=/tmp +RUN env/bin/pip --no-cache-dir install projects/renku_data_tasks/dist/*.whl -FROM python:3.12-slim-bookworm +FROM python:3.13-slim-bookworm ARG USER_UID=1000 ARG USER_GID=$USER_UID RUN apt-get update && apt-get install -y \ @@ -35,4 +36,4 @@ RUN apt-get update && apt-get install -y \ USER $USER_UID:$USER_GID WORKDIR /app COPY --from=builder /app/env ./env -ENTRYPOINT ["tini", "-g", "--", "env/bin/python", "-m", "renku_data_services.background_jobs.main"] +ENTRYPOINT ["tini", "-g", "--", "env/bin/python", "-m", "renku_data_services.data_tasks.main"] diff --git a/projects/renku_data_tasks/poetry.lock b/projects/renku_data_tasks/poetry.lock new file mode 100644 index 000000000..94226e83a --- /dev/null +++ b/projects/renku_data_tasks/poetry.lock @@ -0,0 +1,3373 @@ +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. + +[[package]] +name = "aiofile" +version = "3.9.0" +description = "Asynchronous file operations." +optional = false +python-versions = "<4,>=3.8" +groups = ["main"] +files = [ + {file = "aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa"}, + {file = "aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b"}, +] + +[package.dependencies] +caio = ">=0.9.0,<0.10.0" + +[[package]] +name = "aiofiles" +version = "24.1.0" +description = "File support for asyncio." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, + {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, +] + +[[package]] +name = "aiohttp" +version = "3.11.18" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:96264854fedbea933a9ca4b7e0c745728f01380691687b7365d18d9e977179c4"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9602044ff047043430452bc3a2089743fa85da829e6fc9ee0025351d66c332b6"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5691dc38750fcb96a33ceef89642f139aa315c8a193bbd42a0c33476fd4a1609"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554c918ec43f8480b47a5ca758e10e793bd7410b83701676a4782672d670da55"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a4076a2b3ba5b004b8cffca6afe18a3b2c5c9ef679b4d1e9859cf76295f8d4f"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:767a97e6900edd11c762be96d82d13a1d7c4fc4b329f054e88b57cdc21fded94"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0ddc9337a0fb0e727785ad4f41163cc314376e82b31846d3835673786420ef1"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f414f37b244f2a97e79b98d48c5ff0789a0b4b4609b17d64fa81771ad780e415"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fdb239f47328581e2ec7744ab5911f97afb10752332a6dd3d98e14e429e1a9e7"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:f2c50bad73ed629cc326cc0f75aed8ecfb013f88c5af116f33df556ed47143eb"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a8d8f20c39d3fa84d1c28cdb97f3111387e48209e224408e75f29c6f8e0861d"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:106032eaf9e62fd6bc6578c8b9e6dc4f5ed9a5c1c7fb2231010a1b4304393421"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b491e42183e8fcc9901d8dcd8ae644ff785590f1727f76ca86e731c61bfe6643"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad8c745ff9460a16b710e58e06a9dec11ebc0d8f4dd82091cefb579844d69868"}, + {file = "aiohttp-3.11.18-cp310-cp310-win32.whl", hash = "sha256:8e57da93e24303a883146510a434f0faf2f1e7e659f3041abc4e3fb3f6702a9f"}, + {file = "aiohttp-3.11.18-cp310-cp310-win_amd64.whl", hash = "sha256:cc93a4121d87d9f12739fc8fab0a95f78444e571ed63e40bfc78cd5abe700ac9"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:427fdc56ccb6901ff8088544bde47084845ea81591deb16f957897f0f0ba1be9"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c828b6d23b984255b85b9b04a5b963a74278b7356a7de84fda5e3b76866597b"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c2eaa145bb36b33af1ff2860820ba0589e165be4ab63a49aebfd0981c173b66"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d518ce32179f7e2096bf4e3e8438cf445f05fedd597f252de9f54c728574756"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0700055a6e05c2f4711011a44364020d7a10fbbcd02fbf3e30e8f7e7fddc8717"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8bd1cde83e4684324e6ee19adfc25fd649d04078179890be7b29f76b501de8e4"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73b8870fe1c9a201b8c0d12c94fe781b918664766728783241a79e0468427e4f"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25557982dd36b9e32c0a3357f30804e80790ec2c4d20ac6bcc598533e04c6361"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e889c9df381a2433802991288a61e5a19ceb4f61bd14f5c9fa165655dcb1fd1"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9ea345fda05bae217b6cce2acf3682ce3b13d0d16dd47d0de7080e5e21362421"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9f26545b9940c4b46f0a9388fd04ee3ad7064c4017b5a334dd450f616396590e"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3a621d85e85dccabd700294494d7179ed1590b6d07a35709bb9bd608c7f5dd1d"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9c23fd8d08eb9c2af3faeedc8c56e134acdaf36e2117ee059d7defa655130e5f"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9e6b0e519067caa4fd7fb72e3e8002d16a68e84e62e7291092a5433763dc0dd"}, + {file = "aiohttp-3.11.18-cp311-cp311-win32.whl", hash = "sha256:122f3e739f6607e5e4c6a2f8562a6f476192a682a52bda8b4c6d4254e1138f4d"}, + {file = "aiohttp-3.11.18-cp311-cp311-win_amd64.whl", hash = "sha256:e6f3c0a3a1e73e88af384b2e8a0b9f4fb73245afd47589df2afcab6b638fa0e6"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:63d71eceb9cad35d47d71f78edac41fcd01ff10cacaa64e473d1aec13fa02df2"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d1929da615840969929e8878d7951b31afe0bac883d84418f92e5755d7b49508"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d0aebeb2392f19b184e3fdd9e651b0e39cd0f195cdb93328bd124a1d455cd0e"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3849ead845e8444f7331c284132ab314b4dac43bfae1e3cf350906d4fff4620f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e8452ad6b2863709f8b3d615955aa0807bc093c34b8e25b3b52097fe421cb7f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b8d2b42073611c860a37f718b3d61ae8b4c2b124b2e776e2c10619d920350ec"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fbf91f6a0ac317c0a07eb328a1384941872f6761f2e6f7208b63c4cc0a7ff6"}, 
+ {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ff5625413fec55216da5eaa011cf6b0a2ed67a565914a212a51aa3755b0009"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7f33a92a2fde08e8c6b0c61815521324fc1612f397abf96eed86b8e31618fdb4"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:11d5391946605f445ddafda5eab11caf310f90cdda1fd99865564e3164f5cff9"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3cc314245deb311364884e44242e00c18b5896e4fe6d5f942e7ad7e4cb640adb"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f421843b0f70740772228b9e8093289924359d306530bcd3926f39acbe1adda"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e220e7562467dc8d589e31c1acd13438d82c03d7f385c9cd41a3f6d1d15807c1"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ab2ef72f8605046115bc9aa8e9d14fd49086d405855f40b79ed9e5c1f9f4faea"}, + {file = "aiohttp-3.11.18-cp312-cp312-win32.whl", hash = "sha256:12a62691eb5aac58d65200c7ae94d73e8a65c331c3a86a2e9670927e94339ee8"}, + {file = "aiohttp-3.11.18-cp312-cp312-win_amd64.whl", hash = "sha256:364329f319c499128fd5cd2d1c31c44f234c58f9b96cc57f743d16ec4f3238c8"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:474215ec618974054cf5dc465497ae9708543cbfc312c65212325d4212525811"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ced70adf03920d4e67c373fd692123e34d3ac81dfa1c27e45904a628567d804"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2d9f6c0152f8d71361905aaf9ed979259537981f47ad099c8b3d81e0319814bd"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a35197013ed929c0aed5c9096de1fc5a9d336914d73ab3f9df14741668c0616c"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:540b8a1f3a424f1af63e0af2d2853a759242a1769f9f1ab053996a392bd70118"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9e6710ebebfce2ba21cee6d91e7452d1125100f41b906fb5af3da8c78b764c1"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8af2ef3b4b652ff109f98087242e2ab974b2b2b496304063585e3d78de0b000"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28c3f975e5ae3dbcbe95b7e3dcd30e51da561a0a0f2cfbcdea30fc1308d72137"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c28875e316c7b4c3e745172d882d8a5c835b11018e33432d281211af35794a93"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:13cd38515568ae230e1ef6919e2e33da5d0f46862943fcda74e7e915096815f3"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0e2a92101efb9f4c2942252c69c63ddb26d20f46f540c239ccfa5af865197bb8"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e6d3e32b8753c8d45ac550b11a1090dd66d110d4ef805ffe60fa61495360b3b2"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ea4cf2488156e0f281f93cc2fd365025efcba3e2d217cbe3df2840f8c73db261"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:9d4df95ad522c53f2b9ebc07f12ccd2cb15550941e11a5bbc5ddca2ca56316d7"}, + {file = "aiohttp-3.11.18-cp313-cp313-win32.whl", hash = "sha256:cdd1bbaf1e61f0d94aced116d6e95fe25942f7a5f42382195fd9501089db5d78"}, + {file = "aiohttp-3.11.18-cp313-cp313-win_amd64.whl", hash = "sha256:bdd619c27e44382cf642223f11cfd4d795161362a5a1fc1fa3940397bc89db01"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:469ac32375d9a716da49817cd26f1916ec787fc82b151c1c832f58420e6d3533"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3cec21dd68924179258ae14af9f5418c1ebdbba60b98c667815891293902e5e0"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b426495fb9140e75719b3ae70a5e8dd3a79def0ae3c6c27e012fc59f16544a4a"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2f41203e2808616292db5d7170cccf0c9f9c982d02544443c7eb0296e8b0c7"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc0ae0a5e9939e423e065a3e5b00b24b8379f1db46046d7ab71753dfc7dd0e1"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe7cdd3f7d1df43200e1c80f1aed86bb36033bf65e3c7cf46a2b97a253ef8798"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5199be2a2f01ffdfa8c3a6f5981205242986b9e63eb8ae03fd18f736e4840721"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ccec9e72660b10f8e283e91aa0295975c7bd85c204011d9f5eb69310555cf30"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1596ebf17e42e293cbacc7a24c3e0dc0f8f755b40aff0402cb74c1ff6baec1d3"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:eab7b040a8a873020113ba814b7db7fa935235e4cbaf8f3da17671baa1024863"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5d61df4a05476ff891cff0030329fee4088d40e4dc9b013fac01bc3c745542c2"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:46533e6792e1410f9801d09fd40cbbff3f3518d1b501d6c3c5b218f427f6ff08"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c1b90407ced992331dd6d4f1355819ea1c274cc1ee4d5b7046c6761f9ec11829"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a2fd04ae4971b914e54fe459dd7edbbd3f2ba875d69e057d5e3c8e8cac094935"}, + {file = "aiohttp-3.11.18-cp39-cp39-win32.whl", hash = "sha256:b2f317d1678002eee6fe85670039fb34a757972284614638f82b903a03feacdc"}, + {file = "aiohttp-3.11.18-cp39-cp39-win_amd64.whl", hash = "sha256:5e7007b8d1d09bce37b54111f593d173691c530b80f27c6493b928dabed9e6ef"}, + {file = "aiohttp-3.11.18.tar.gz", hash = "sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.3.0" +aiosignal = ">=1.1.2" +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" + +[package.extras] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] + +[[package]] +name = "aiosignal" +version = "1.3.2" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.9" +groups = ["main"] 
+files = [ + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "alembic" +version = "1.15.2" +description = "A database migration tool for SQLAlchemy." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "alembic-1.15.2-py3-none-any.whl", hash = "sha256:2e76bd916d547f6900ec4bb5a90aeac1485d2c92536923d0b138c02b126edc53"}, + {file = "alembic-1.15.2.tar.gz", hash = "sha256:1c72391bbdeffccfe317eefba686cb9a3c078005478885413b95c3b26c57a8a7"}, +] + +[package.dependencies] +Mako = "*" +SQLAlchemy = ">=1.4.0" +typing-extensions = ">=4.12" + +[package.extras] +tz = ["tzdata"] + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.9.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +trio = ["trio (>=0.26.1)"] + +[[package]] +name = "argcomplete" +version = "3.6.2" +description = "Bash tab completion for argparse" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591"}, + {file = "argcomplete-3.6.2.tar.gz", hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf"}, +] + +[package.extras] +test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] + +[[package]] +name = "asyncache" +version = "0.3.1" +description = "Helpers to use cachetools with async code." 
+optional = false +python-versions = ">=3.8,<4.0" +groups = ["main"] +files = [ + {file = "asyncache-0.3.1-py3-none-any.whl", hash = "sha256:ef20a1024d265090dd1e0785c961cf98b9c32cc7d9478973dcf25ac1b80011f5"}, + {file = "asyncache-0.3.1.tar.gz", hash = "sha256:9a1e60a75668e794657489bdea6540ee7e3259c483517b934670db7600bf5035"}, +] + +[package.dependencies] +cachetools = ">=5.2.0,<6.0.0" + +[[package]] +name = "asyncpg" +version = "0.30.0" +description = "An asyncio PostgreSQL driver" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, + {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, + {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f"}, + {file = "asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af"}, + {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75"}, + {file = "asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f"}, + {file = "asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf"}, + {file = "asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50"}, + {file = "asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a"}, + {file = "asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed"}, + {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a"}, + {file = "asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956"}, + {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056"}, + {file = "asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454"}, + {file = "asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d"}, + {file = "asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f"}, + {file = "asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e"}, + {file = "asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a"}, + {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3"}, + {file = "asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737"}, + {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a"}, + {file = "asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af"}, + {file = "asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e"}, + {file = "asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305"}, + {file = "asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70"}, + {file = "asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3"}, + {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33"}, + {file = "asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4"}, + {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4"}, + {file = "asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba"}, + {file = "asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590"}, + {file = "asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e"}, + {file = "asyncpg-0.30.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:29ff1fc8b5bf724273782ff8b4f57b0f8220a1b2324184846b39d1ab4122031d"}, + {file = "asyncpg-0.30.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64e899bce0600871b55368b8483e5e3e7f1860c9482e7f12e0a771e747988168"}, + {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b290f4726a887f75dcd1b3006f484252db37602313f806e9ffc4e5996cfe5cb"}, + {file = "asyncpg-0.30.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f86b0e2cd3f1249d6fe6fd6cfe0cd4538ba994e2d8249c0491925629b9104d0f"}, + {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:393af4e3214c8fa4c7b86da6364384c0d1b3298d45803375572f415b6f673f38"}, + {file = "asyncpg-0.30.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:fd4406d09208d5b4a14db9a9dbb311b6d7aeeab57bded7ed2f8ea41aeef39b34"}, + {file = "asyncpg-0.30.0-cp38-cp38-win32.whl", hash = "sha256:0b448f0150e1c3b96cb0438a0d0aa4871f1472e58de14a3ec320dbb2798fb0d4"}, + {file = "asyncpg-0.30.0-cp38-cp38-win_amd64.whl", hash = "sha256:f23b836dd90bea21104f69547923a02b167d999ce053f3d502081acea2fba15b"}, + {file = "asyncpg-0.30.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f4e83f067b35ab5e6371f8a4c93296e0439857b4569850b178a01385e82e9ad"}, + {file = "asyncpg-0.30.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5df69d55add4efcd25ea2a3b02025b669a285b767bfbf06e356d68dbce4234ff"}, + {file = "asyncpg-0.30.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3479a0d9a852c7c84e822c073622baca862d1217b10a02dd57ee4a7a081f708"}, + {file = 
"asyncpg-0.30.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26683d3b9a62836fad771a18ecf4659a30f348a561279d6227dab96182f46144"}, + {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1b982daf2441a0ed314bd10817f1606f1c28b1136abd9e4f11335358c2c631cb"}, + {file = "asyncpg-0.30.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1c06a3a50d014b303e5f6fc1e5f95eb28d2cee89cf58384b700da621e5d5e547"}, + {file = "asyncpg-0.30.0-cp39-cp39-win32.whl", hash = "sha256:1b11a555a198b08f5c4baa8f8231c74a366d190755aa4f99aacec5970afe929a"}, + {file = "asyncpg-0.30.0-cp39-cp39-win_amd64.whl", hash = "sha256:8b684a3c858a83cd876f05958823b68e8d14ec01bb0c0d14a6704c5bf9711773"}, + {file = "asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851"}, +] + +[package.extras] +docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] +gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] + +[[package]] +name = "attrs" +version = "25.3.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, +] + +[package.extras] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] + 
+[[package]] +name = "authlib" +version = "1.6.0" +description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d"}, + {file = "authlib-1.6.0.tar.gz", hash = "sha256:4367d32031b7af175ad3a323d571dc7257b7099d55978087ceae4a0d88cd3210"}, +] + +[package.dependencies] +cryptography = "*" + +[[package]] +name = "authzed" +version = "1.21.1" +description = "Client library for SpiceDB." +optional = false +python-versions = "<4.0,>=3.9" +groups = ["main"] +files = [ + {file = "authzed-1.21.1-py3-none-any.whl", hash = "sha256:9a830c0e9eefc506181f0d82c9a9f73405db46d50e8ecaedd4488486a2792959"}, + {file = "authzed-1.21.1.tar.gz", hash = "sha256:c354d19af5ef1a393381d5be670dd946916742573ae2bf3ac87becdbf44f093b"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.65.0,<2.0.0" +grpc-interceptor = ">=0.15.4,<0.16.0" +grpcio = ">=1.63,<2.0" +protobuf = ">=5.26,<6" +protovalidate = ">=0.7.1,<0.8.0" + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "black" +version = "25.1.0" +description = "The uncompromising code formatter." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.10)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "cachetools" +version = "5.5.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, +] + +[[package]] +name = "caio" +version = "0.9.22" +description = "Asynchronous file IO for Linux MacOS or Windows." 
+optional = false +python-versions = "<4,>=3.7" +groups = ["main"] +files = [ + {file = "caio-0.9.22-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:979bef84869822a0b1d10c99f7240e2ca8b00c138a54bec1fcbef1163a6bc976"}, + {file = "caio-0.9.22-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:bef2533b1444ce80df47ecce25ad8def6eb76a5ba8c1457074a16f5ab6e12670"}, + {file = "caio-0.9.22-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:76fe5d98ff55099ec61a1863eed53ef353ea815e529c19a284865af73b3c84a3"}, + {file = "caio-0.9.22-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dd99c28a85809d0954211d637dd0e2e5c0e385dcdfbdbc4ed914b549d7e0fb69"}, + {file = "caio-0.9.22-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:6ee67c4d63b77b3d07715c41939b71fee1bbb986219e398f3420ac31b43e65a8"}, + {file = "caio-0.9.22-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fb335891181107bdb8f96dc133fc8e2612b62cd270219333659d6d53f75b1770"}, + {file = "caio-0.9.22-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:25aae4c3b846eeca63c7d75d63b3fd8deaba9650ef1168d8a239e6a927299ded"}, + {file = "caio-0.9.22-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:ac332b8e2c8e0840fe10ee6971b38f8dac6ad64ecf6087ee3f70cd376f511699"}, + {file = "caio-0.9.22-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:6f991e4812fd2d6e87f91ff78fcc7d4f299bd98765334756580d4ea42cad89f1"}, + {file = "caio-0.9.22-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c3d29f0f2f3a577b58794960dbfc78ef124a8b3e238d653a4c11c57f20651e34"}, + {file = "caio-0.9.22-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:a3307e064f423675c4ffd2608ecc86bc47652ec1547dab5f94b72d9599c23201"}, + {file = "caio-0.9.22-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:160a1dad60bbb4268811f88f817205fab6cba8fb50801fdd3d865b69999ea924"}, + {file = "caio-0.9.22-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:ceb32307d6d3cfa512220b53b578833ebd27f8ed0b534f15e3e88e6d598bffe7"}, + {file = "caio-0.9.22-cp38-cp38-manylinux_2_34_aarch64.whl", hash = "sha256:7145f2c47233af12cc0194a7bfe584c632b16fa00d3f900806eda527f89ce93b"}, + {file = "caio-0.9.22-cp38-cp38-manylinux_2_34_x86_64.whl", hash = "sha256:3bedd3e8ccc485cb20d4d56b0af1471e8f8b5ca3f0b5f1f21ebd087bfdcd21a7"}, + {file = "caio-0.9.22-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ae19a37b7294dc15c8775ec657464a4370d1f8351d0fc3a284a9f014514b9be5"}, + {file = "caio-0.9.22-cp39-cp39-manylinux_2_34_aarch64.whl", hash = "sha256:70e6a12c9d1db8fb7a07c5193ca352df1488787f1cf1c3814879dec754135031"}, + {file = "caio-0.9.22-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:3f2f7f22c60844165359a285c5d38ca0d4ab10ca6f6def79abf6243d019df64b"}, + {file = "caio-0.9.22.tar.gz", hash = "sha256:7ea533d90e5fa0bba33bc8f4805b4c90f19e8d8ac5139a8033b92a6ab5c4012b"}, +] + +[package.extras] +develop = ["aiomisc-pytest", "pytest", "pytest-cov"] + +[[package]] +name = "cel-python" +version = "0.2.0" +description = "Pure Python implementation of Google Common Expression Language" +optional = false +python-versions = "<4.0,>=3.8" +groups = ["main"] +files = [ + {file = "cel_python-0.2.0-py3-none-any.whl", hash = "sha256:478ff73def7b39d51e6982f95d937a57c2b088c491c578fe5cecdbd79f476f60"}, + {file = "cel_python-0.2.0.tar.gz", hash = "sha256:75de72a5cf223ec690b236f0cc24da267219e667bd3e7f8f4f20595fcc1c0c0f"}, +] + +[package.dependencies] +jmespath = ">=1.0.1,<2.0.0" +lark = ">=0.12.0,<0.13.0" +python-dateutil = ">=2.9.0.post0,<3.0.0" +pyyaml = ">=6.0.1,<7.0.0" +types-python-dateutil = 
">=2.9.0.20240316,<3.0.0.0" +types-pyyaml = ">=6.0.12.20240311,<7.0.0.0" + +[[package]] +name = "certifi" +version = "2025.1.31" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, +] + +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = 
"cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = 
"sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = 
"charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] + +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "cryptography" +version = "44.0.2" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
+optional = false +python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] +files = [ + {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"}, + {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"}, + {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"}, + {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"}, + {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"}, + {file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"}, + {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"}, + {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"}, + {file = 
"cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"}, + {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"}, + {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"}, + {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"}, + {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"}, +] + +[package.dependencies] +cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} + +[package.extras] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""] +pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +sdist = ["build (>=1.0.0)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] +test-randomorder = ["pytest-randomly"] + +[[package]] +name = "datamodel-code-generator" +version = "0.24.2" +description = "Datamodel Code Generator" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "datamodel_code_generator-0.24.2-py3-none-any.whl", hash = "sha256:582c30466def12600d7165c5f624bb63a7e944eeaf8320f282518daf9ccb566c"}, + {file = "datamodel_code_generator-0.24.2.tar.gz", hash = "sha256:d278c751038c8911efc82856ec549ac1e3e13134567387a4bb5ab7ddc6543162"}, +] + +[package.dependencies] +argcomplete = ">=1.10,<4.0" +black = ">=19.10b0" +genson = ">=1.2.1,<2.0" +inflect = ">=4.1.0,<6.0" +isort = ">=4.3.21,<6.0" +jinja2 = ">=2.10.1,<4.0" +packaging = "*" +pydantic = {version = ">=1.10.0,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.4.0 || >2.4.0,<3.0", extras = ["email"], 
markers = "python_version >= \"3.12\" and python_version < \"4.0\""} +pyyaml = ">=6.0.1" + +[package.extras] +debug = ["PySnooper (>=0.4.1,<2.0.0)"] +http = ["httpx"] +validation = ["openapi-spec-validator (>=0.2.8,<0.7.0)", "prance (>=0.18.2)"] + +[[package]] +name = "deepmerge" +version = "2.0" +description = "A toolset for deeply merging Python dictionaries." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "deepmerge-2.0-py3-none-any.whl", hash = "sha256:6de9ce507115cff0bed95ff0ce9ecc31088ef50cbdf09bc90a09349a318b3d00"}, + {file = "deepmerge-2.0.tar.gz", hash = "sha256:5c3d86081fbebd04dd5de03626a0607b809a98fb6ccba5770b62466fe940ff20"}, +] + +[package.extras] +dev = ["black", "build", "mypy", "pytest", "pyupgrade", "twine", "validate-pyproject[all]"] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "dnspython" +version = "2.7.0" +description = "DNS toolkit" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, +] + +[package.extras] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] +doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] +trio = ["trio (>=0.23)"] +wmi = ["wmi (>=1.5.1)"] + +[[package]] +name = "durationpy" +version = "0.9" +description = "Module for converting between datetime.timedelta and Go's Duration strings." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"}, + {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"}, +] + +[[package]] +name = "email-validator" +version = "2.2.0" +description = "A robust email address syntax and deliverability validation library." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, +] + +[package.dependencies] +dnspython = ">=2.0.0" +idna = ">=2.0.0" + +[[package]] +name = "escapism" +version = "1.0.1" +description = "Simple, generic API for escaping strings." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "escapism-1.0.1-py2.py3-none-any.whl", hash = "sha256:d28f19edc3cb1ffc36fa238956ecc068695477e748f57157c6dde00a6b77f229"}, + {file = "escapism-1.0.1.tar.gz", hash = "sha256:73256bdfb4f22230f0428fc6efecee61cdc4fad531b6f98b849cb9c80711e4ec"}, +] + +[[package]] +name = "frozenlist" +version = "1.6.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e6e558ea1e47fd6fa8ac9ccdad403e5dd5ecc6ed8dda94343056fa4277d5c65e"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4b3cd7334a4bbc0c472164f3744562cb72d05002cc6fcf58adb104630bbc352"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9799257237d0479736e2b4c01ff26b5c7f7694ac9692a426cb717f3dc02fff9b"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a7bb0fe1f7a70fb5c6f497dc32619db7d2cdd53164af30ade2f34673f8b1fc"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:36d2fc099229f1e4237f563b2a3e0ff7ccebc3999f729067ce4e64a97a7f2869"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f27a9f9a86dcf00708be82359db8de86b80d029814e6693259befe82bb58a106"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75ecee69073312951244f11b8627e3700ec2bfe07ed24e3a685a5979f0412d24"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2c7d5aa19714b1b01a0f515d078a629e445e667b9da869a3cd0e6fe7dec78bd"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69bbd454f0fb23b51cadc9bdba616c9678e4114b6f9fa372d462ff2ed9323ec8"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7daa508e75613809c7a57136dec4871a21bca3080b3a8fc347c50b187df4f00c"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:89ffdb799154fd4d7b85c56d5fa9d9ad48946619e0eb95755723fffa11022d75"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:920b6bd77d209931e4c263223381d63f76828bec574440f29eb497cf3394c249"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d3ceb265249fb401702fce3792e6b44c1166b9319737d21495d3611028d95769"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:52021b528f1571f98a7d4258c58aa8d4b1a96d4f01d00d51f1089f2e0323cb02"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0f2ca7810b809ed0f1917293050163c7654cefc57a49f337d5cd9de717b8fad3"}, + {file = "frozenlist-1.6.0-cp310-cp310-win32.whl", hash = "sha256:0e6f8653acb82e15e5443dba415fb62a8732b68fe09936bb6d388c725b57f812"}, + {file = "frozenlist-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:f1a39819a5a3e84304cd286e3dc62a549fe60985415851b3337b6f5cc91907f1"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae8337990e7a45683548ffb2fee1af2f1ed08169284cd829cdd9a7fa7470530d"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:8c952f69dd524558694818a461855f35d36cc7f5c0adddce37e962c85d06eac0"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f5fef13136c4e2dee91bfb9a44e236fff78fc2cd9f838eddfc470c3d7d90afe"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:716bbba09611b4663ecbb7cd022f640759af8259e12a6ca939c0a6acd49eedba"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7b8c4dc422c1a3ffc550b465090e53b0bf4839047f3e436a34172ac67c45d595"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b11534872256e1666116f6587a1592ef395a98b54476addb5e8d352925cb5d4a"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c6eceb88aaf7221f75be6ab498dc622a151f5f88d536661af3ffc486245a626"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62c828a5b195570eb4b37369fcbbd58e96c905768d53a44d13044355647838ff"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c6bd2c6399920c9622362ce95a7d74e7f9af9bfec05fff91b8ce4b9647845a"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49ba23817781e22fcbd45fd9ff2b9b8cdb7b16a42a4851ab8025cae7b22e96d0"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:431ef6937ae0f853143e2ca67d6da76c083e8b1fe3df0e96f3802fd37626e606"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9d124b38b3c299ca68433597ee26b7819209cb8a3a9ea761dfe9db3a04bba584"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:118e97556306402e2b010da1ef21ea70cb6d6122e580da64c056b96f524fbd6a"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb3b309f1d4086b5533cf7bbcf3f956f0ae6469664522f1bde4feed26fba60f1"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54dece0d21dce4fdb188a1ffc555926adf1d1c516e493c2914d7c370e454bc9e"}, + {file = "frozenlist-1.6.0-cp311-cp311-win32.whl", hash = "sha256:654e4ba1d0b2154ca2f096bed27461cf6160bc7f504a7f9a9ef447c293caf860"}, + {file = "frozenlist-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e911391bffdb806001002c1f860787542f45916c3baf764264a52765d5a5603"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9"}, + {file = 
"frozenlist-1.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770"}, + {file = "frozenlist-1.6.0-cp312-cp312-win32.whl", hash = "sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc"}, + {file = "frozenlist-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e"}, + {file = "frozenlist-1.6.0-cp313-cp313-win32.whl", hash = "sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4"}, + {file = "frozenlist-1.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win32.whl", hash = 
"sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:536a1236065c29980c15c7229fbb830dedf809708c10e159b8136534233545f0"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ed5e3a4462ff25ca84fb09e0fada8ea267df98a450340ead4c91b44857267d70"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e19c0fc9f4f030fcae43b4cdec9e8ab83ffe30ec10c79a4a43a04d1af6c5e1ad"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c608f833897501dac548585312d73a7dca028bf3b8688f0d712b7acfaf7fb3"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0dbae96c225d584f834b8d3cc688825911960f003a85cb0fd20b6e5512468c42"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:625170a91dd7261a1d1c2a0c1a353c9e55d21cd67d0852185a5fef86587e6f5f"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1db8b2fc7ee8a940b547a14c10e56560ad3ea6499dc6875c354e2335812f739d"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4da6fc43048b648275a220e3a61c33b7fff65d11bdd6dcb9d9c145ff708b804c"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef8e7e8f2f3820c5f175d70fdd199b79e417acf6c72c5d0aa8f63c9f721646f"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa733d123cc78245e9bb15f29b44ed9e5780dc6867cfc4e544717b91f980af3b"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ba7f8d97152b61f22d7f59491a781ba9b177dd9f318486c5fbc52cde2db12189"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:56a0b8dd6d0d3d971c91f1df75e824986667ccce91e20dca2023683814344791"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5c9e89bf19ca148efcc9e3c44fd4c09d5af85c8a7dd3dbd0da1cb83425ef4983"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1330f0a4376587face7637dfd245380a57fe21ae8f9d360c1c2ef8746c4195fa"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2187248203b59625566cac53572ec8c2647a140ee2738b4e36772930377a533c"}, + {file = "frozenlist-1.6.0-cp39-cp39-win32.whl", hash = "sha256:2b8cf4cfea847d6c12af06091561a89740f1f67f331c3fa8623391905e878530"}, + {file = "frozenlist-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:1255d5d64328c5a0d066ecb0f02034d086537925f1f04b50b1ae60d37afbf572"}, + {file = "frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191"}, + {file = "frozenlist-1.6.0.tar.gz", hash = "sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68"}, +] + +[[package]] +name = "genson" +version = "1.3.0" +description = "GenSON is a powerful, user-friendly JSON Schema generator." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7"}, + {file = "genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37"}, +] + +[[package]] +name = "google-auth" +version = "2.39.0" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "google_auth-2.39.0-py2.py3-none-any.whl", hash = "sha256:0150b6711e97fb9f52fe599f55648950cc4540015565d8fbb31be2ad6e1548a2"}, + {file = "google_auth-2.39.0.tar.gz", hash = "sha256:73222d43cdc35a3aeacbfdcaf73142a97839f10de930550d89ebfe1d0a00cde7"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"] +enterprise-cert = ["cryptography", "pyopenssl"] +pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"] +pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0)"] +testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"] +urllib3 = ["packaging", "urllib3"] + +[[package]] +name = "googleapis-common-protos" +version = "1.70.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"}, + {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"}, +] + +[package.dependencies] +protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0)"] + +[[package]] +name = "greenlet" +version = "3.2.0" +description = "Lightweight in-process concurrent programming" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version == \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" +files = [ + {file = "greenlet-3.2.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:b7a7b7f2bad3ca72eb2fa14643f1c4ca11d115614047299d89bc24a3b11ddd09"}, + {file = "greenlet-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60e77242e38e99ecaede853755bbd8165e0b20a2f1f3abcaa6f0dceb826a7411"}, + {file = "greenlet-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f32d7c70b1c26844fd0e4e56a1da852b493e4e1c30df7b07274a1e5a9b599e"}, + {file = "greenlet-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d97bc1be4bad83b70d8b8627ada6724091af41139616696e59b7088f358583b9"}, + {file = "greenlet-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23f56a0103deb5570c8d6a0bb4ddf8a7a28931973ad7ed7a883460a67e599b32"}, + {file = "greenlet-3.2.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2919b126eeb63ca5fa971501cd20cd6cdb5522369a8e39548bbc73a3e10b8b41"}, + {file = "greenlet-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:844acfd479ee380f3810415e682c9ee941725fb90b45e139bb7fd6f85c6c9a30"}, + {file = "greenlet-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2b986f1a6467710e7ffeeeac1777da0318c95bbfcc467acbd0bd35abc775f558"}, + {file = "greenlet-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:29449a2b82ed7ce11f8668c31ef20d31e9d88cd8329eb933098fab5a8608a93a"}, + {file = "greenlet-3.2.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:b99de16560097b9984409ded0032f101f9555e1ab029440fc6a8b5e76dbba7ac"}, + {file = "greenlet-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0bc5776ac2831c022e029839bf1b9d3052332dcf5f431bb88c8503e27398e31"}, + {file = "greenlet-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dcb1108449b55ff6bc0edac9616468f71db261a4571f27c47ccf3530a7f8b97"}, + {file = "greenlet-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:82a68a25a08f51fc8b66b113d1d9863ee123cdb0e8f1439aed9fc795cd6f85cf"}, + {file = "greenlet-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fee6f518868e8206c617f4084a83ad4d7a3750b541bf04e692dfa02e52e805d"}, + {file = "greenlet-3.2.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6fad8a9ca98b37951a053d7d2d2553569b151cd8c4ede744806b94d50d7f8f73"}, + {file = "greenlet-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e14541f9024a280adb9645143d6a0a51fda6f7c5695fd96cb4d542bb563442f"}, + {file = "greenlet-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7f163d04f777e7bd229a50b937ecc1ae2a5b25296e6001445e5433e4f51f5191"}, + {file = "greenlet-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:39801e633a978c3f829f21022501e7b0c3872683d7495c1850558d1a6fb95ed0"}, + {file = "greenlet-3.2.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:7d08b88ee8d506ca1f5b2a58744e934d33c6a1686dd83b81e7999dfc704a912f"}, + {file = "greenlet-3.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58ef3d637c54e2f079064ca936556c4af3989144e4154d80cfd4e2a59fc3769c"}, + {file = "greenlet-3.2.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33ea7e7269d6f7275ce31f593d6dcfedd97539c01f63fbdc8d84e493e20b1b2c"}, + {file = "greenlet-3.2.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e61d426969b68b2170a9f853cc36d5318030494576e9ec0bfe2dc2e2afa15a68"}, + {file = "greenlet-3.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:04e781447a4722e30b4861af728cb878d73a3df79509dc19ea498090cea5d204"}, + {file = "greenlet-3.2.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b2392cc41eeed4055978c6b52549ccd9effd263bb780ffd639c0e1e7e2055ab0"}, + {file = "greenlet-3.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:430cba962c85e339767235a93450a6aaffed6f9c567e73874ea2075f5aae51e1"}, + {file = "greenlet-3.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:5e57ff52315bfc0c5493917f328b8ba3ae0c0515d94524453c4d24e7638cbb53"}, + {file = "greenlet-3.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:211a9721f540e454a02e62db7956263e9a28a6cf776d4b9a7213844e36426333"}, + {file = "greenlet-3.2.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:b86a3ccc865ae601f446af042707b749eebc297928ea7bd0c5f60c56525850be"}, + {file = "greenlet-3.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:144283ad88ed77f3ebd74710dd419b55dd15d18704b0ae05935766a93f5671c5"}, + {file = "greenlet-3.2.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5be69cd50994b8465c3ad1467f9e63001f76e53a89440ad4440d1b6d52591280"}, + {file = "greenlet-3.2.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:47aeadd1e8fbdef8fdceb8fb4edc0cbb398a57568d56fd68f2bc00d0d809e6b6"}, + {file = "greenlet-3.2.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18adc14ab154ca6e53eecc9dc50ff17aeb7ba70b7e14779b26e16d71efa90038"}, + {file = "greenlet-3.2.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e8622b33d8694ec373ad55050c3d4e49818132b44852158442e1931bb02af336"}, + {file = "greenlet-3.2.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:e8ac9a2c20fbff3d0b853e9ef705cdedb70d9276af977d1ec1cde86a87a4c821"}, + {file = "greenlet-3.2.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:cd37273dc7ca1d5da149b58c8b3ce0711181672ba1b09969663905a765affe21"}, + {file = "greenlet-3.2.0-cp313-cp313-win_amd64.whl", hash = "sha256:8a8940a8d301828acd8b9f3f85db23069a692ff2933358861b19936e29946b95"}, + {file = "greenlet-3.2.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee59db626760f1ca8da697a086454210d36a19f7abecc9922a2374c04b47735b"}, + {file = "greenlet-3.2.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7154b13ef87a8b62fc05419f12d75532d7783586ad016c57b5de8a1c6feeb517"}, + {file = "greenlet-3.2.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:199453d64b02d0c9d139e36d29681efd0e407ed8e2c0bf89d88878d6a787c28f"}, + {file = "greenlet-3.2.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0010e928e1901d36625f21d008618273f9dda26b516dbdecf873937d39c9dff0"}, + {file = "greenlet-3.2.0-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6005f7a86de836a1dc4b8d824a2339cdd5a1ca7cb1af55ea92575401f9952f4c"}, + {file = "greenlet-3.2.0-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:17fd241c0d50bacb7ce8ff77a30f94a2d0ca69434ba2e0187cf95a5414aeb7e1"}, + {file = "greenlet-3.2.0-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:7b17a26abc6a1890bf77d5d6b71c0999705386b00060d15c10b8182679ff2790"}, + {file = "greenlet-3.2.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:397b6bbda06f8fe895893d96218cd6f6d855a6701dc45012ebe12262423cec8b"}, + {file = "greenlet-3.2.0-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:4174fa6fa214e8924cedf332b6f2395ba2b9879f250dacd3c361b2fca86f58af"}, + {file = "greenlet-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6017a4d430fad5229e397ad464db504ae70cb7b903757c4688cee6c25d6ce8d8"}, + {file = "greenlet-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78b721dfadc60e3639141c0e1f19d23953c5b4b98bfcaf04ce40f79e4f01751c"}, + {file = "greenlet-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:8fd2583024ff6cd5d4f842d446d001de4c4fe1264fdb5f28ddea28f6488866df"}, + {file = "greenlet-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da3bd464c2cc411b723e3d4afc27b13c219ac077ba897bac88443ae45f5ec"}, + {file = "greenlet-3.2.0-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2688b3bd3198cc4bad7a79648a95fee088c24a0f6abd05d3639e6c3040ded015"}, + {file = "greenlet-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:1cf89e2d92bae0d7e2d6093ce0bed26feeaf59a5d588e3984e35fcd46fc41090"}, + {file = "greenlet-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b3538711e7c0efd5f7a8fc1096c4db9598d6ed99dc87286b31e4ce9f8a8da67"}, + {file = "greenlet-3.2.0-cp39-cp39-win32.whl", hash = "sha256:ce531d7c424ef327a391de7a9777a6c93a38e1f89e18efa903a1c4ba11f85905"}, + {file = "greenlet-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:7b162de2fb61b4c7f4b5d749408bf3280cae65db9b5a6aaf7f922ac829faa67c"}, + {file = "greenlet-3.2.0.tar.gz", hash = "sha256:1d2d43bd711a43db8d9b9187500e6432ddb4fafe112d082ffabca8660a9e01a7"}, +] + +[package.extras] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] + +[[package]] +name = "grpc-interceptor" +version = "0.15.4" +description = "Simplifies gRPC interceptors" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "grpc-interceptor-0.15.4.tar.gz", hash = "sha256:1f45c0bcb58b6f332f37c637632247c9b02bc6af0fdceb7ba7ce8d2ebbfb0926"}, + {file = "grpc_interceptor-0.15.4-py3-none-any.whl", hash = "sha256:0035f33228693ed3767ee49d937bac424318db173fef4d2d0170b3215f254d9d"}, +] + +[package.dependencies] +grpcio = ">=1.49.1,<2.0.0" + +[package.extras] +testing = ["protobuf (>=4.21.9)"] + +[[package]] +name = "grpcio" +version = "1.71.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, + {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"}, + {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"}, + {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"}, + {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"}, + {file = 
"grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"}, + {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"}, + {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"}, + {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"}, + {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"}, + {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"}, + {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"}, + {file = "grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"}, + {file = "grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"}, + {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"}, + {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"}, + {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"}, + {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"}, + {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"}, + {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"}, + {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.71.0)"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "html5tagger" +version = "1.3.0" +description = "Pythonic HTML generation/templating (no template files)" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "html5tagger-1.3.0-py3-none-any.whl", hash = "sha256:ce14313515edffec8ed8a36c5890d023922641171b4e6e5774ad1a74998f5351"}, + {file = "html5tagger-1.3.0.tar.gz", hash = "sha256:84fa3dfb49e5c83b79bbd856ab7b1de8e2311c3bb46a8be925f119e3880a8da9"}, +] + +[[package]] +name = "httpcore" +version = "1.0.8" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be"}, + {file = "httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httptools" +version = "0.6.4" +description = "A collection of framework independent HTTP protocol utils." +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = 
"httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = 
"httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, +] + +[package.extras] +test = ["Cython (>=0.29.24)"] + +[[package]] +name = "httpx" +version = "0.28.1" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" + +[package.extras] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "httpx-ws" +version = "0.7.2" +description = "WebSockets support for HTTPX" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "httpx_ws-0.7.2-py3-none-any.whl", hash = "sha256:dd7bf9dbaa96dcd5cef1af3a7e1130cfac068bebecce25a74145022f5a8427a3"}, + {file = "httpx_ws-0.7.2.tar.gz", hash = "sha256:93edea6c8fc313464fc287bff7d2ad20e6196b7754c76f946f73b4af79886d4e"}, +] + +[package.dependencies] +anyio = ">=4" +httpcore = ">=1.0.4" +httpx = ">=0.23.1" +wsproto = "*" + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "inflect" +version = "5.6.2" +description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "inflect-5.6.2-py3-none-any.whl", hash = "sha256:b45d91a4a28a4e617ff1821117439b06eaa86e2a4573154af0149e9be6687238"}, + {file = "inflect-5.6.2.tar.gz", hash = "sha256:aadc7ed73928f5e014129794bbac03058cca35d0a973a5fc4eb45c7fa26005f9"}, +] + +[package.extras] +docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\""] + +[[package]] +name = "isort" +version = "5.13.2" +description = "A Python utility / library to sort Python imports." 
+optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, + {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, +] + +[package.extras] +colors = ["colorama (>=0.4.6)"] + +[[package]] +name = "jinja2" +version = "3.1.6" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + +[[package]] +name = "kr8s" +version = "0.20.7" +description = "A Kubernetes API library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "kr8s-0.20.7-py3-none-any.whl", hash = "sha256:e489b97ff513c167f427f479ad5420c78adffd1a6ce5033b079109374200c0c6"}, + {file = "kr8s-0.20.7.tar.gz", hash = "sha256:ac45e966beea0f6f92f635b3e61e64b8e27962b4825d77b814a663e819a8ec16"}, +] + +[package.dependencies] +anyio = ">=3.7.0" +asyncache = ">=0.3.1" +cryptography = ">=35" +httpx = ">=0.24.1" +httpx-ws = ">=0.7.0" +python-box = ">=7.0.1" +python-jsonpath = ">=0.7.1" +pyyaml = ">=6.0" +typing-extensions = ">=4.12.2" + +[package.extras] +docs = ["furo (>=2023.3.27)", "myst-parser (>=1.0.0)", "sphinx (>=5.3.0)", "sphinx-autoapi (>=2.1.0)", "sphinx-autobuild (>=2021.3.14)", "sphinx-copybutton (>=0.5.1)", "sphinx-design (>=0.3.0)", "sphinxcontrib-mermaid (>=0.8.1)"] +test = ["kubernetes (>=26.1.0)", "kubernetes-asyncio (>=24.2.3)", "kubernetes-validate (>=1.28.0)", "lightkube (>=0.13.0)", "pykube-ng (>=23.6.0)", "pytest (>=7.2.2)", "pytest-asyncio (>=0.20.3)", "pytest-cov (>=4.0.0)", "pytest-kind (>=22.11.1)", "pytest-rerunfailures (>=11.1.2)", "pytest-timeout (>=2.1.0)", "trio (>=0.22.0)", "types-pyyaml (>=6.0)"] + +[[package]] +name = "kubernetes" +version = "31.0.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, + {file = "kubernetes-31.0.0.tar.gz", hash = "sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +durationpy = ">=0.7" +google-auth = ">=1.0.1" +oauthlib = ">=3.2.2" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +six = ">=1.9.0" +urllib3 = ">=1.24.2" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + +[[package]] +name = "kubernetes-asyncio" +version = "32.3.0" +description = "Kubernetes asynchronous python client" +optional = false +python-versions = "*" +groups = ["main"] 
+files = [
+    {file = "kubernetes_asyncio-32.3.0-py3-none-any.whl", hash = "sha256:3a0769d4bf39c638e474c76cd22f4aa81903db5ebd14573c1e3b3b7ebbf86fbc"},
+    {file = "kubernetes_asyncio-32.3.0.tar.gz", hash = "sha256:3efdc39776f4e1c892ce08b74364e67be6c1d6870cba01ab27bb296fdc6fc485"},
+]
+
+[package.dependencies]
+aiohttp = ">=3.9.0,<4.0.0"
+certifi = ">=14.05.14"
+python-dateutil = ">=2.5.3"
+pyyaml = ">=3.12"
+six = ">=1.9.0"
+urllib3 = ">=1.24.2"
+
+[[package]]
+name = "lark"
+version = "0.12.0"
+description = "a modern parsing library"
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+    {file = "lark-0.12.0-py2.py3-none-any.whl", hash = "sha256:ed1d891cbcf5151ead1c1d14663bf542443e579e63a76ae175b01b899bd854ca"},
+    {file = "lark-0.12.0.tar.gz", hash = "sha256:7da76fcfddadabbbbfd949bbae221efd33938451d90b1fefbbc423c3cccf48ef"},
+]
+
+[package.extras]
+atomic-cache = ["atomicwrites"]
+nearley = ["js2py"]
+regex = ["regex"]
+
+[[package]]
+name = "mako"
+version = "1.3.10"
+description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"},
+    {file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"},
+]
+
+[package.dependencies]
+MarkupSafe = ">=0.9.2"
+
+[package.extras]
+babel = ["Babel"]
+lingua = ["lingua"]
+testing = ["pytest"]
+
+[[package]]
+name = "markdown-code-runner"
+version = "2.2.0"
+description = "Automatically execute code blocks within a Markdown file and update the output in-place"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "markdown_code_runner-2.2.0-py3-none-any.whl", hash = "sha256:d8812c48ad3fd4a3f3725dfcd5a1b7e5baf7216855eeea8a92c7fd9120717ac6"},
+    {file = "markdown_code_runner-2.2.0.tar.gz", hash = "sha256:3c495998a437bc7d7a4b1a5ce518bce10cf5ba0fa69c569fee1e32c5238603c4"},
+]
+
+[package.extras]
+test = ["coverage", "pre-commit", "pytest", "pytest-cov"]
+
+[[package]]
+name = "markupsafe"
+version = "3.0.2"
+description = "Safely add untrusted strings to HTML/XML markup."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"},
+    {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"},
+    {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"},
+    {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"},
+    {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"},
+    {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"},
+    {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"},
+]
+
+[[package]]
+name = "marshmallow"
+version = "3.26.1"
+description = "A lightweight library for converting complex datatypes to and from native Python datatypes."
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c"},
+    {file = "marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6"},
+]
+
+[package.dependencies]
+packaging = ">=17.0"
+
+[package.extras]
+dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"]
+docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"]
+tests = ["pytest", "simplejson"]
+
+[[package]]
+name = "monotonic"
+version = "1.6"
+description = "An implementation of time.monotonic() for Python 2 & < 3.3"
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+    {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"},
+    {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"},
+]
+
+[[package]]
+name = "multidict"
+version = "6.4.3"
+description = "multidict implementation"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5"},
+    {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188"},
+    {file = "multidict-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7"},
+    {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291"},
+    {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685"},
+    {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf"},
+    {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1"},
+    {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef"},
+    {file = "multidict-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9"},
+    {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078"},
+    {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7"},
+    {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451"},
+    {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666"},
+    {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c"},
+ {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5"}, + {file = "multidict-6.4.3-cp310-cp310-win32.whl", hash = "sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e"}, + {file = "multidict-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7"}, + {file = "multidict-6.4.3-cp311-cp311-win32.whl", hash = "sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378"}, + {file = "multidict-6.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a"}, + {file = "multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124"}, + {file = "multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb"}, + {file = 
"multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8"}, + {file = "multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3"}, + {file = "multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504"}, + {file = 
"multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4"}, + {file = "multidict-6.4.3-cp313-cp313t-win32.whl", hash = "sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5"}, + {file = "multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df"}, + {file = "multidict-6.4.3-cp39-cp39-win32.whl", hash = "sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f"}, + {file = "multidict-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897"}, + {file = "multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9"}, + {file = "multidict-6.4.3.tar.gz", hash = 
"sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.5" +groups = ["main"] +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "parsy" +version = "2.1" +description = "Easy-to-use parser combinators, for parsing in pure Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "parsy-2.1-py3-none-any.whl", hash = "sha256:8f18e7b11985e7802e7e3ecbd8291c6ca243d29820b1186e4c84605db4efffa0"}, + {file = "parsy-2.1.tar.gz", hash = "sha256:fd5dd18d7b0b61f8275ee88665f430a20c02cf5a82d88557f35330530186d7ac"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "platformdirs" +version = "4.3.7" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, + {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "posthog" +version = "3.25.0" +description = "Integrate PostHog into any python application." 
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+    {file = "posthog-3.25.0-py2.py3-none-any.whl", hash = "sha256:85db78c13d1ecb11aed06fad53759c4e8fb3633442c2f3d0336bc0ce8a585d30"},
+    {file = "posthog-3.25.0.tar.gz", hash = "sha256:9168f3e7a0a5571b6b1065c41b3c171fbc68bfe72c3ac0bfd6e3d2fcdb7df2ca"},
+]
+
+[package.dependencies]
+backoff = ">=1.10.0"
+distro = ">=1.5.0"
+monotonic = ">=1.5"
+python-dateutil = ">2.1"
+requests = ">=2.7,<3.0"
+six = ">=1.5"
+
+[package.extras]
+dev = ["black", "django-stubs", "flake8", "flake8-print", "isort", "lxml", "mypy", "mypy-baseline", "pre-commit", "pydantic", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six"]
+langchain = ["langchain (>=0.2.0)"]
+sentry = ["django", "sentry-sdk"]
+test = ["anthropic", "coverage", "django", "flake8", "freezegun (==1.5.1)", "langchain-anthropic (>=0.2.0)", "langchain-community (>=0.2.0)", "langchain-openai (>=0.2.0)", "langgraph", "mock (>=2.0.0)", "openai", "parameterized (>=0.8.1)", "pydantic", "pylint", "pytest", "pytest-asyncio", "pytest-timeout"]
+
+[[package]]
+name = "prometheus-client"
+version = "0.7.1"
+description = "Python client for the Prometheus monitoring system."
+optional = false
+python-versions = "*"
+groups = ["main"]
+files = [
+    {file = "prometheus_client-0.7.1.tar.gz", hash = "sha256:71cd24a2b3eb335cb800c7159f423df1bd4dcd5171b234be15e3f31ec9f622da"},
+]
+
+[package.extras]
+twisted = ["twisted"]
+
+[[package]]
+name = "prometheus-sanic"
+version = "3.0.0"
+description = "Exposes Prometheus monitoring metrics of Sanic apps."
+optional = false
+python-versions = ">=3.7,<4.0"
+groups = ["main"]
+files = [
+    {file = "prometheus-sanic-3.0.0.tar.gz", hash = "sha256:06cfe8f9c843a1324fa801b9092f26470a63196b9e08fad0c0f12b49ddbf6c3c"},
+    {file = "prometheus_sanic-3.0.0-py3-none-any.whl", hash = "sha256:499110bf2a86f921b229083e0bcea4d489420abf6737e0d838cd234394fd91aa"},
+]
+
+[package.dependencies]
+prometheus-client = ">=0.7.1,<0.8.0"
+sanic = ">=22.0.0"
+
+[[package]]
+name = "propcache"
+version = "0.3.1"
+description = "Accelerated property cache"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98"},
+    {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180"},
+    {file = "propcache-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71"},
+    {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649"},
+    {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f"},
+    {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229"},
+    {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46"},
+    {file = "propcache-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7"},
+    {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0"},
+    {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519"},
+    {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd"},
+    {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259"},
+    {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e"},
+    {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136"},
+    {file = "propcache-0.3.1-cp310-cp310-win32.whl", hash = "sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42"},
+    {file = "propcache-0.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833"},
+    {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5"},
+    {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371"},
+    {file = "propcache-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da"},
+    {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744"},
+    {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0"},
+    {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5"},
+    {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256"},
+    {file = "propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073"},
+    {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d"},
+    {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f"},
+    {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0"},
+    {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a"},
+    {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a"},
+    {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9"},
+    {file = "propcache-0.3.1-cp311-cp311-win32.whl", hash = "sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005"},
+    {file = "propcache-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7"},
+    {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723"},
+    {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976"},
+    {file = "propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b"},
+    {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f"},
+    {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70"},
+    {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7"},
+    {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25"},
+    {file = "propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277"},
+    {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8"},
+    {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e"},
+    {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee"},
+    {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815"},
+    {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5"},
+    {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7"},
+    {file = "propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b"},
+    {file = "propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3"},
+    {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8"},
+    {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f"},
+    {file = "propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111"},
+    {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5"},
+    {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb"},
+    {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7"},
+    {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120"},
+    {file = "propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654"},
+    {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e"},
+    {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b"},
+    {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53"},
+    {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5"},
+    {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7"},
+    {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef"},
+    {file = "propcache-0.3.1-cp313-cp313-win32.whl", hash = "sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24"},
+    {file = "propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037"},
+    {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f"},
+    {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c"},
+    {file = "propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc"},
+    {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de"},
+    {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6"},
+    {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7"},
+    {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458"},
+    {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11"},
+    {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c"},
+    {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf"},
+    {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27"},
+    {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757"},
+    {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18"},
+    {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a"},
+    {file = "propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d"},
+    {file = "propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e"},
+    {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6"},
+    {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf"},
+    {file = "propcache-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c"},
+    {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894"},
+    {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035"},
+    {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908"},
+    {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5"},
+    {file = "propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5"},
+    {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7"},
+    {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641"},
+    {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294"},
+    {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf"},
+    {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c"},
+    {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe"},
+    {file = "propcache-0.3.1-cp39-cp39-win32.whl", hash = "sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64"},
+    {file = "propcache-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566"},
+    {file = "propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40"},
+    {file = "propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf"},
+]
+
+[[package]]
+name = "protobuf"
+version = "5.29.4"
+description = ""
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"},
+    {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"},
+    {file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"},
+    {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"},
+    {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"},
+    {file = "protobuf-5.29.4-cp38-cp38-win32.whl", hash = "sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de"},
+    {file = "protobuf-5.29.4-cp38-cp38-win_amd64.whl", hash = "sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68"},
+    {file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"},
+    {file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"},
+    {file = "protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"},
+    {file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"},
+]
+
+[[package]]
+name = "protovalidate"
+version = "0.7.1"
+description = "Protocol Buffer Validation for Python"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "protovalidate-0.7.1-py3-none-any.whl", hash = "sha256:6788b1baa10c2e9453c3a3eef5f87a3e9c871bc9a7110b506aefd764269c8b3e"},
+    {file = "protovalidate-0.7.1.tar.gz", hash = "sha256:12bd7c126fc000c5cbee5bf0f4cd01e0ba0e353f585b0aaa68df03e788939412"},
+]
+
+[package.dependencies]
+cel-python = "*"
+protobuf = "*"
+
+[[package]]
+name = "psycopg"
+version = "3.2.6"
+description = "PostgreSQL database adapter for Python"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "psycopg-3.2.6-py3-none-any.whl", hash = "sha256:f3ff5488525890abb0566c429146add66b329e20d6d4835662b920cbbf90ac58"},
+    {file = "psycopg-3.2.6.tar.gz", hash = "sha256:16fa094efa2698f260f2af74f3710f781e4a6f226efe9d1fd0c37f384639ed8a"},
+]
+
+[package.dependencies]
+psycopg-binary = {version = "3.2.6", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""}
+tzdata = {version = "*", markers = "sys_platform == \"win32\""}
+
+[package.extras]
+binary = ["psycopg-binary (==3.2.6) ; implementation_name != \"pypy\""]
+c = ["psycopg-c (==3.2.6) ; implementation_name != \"pypy\""]
+dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"]
+docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"]
+pool = ["psycopg-pool"]
+test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"]
+
+[[package]]
+name = "psycopg-binary"
+version = "3.2.6"
+description = "PostgreSQL database adapter for Python -- C optimisation distribution"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+markers = "implementation_name != \"pypy\""
+files = [
+    {file = "psycopg_binary-3.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1b639acb3e24243c23f75700bf6e3af7b76da92523ec7c3196a13aaf0b578453"},
+    {file = "psycopg_binary-3.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1b5c359173726b38d7acbb9f73270f269591d8031d099c1a70dd3f3d22b0e8a8"},
+    {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3434efe7c00f505f4c1e531519dac6c701df738ba7a1328eac81118d80019132"},
+    {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bca8d9643191b13193940bbf84d51ac5a747e965c230177258fb02b8043fb7a"},
+    {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55fa40f11d37e6e5149a282a5fd7e0734ce55c623673bfba638480914fd1414c"},
+    {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0690ac1061c655b1bcbe9284d07bf5276bc9c0d788a6c74aaf3b042e64984b83"},
+    {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e9a4a9967ff650d2821d5fad6bec7b15f4c2072603e9fa3f89a39f351ade1fd3"},
+    {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d6f2894cc7aee8a15fe591e8536911d9c015cb404432cf7bdac2797e54cb2ba8"},
+    {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:05560c81312d7c2bee95a9860cd25198677f2320fb4a3527bc04e8cae7fcfb64"},
+    {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4269cd23a485d6dd6eb6b10841c94551a53091cf0b1b6d5247a6a341f53f0d95"},
+    {file = "psycopg_binary-3.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:7942f35a6f314608720116bcd9de240110ceadffd2ac5c34f68f74a31e52e46a"},
+    {file = "psycopg_binary-3.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7afe181f6b3eb714362e9b6a2dc2a589bff60471a1d8639fd231a4e426e01523"},
+    {file = "psycopg_binary-3.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34bb0fceba0773dc0bfb53224bb2c0b19dc97ea0a997a223615484cf02cae55c"},
+    {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54120122d2779dcd307f49e1f921d757fe5dacdced27deab37f277eef0c52a5b"},
+    {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:816aa556f63b2303e66ba6c8888a8b3f3e6e4e47049ec7a4d62c84ac60b091ca"},
+    {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19a0ba351eda9a59babf8c7c9d89c7bbc5b26bf096bc349b096bd0dd2482088"},
+    {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e197e01290ef818a092c877025fc28096adbb6d0743e313491a21aab31bd96"},
+    {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:274794b4b29ef426e09086404446b61a146f5e756da71366c5a6d57abec31f7d"},
+    {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:69845bdc0db519e1dfc27932cd3d5b1ecb3f72950af52a1987508ab0b52b3b55"},
+    {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:66c3bed2caf0d1cabcb9365064de183b5209a7cbeaa131e79e68f350c9c963c2"},
+    {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e3ae3201fe85c7f901349a2cf52f02ceca4cb97a5e2e2ac8b8a1c9a6eb747bed"},
+    {file = "psycopg_binary-3.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:58f443b4df2adb59937c96775fadf4967f93d952fbcc82394446985faec11041"},
+    {file = "psycopg_binary-3.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f27a46ff0497e882e8c0286e8833c785b4d1a80f23e1bf606f4c90e5f9f3ce75"},
+    {file = "psycopg_binary-3.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b30ee4821ded7de48b8048b14952512588e7c5477b0a5965221e1798afba61a1"},
+    {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e57edf3b1f5427f39660225b01f8e7b97f5cfab132092f014bf1638bc85d81d2"},
+    {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c5172ce3e4ae7a4fd450070210f801e2ce6bc0f11d1208d29268deb0cda34de"},
+    {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcfab3804c43571a6615e559cdc4c4115785d258a4dd71a721be033f5f5f378d"},
+    {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fa1c920cce16f1205f37b20c685c58b9656b170b8b4c93629100d342d0d118e"},
+    {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e118d818101c1608c6b5ba52a6c977614d8f05aa89467501172ba4d10588e11"},
+    {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:763319a8bfeca77d31512da71f5a33459b9568a7621c481c3828c62f9c38f351"},
+    {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2fbc05819560389dbece046966bc88e0f2ea77673497e274c4293b8b4c1d0703"},
+    {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a57f99bb953b4bd6f32d0a9844664e7f6ca5ead9ba40e96635be3cd30794813"},
+    {file = "psycopg_binary-3.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:5de6809e19a465dcb9c269675bded46a135f2d600cd99f0735afbb21ddad2af4"},
+    {file = "psycopg_binary-3.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54af3fbf871baa2eb19df96fd7dc0cbd88e628a692063c3d1ab5cdd00aa04322"},
+    {file = "psycopg_binary-3.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ad5da1e4636776c21eaeacdec42f25fa4612631a12f25cd9ab34ddf2c346ffb9"},
+    {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7956b9ea56f79cd86eddcfbfc65ae2af1e4fe7932fa400755005d903c709370"},
+    {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e2efb763188008cf2914820dcb9fb23c10fe2be0d2c97ef0fac7cec28e281d8"},
+    {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b3aab3451679f1e7932270e950259ed48c3b79390022d3f660491c0e65e4838"},
+    {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:849a370ac4e125f55f2ad37f928e588291a67ccf91fa33d0b1e042bb3ee1f986"},
+    {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:566d4ace928419d91f1eb3227fc9ef7b41cf0ad22e93dd2c3368d693cf144408"},
+    {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f1981f13b10de2f11cfa2f99a8738b35b3f0a0f3075861446894a8d3042430c0"},
+    {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:36f598300b55b3c983ae8df06473ad27333d2fd9f3e2cfdb913b3a5aaa3a8bcf"},
+    {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0f4699fa5fe1fffb0d6b2d14b31fd8c29b7ea7375f89d5989f002aaf21728b21"},
+    {file = "psycopg_binary-3.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:afe697b8b0071f497c5d4c0f41df9e038391534f5614f7fb3a8c1ca32d66e860"},
+    {file = "psycopg_binary-3.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da5554553b8d9fb7ab6bb1a37cc53f20ada9024916c60f40c09ab1a675323f2f"},
+    {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b7e3ccc43c395edba8039c9e407b01ed1844304c7f2f4aa99d34d04ed067c83"},
+    {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d55405efc8a96aa0ecb2d5d6af552d35c744f160b133fa690814a68d9a952c8"},
+    {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:58d5cfb1687b69b3484a034d1aa6e5c11f0c1d46757e978ed59fab59ce83fd37"},
+    {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3761c4107dab218c32ce4b10b1ae5ed686d41b882bfcb05f5bebc2be9488442f"},
+    {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:45f1526e12cb480586c74670f46563d3090fc2a93e859ccf71efae61f04cef4b"},
+    {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b4d4fd4415d5219785fb082e28d84be4fbd90c3bff3d861877db0aa6b0edd70b"},
+    {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:eb8a1e6b8130fee0b48107739e09553d50c6f031d0b3fcc33f885bb64fa01105"},
+    {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7adf1460c05f7366f0fe9cf2d24e46abca9eb621705322bbd0c3f3e3a5edb2b4"},
+    {file = "psycopg_binary-3.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:28505f52ceef60554b5ab3289bf5aed2e7e57fa8e9a59a979d82db944e256a6c"},
+    {file = "psycopg_binary-3.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:260c43c329e668606388cee78ec0dab083a25c2c6e6f9cf74a130fd5a27b0f87"},
+    {file = "psycopg_binary-3.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9870e51fad4684dbdec057fa757d65e61cb2acb16236836e9360044c2a1ec880"},
+    {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030e9c3082a931e972b029b3cef085784a3bf7f8e18367ae50d5b809aa6e1d87"},
+    {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60c9ed291fbd5e777c2c630dcfd10b7a87d68512b0757d5e7406d9c4895a82a"},
+    {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e0f4a17a9c376c195e403b4826c18f325bd28f425231d36d1036258bf893e23"},
+    {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac46da609624b16d961f604b3cbc3233ef43211ef1456a188f8c427109c9c3e1"},
+    {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e77949b8e7014b85cee0bf6e9e041bcae7719b2693ebf59236368fb0b2a08814"},
+    {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:532322d9ef6e7d178a4f344970b017110633bcc3dc1c3403efcef55aad612517"},
+    {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:880c5fd76dcb50bdcc8f87359e5a6c7eb416697cc9aa02854c91223bd999c045"},
+    {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3c0cddc7458b8416d77cd8829d0192466502f31d1fb853d58613cf13ac64f41c"},
+    {file = "psycopg_binary-3.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:ea158665676f42b19585dfe948071d3c5f28276f84a97522fb2e82c1d9194563"},
+]
+
+[[package]]
+name = "pyasn1"
+version = "0.6.1"
+description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs
(X.208)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.4.2" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, +] + +[package.dependencies] +pyasn1 = ">=0.6.1,<0.7.0" + +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + +[[package]] +name = "pydantic" +version = "2.11.3" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"}, + {file = "pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} +pydantic-core = "2.33.1" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3077cfdb6125cc8dab61b155fdd714663e401f0e6883f9632118ec12cf42df26"}, + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ffab8b2908d152e74862d276cf5017c81a2f3719f14e8e3e8d6b83fda863927"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5183e4f6a2d468787243ebcd70cf4098c247e60d73fb7d68d5bc1e1beaa0c4db"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:398a38d323f37714023be1e0285765f0a27243a8b1506b7b7de87b647b517e48"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d3776f0001b43acebfa86f8c64019c043b55cc5a6a2e313d728b5c95b46969"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c566dd9c5f63d22226409553531f89de0cac55397f2ab8d97d6f06cfce6d947e"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d5f3acc81452c56895e90643a625302bd6be351e7010664151cc55b7b97f89"}, + {file = 
"pydantic_core-2.33.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3a07fadec2a13274a8d861d3d37c61e97a816beae717efccaa4b36dfcaadcde"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f99aeda58dce827f76963ee87a0ebe75e648c72ff9ba1174a253f6744f518f65"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:902dbc832141aa0ec374f4310f1e4e7febeebc3256f00dc359a9ac3f264a45dc"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fe44d56aa0b00d66640aa84a3cbe80b7a3ccdc6f0b1ca71090696a6d4777c091"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win32.whl", hash = "sha256:ed3eb16d51257c763539bde21e011092f127a2202692afaeaccb50db55a31383"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win_amd64.whl", hash = "sha256:694ad99a7f6718c1a498dc170ca430687a39894a60327f548e02a9c7ee4b6504"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win32.whl", hash = "sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_arm64.whl", hash = "sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e"}, + {file = 
"pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5ab77f45d33d264de66e1884fca158bc920cb5e27fd0764a72f72f5756ae8bdb"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7aaba1b4b03aaea7bb59e1b5856d734be011d3e6d98f5bcaa98cb30f375f2ad"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fb66263e9ba8fea2aa85e1e5578980d127fb37d7f2e292773e7bc3a38fb0c7b"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f2648b9262607a7fb41d782cc263b48032ff7a03a835581abbf7a3bec62bcf5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:723c5630c4259400818b4ad096735a829074601805d07f8cafc366d95786d331"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d100e3ae783d2167782391e0c1c7a20a31f55f8015f3293647544df3f9c67824"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177d50460bc976a0369920b6c744d927b0ecb8606fb56858ff542560251b19e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3edde68d1a1f9af1273b2fe798997b33f90308fb6d44d8550c89fc6a3647cf6"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a62c3c3ef6a7e2c45f7853b10b5bc4ddefd6ee3cd31024754a1a5842da7d598d"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:c91dbb0ab683fa0cd64a6e81907c8ff41d6497c346890e26b23de7ee55353f96"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f466e8bf0a62dc43e068c12166281c2eca72121dd2adc1040f3aa1e21ef8599"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win32.whl", hash = "sha256:ab0277cedb698749caada82e5d099dc9fed3f906a30d4c382d1a21725777a1e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win_amd64.whl", hash = "sha256:5773da0ee2d17136b1f1c6fbde543398d452a6ad2a7b54ea1033e2daa739b8d2"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c834f54f8f4640fd7e4b193f80eb25a0602bba9e19b3cd2fc7ffe8199f5ae02"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:049e0de24cf23766f12cc5cc71d8abc07d4a9deb9061b334b62093dedc7cb068"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1a28239037b3d6f16916a4c831a5a0eadf856bdd6d2e92c10a0da3a59eadcf3e"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d3da303ab5f378a268fa7d45f37d7d85c3ec19769f28d2cc0c61826a8de21fe"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25626fb37b3c543818c14821afe0fd3830bc327a43953bc88db924b68c5723f1"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3ab2d36e20fbfcce8f02d73c33a8a7362980cff717926bbae030b93ae46b56c7"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:2f9284e11c751b003fd4215ad92d325d92c9cb19ee6729ebd87e3250072cdcde"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:048c01eee07d37cbd066fc512b9d8b5ea88ceeb4e629ab94b3e56965ad655add"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5ccd429694cf26af7997595d627dd2637e7932214486f55b8a357edaac9dae8c"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7edbc454a29fc6aeae1e1eecba4f07b63b8d76e76a748532233c4c167b4cb9ea"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ad05b683963f69a1d5d2c2bdab1274a31221ca737dbbceaa32bcb67359453cdd"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df6a94bf9452c6da9b5d76ed229a5683d0306ccb91cca8e1eea883189780d568"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7965c13b3967909a09ecc91f21d09cfc4576bf78140b988904e94f130f188396"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3f1fdb790440a34f6ecf7679e1863b825cb5ffde858a9197f851168ed08371e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:5277aec8d879f8d05168fdd17ae811dd313b8ff894aeeaf7cd34ad28b4d77e33"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8ab581d3530611897d863d1a649fb0644b860286b4718db919bfd51ece41f10b"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0483847fa9ad5e3412265c1bd72aad35235512d9ce9d27d81a56d935ef489672"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:de9e06abe3cc5ec6a2d5f75bc99b0bdca4f5c719a5b34026f8c57efbdecd2ee3"}, + {file = "pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyjwt" +version = "2.10.1" +description = "JSON Web Token implementation in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, +] + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "python-box" +version = "7.3.2" +description = "Advanced Python dictionaries with dot notation access" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_box-7.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d136163294fd61a1554db7dd203f2e3035064798d30c17d67d948f0de5c572de"}, + {file = "python_box-7.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d72e96547d8e2c2c333909826e9fae338d9a7e4cde07d5c6058cdd468432c0"}, + {file = "python_box-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:3aa52e3b5cc50c80bb7ef4be3e41e81d095310f619454a7ffd61eef3209a6225"}, + {file = "python_box-7.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:32163b1cb151883de0da62b0cd3572610dc72ccf0762f2447baf1d2562e25bea"}, + {file = "python_box-7.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:064cb59b41e25aaf7dbd39efe53151a5f6797cc1cb3c68610f0f21a9d406d67e"}, + {file = "python_box-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:488f0fba9a6416c3334b602366dcd92825adb0811e07e03753dfcf0ed79cd6f7"}, + {file = "python_box-7.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:39009a2da5c20133718b24891a206592adbe09169856aedc450ad1600fc2e511"}, + {file = "python_box-7.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2a72e2f6fb97c7e472ff3272da207ecc615aa222e52e98352391428527c469"}, + {file = "python_box-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9eead914b9fb7d98a1473f5027dcfe27d26b3a10ffa33b9ba22cf948a23cd280"}, + {file = "python_box-7.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1dfc3b9b073f3d7cad1fa90de98eaaa684a494d0574bbc0666f74fa8307fd6b6"}, + {file = "python_box-7.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca4685a7f764b5a71b6e08535ce2a96b7964bb63d8cb4df10f6bb7147b6c54b"}, + {file = 
"python_box-7.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e143295f74d47a9ab24562ead2375c9be10629599b57f2e86717d3fff60f82a9"}, + {file = "python_box-7.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f3118ab3076b645c76133b8fac51deee30237cecdcafc3af664c4b9000f04db9"}, + {file = "python_box-7.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a760074ba12ccc247796f43b6c61f686ada4b8349ab59e2a6303b27f3ae082"}, + {file = "python_box-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:ea436e7ff5f87bd728472f1e31a9e6e95572c81028c44a8e00097e0968955638"}, + {file = "python_box-7.3.2-py3-none-any.whl", hash = "sha256:fd7d74d5a848623f93b5221fd9fb00b8c00ff0e130fa87f396277aa188659c92"}, + {file = "python_box-7.3.2.tar.gz", hash = "sha256:028b9917129e67f311932d93347b8a4f1b500d7a5a2870ee3c035f4e7b19403b"}, +] + +[package.extras] +all = ["msgpack", "ruamel.yaml (>=0.17)", "toml"] +msgpack = ["msgpack"] +pyyaml = ["PyYAML"] +ruamel-yaml = ["ruamel.yaml (>=0.17)"] +toml = ["toml"] +tomli = ["tomli ; python_version < \"3.11\"", "tomli-w"] +yaml = ["ruamel.yaml (>=0.17)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-gitlab" +version = "5.6.0" +description = "The python wrapper for the GitLab REST and GraphQL APIs." +optional = false +python-versions = ">=3.9.0" +groups = ["main"] +files = [ + {file = "python_gitlab-5.6.0-py3-none-any.whl", hash = "sha256:68980cd70929fc7f8f06d8a7b09bd046a6b79e1995c19d61249f046005099100"}, + {file = "python_gitlab-5.6.0.tar.gz", hash = "sha256:bc531e8ba3e5641b60409445d4919ace68a2c18cb0ec6d48fbced6616b954166"}, +] + +[package.dependencies] +requests = ">=2.32.0" +requests-toolbelt = ">=1.0.0" + +[package.extras] +autocompletion = ["argcomplete (>=1.10.0,<3)"] +graphql = ["gql[httpx] (>=3.5.0,<4)"] +yaml = ["PyYaml (>=6.0.1)"] + +[[package]] +name = "python-jsonpath" +version = "1.3.0" +description = "JSONPath, JSON Pointer and JSON Patch for Python." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "python_jsonpath-1.3.0-py3-none-any.whl", hash = "sha256:ce586ec5bd934ce97bc2f06600b00437d9684138b77273ced5b70694a8ef3a76"}, + {file = "python_jsonpath-1.3.0.tar.gz", hash = "sha256:ea5eb4d9b1296c8c19cc53538eb0f20fc54128f84571559ee63539e57875fefe"}, +] + +[[package]] +name = "python-ulid" +version = "3.0.0" +description = "Universally unique lexicographically sortable identifier" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, + {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, +] + +[package.extras] +pydantic = ["pydantic (>=2.0)"] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = 
"sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + 
{file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "2.0.0" +description = "OAuthlib authentication support for Requests." 
+optional = false +python-versions = ">=3.4" +groups = ["main"] +files = [ + {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, + {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "requests-toolbelt" +version = "1.0.0" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] +files = [ + {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, + {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "rsa" +version = "4.9.1" +description = "Pure-Python RSA implementation" +optional = false +python-versions = "<4,>=3.6" +groups = ["main"] +files = [ + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "sanic" +version = "24.12.0" +description = "A web server and web framework that's written to go fast. Build fast. Run fast." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "sanic-24.12.0-py3-none-any.whl", hash = "sha256:3c2a01ec0b6c5926e3efe34eac1b497d31ed989038fe213eb25ad0c98687d388"}, + {file = "sanic-24.12.0.tar.gz", hash = "sha256:09c23aa917616c1e60e44c66dfd7582cb9fd6503f78298c309945909f5839836"}, +] + +[package.dependencies] +aiofiles = ">=0.6.0" +html5tagger = ">=1.2.1" +httptools = ">=0.0.10" +multidict = ">=5.0,<7.0" +sanic-ext = {version = "*", optional = true, markers = "extra == \"ext\""} +sanic-routing = ">=23.12.0" +setuptools = ">=70.1.0" +tracerite = ">=1.0.0" +typing-extensions = ">=4.4.0" +ujson = {version = ">=1.35", markers = "sys_platform != \"win32\" and implementation_name == \"cpython\""} +uvloop = {version = ">=0.15.0", markers = "sys_platform != \"win32\" and implementation_name == \"cpython\""} +websockets = ">=10.0" + +[package.extras] +all = ["autodocsumm (>=0.2.11)", "bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "sphinx (>=2.1.2)", "sphinx_rtd_theme (>=0.4.3)", "towncrier", "tox", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] +dev = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "towncrier", "tox", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] +docs = ["autodocsumm (>=0.2.11)", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "pygments", "sphinx (>=2.1.2)", "sphinx_rtd_theme (>=0.4.3)"] +ext = ["sanic-ext"] +http3 
= ["aioquic"] +test = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] + +[[package]] +name = "sanic-ext" +version = "23.12.0" +description = "Extend your Sanic installation with some core functionality." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "sanic-ext-23.12.0.tar.gz", hash = "sha256:42fc41e7fafa58f3b790f685f3dd8a8de281460b4169d0e91f4e11b8747f845c"}, + {file = "sanic_ext-23.12.0-py3-none-any.whl", hash = "sha256:3ba2c143d7c41d89b87a11c6214b9d9b52c3994ff8ce3a03792b54ec5627e2c3"}, +] + +[package.dependencies] +pyyaml = ">=3.0.0" + +[package.extras] +dev = ["Jinja2", "black (>=21.4b2)", "coverage", "flake8 (>=3.7.7)", "isort (>=5.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "sanic-testing (>=22.9.0)", "tox"] +test = ["Jinja2", "coverage", "pytest", "pytest-asyncio", "pytest-cov", "sanic-testing (>=22.9.0)", "tox"] + +[[package]] +name = "sanic-routing" +version = "23.12.0" +description = "Core routing component for Sanic" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "sanic-routing-23.12.0.tar.gz", hash = "sha256:1dcadc62c443e48c852392dba03603f9862b6197fc4cba5bbefeb1ace0848b04"}, + {file = "sanic_routing-23.12.0-py3-none-any.whl", hash = "sha256:1558a72afcb9046ed3134a5edae02fc1552cff08f0fff2e8d5de0877ea43ed73"}, +] + +[[package]] +name = "sentry-sdk" +version = "2.26.1" +description = "Python client for Sentry (https://sentry.io)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "sentry_sdk-2.26.1-py2.py3-none-any.whl", hash = "sha256:e99390e3f217d13ddcbaeaed08789f1ca614d663b345b9da42e35ad6b60d696a"}, + {file = "sentry_sdk-2.26.1.tar.gz", hash = "sha256:759e019c41551a21519a95e6cef6d91fb4af1054761923dadaee2e6eca9c02c7"}, +] + +[package.dependencies] +certifi = "*" +sanic = {version = ">=0.8", optional = true, markers = "extra == \"sanic\""} +urllib3 = ">=1.26.11" + +[package.extras] +aiohttp = ["aiohttp (>=3.5)"] +anthropic = ["anthropic (>=0.16)"] +arq = ["arq (>=0.23)"] +asyncpg = ["asyncpg (>=0.23)"] +beam = ["apache-beam (>=2.12)"] +bottle = ["bottle (>=0.12.13)"] +celery = ["celery (>=3)"] +celery-redbeat = ["celery-redbeat (>=2)"] +chalice = ["chalice (>=1.16.0)"] +clickhouse-driver = ["clickhouse-driver (>=0.2.0)"] +django = ["django (>=1.8)"] +falcon = ["falcon (>=1.4)"] +fastapi = ["fastapi (>=0.79.0)"] +flask = ["blinker (>=1.1)", "flask (>=0.11)", "markupsafe"] +grpcio = ["grpcio (>=1.21.1)", "protobuf (>=3.8.0)"] +http2 = ["httpcore[http2] (==1.*)"] +httpx = ["httpx (>=0.16.0)"] +huey = ["huey (>=2)"] +huggingface-hub = ["huggingface_hub (>=0.22)"] +langchain = ["langchain (>=0.0.210)"] +launchdarkly = ["launchdarkly-server-sdk (>=9.8.0)"] +litestar = ["litestar (>=2.0.0)"] +loguru = ["loguru (>=0.5)"] +openai = ["openai (>=1.0.0)", "tiktoken (>=0.3.0)"] +openfeature = ["openfeature-sdk (>=0.7.1)"] +opentelemetry = ["opentelemetry-distro (>=0.35b0)"] +opentelemetry-experimental = ["opentelemetry-distro"] +pure-eval = ["asttokens", "executing", "pure_eval"] +pymongo = ["pymongo (>=3.1)"] +pyspark = ["pyspark (>=2.4.4)"] +quart = ["blinker (>=1.1)", "quart (>=0.16.1)"] +rq = ["rq (>=0.6)"] +sanic = ["sanic (>=0.8)"] +sqlalchemy = ["sqlalchemy (>=1.2)"] +starlette = ["starlette (>=0.19.1)"] +starlite = ["starlite 
(>=1.48)"] +statsig = ["statsig (>=0.55.3)"] +tornado = ["tornado (>=6)"] +unleash = ["UnleashClient (>=6.0.1)"] + +[[package]] +name = "setuptools" +version = "75.9.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "setuptools-75.9.1-py3-none-any.whl", hash = "sha256:0a6f876d62f4d978ca1a11ab4daf728d1357731f978543ff18ecdbf9fd071f73"}, + {file = "setuptools-75.9.1.tar.gz", hash = "sha256:b6eca2c3070cdc82f71b4cb4bb2946bc0760a210d11362278cf1ff394e6ea32c"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.40" +description = "Database Abstraction Library" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "SQLAlchemy-2.0.40-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867"}, + {file = 
"SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win32.whl", hash = "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win_amd64.whl", hash = "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win32.whl", hash = "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win_amd64.whl", hash = "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win32.whl", hash = "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win_amd64.whl", hash = "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a"}, + {file = 
"sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win32.whl", hash = "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win_amd64.whl", hash = "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win32.whl", hash = "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win_amd64.whl", hash = "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win32.whl", hash = "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win_amd64.whl", hash = "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e"}, + {file = 
"sqlalchemy-2.0.40-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win32.whl", hash = "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win_amd64.whl", hash = "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870"}, + {file = "sqlalchemy-2.0.40-py3-none-any.whl", hash = "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a"}, + {file = "sqlalchemy-2.0.40.tar.gz", hash = "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00"}, +] + +[package.dependencies] +greenlet = {version = ">=1", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +typing-extensions = ">=4.6.0" + +[package.extras] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] +mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] +mssql = ["pyodbc"] +mssql-pymssql = ["pymssql"] +mssql-pyodbc = ["pyodbc"] +mypy = ["mypy (>=0.910)"] +mysql = ["mysqlclient (>=1.4.0)"] +mysql-connector = ["mysql-connector-python"] +oracle = ["cx_oracle (>=8)"] +oracle-oracledb = ["oracledb (>=1.0.1)"] +postgresql = ["psycopg2 (>=2.7)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] +postgresql-pg8000 = ["pg8000 (>=1.29.1)"] +postgresql-psycopg = ["psycopg (>=3.0.7)"] +postgresql-psycopg2binary = ["psycopg2-binary"] +postgresql-psycopg2cffi = ["psycopg2cffi"] +postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] +pymysql = ["pymysql"] +sqlcipher = ["sqlcipher3_binary"] + +[[package]] +name = "tenacity" +version = "9.1.2" +description = "Retry code until it succeeds" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"}, + {file = "tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb"}, +] + +[package.extras] +doc = ["reno", "sphinx"] +test = ["pytest", "tornado (>=4.5)", "typeguard"] + +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = 
"sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + +[[package]] +name = "tracerite" +version = "1.1.1" +description = "Human-readable HTML tracebacks for Python exceptions" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "tracerite-1.1.1-py3-none-any.whl", hash = "sha256:3a787a9ecb1a136ea9ce17e6328e414ec414a4f644130af4e1e330bec2dece29"}, + {file = "tracerite-1.1.1.tar.gz", hash = "sha256:6400a35a187747189e4bb8d4a8e471bd86d14dbdcc94bcad23f4eda023f41356"}, +] + +[package.dependencies] +html5tagger = ">=1.2.1" + +[[package]] +name = "types-python-dateutil" +version = "2.9.0.20241206" +description = "Typing stubs for python-dateutil" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, + {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250402" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "types_pyyaml-6.0.12.20250402-py3-none-any.whl", hash = "sha256:652348fa9e7a203d4b0d21066dfb00760d3cbd5a15ebb7cf8d33c88a49546681"}, + {file = "types_pyyaml-6.0.12.20250402.tar.gz", hash = "sha256:d7c13c3e6d335b6af4b0122a01ff1d270aba84ab96d1a1a1063ecba3e13ec075"}, +] + +[[package]] +name = "typing-extensions" +version = "4.13.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "tzdata" +version = "2025.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, +] + +[[package]] +name = "ujson" +version = "5.10.0" +description = "Ultra fast JSON encoder and decoder for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "sys_platform != \"win32\" and implementation_name == \"cpython\"" +files = [ + {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, + {file = 
"ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22cffecf73391e8abd65ef5f4e4dd523162a3399d5e84faa6aebbf9583df86d6"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26b0e2d2366543c1bb4fbd457446f00b0187a2bddf93148ac2da07a53fe51569"}, + {file = "ujson-5.10.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf270c6dba1be7a41125cd1e4fc7ba384bf564650beef0df2dd21a00b7f5770"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a245d59f2ffe750446292b0094244df163c3dc96b3ce152a2c837a44e7cda9d1"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:94a87f6e151c5f483d7d54ceef83b45d3a9cca7a9cb453dbdbb3f5a6f64033f5"}, + {file = "ujson-5.10.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:29b443c4c0a113bcbb792c88bea67b675c7ca3ca80c3474784e08bba01c18d51"}, + {file = "ujson-5.10.0-cp310-cp310-win32.whl", hash = "sha256:c18610b9ccd2874950faf474692deee4223a994251bc0a083c114671b64e6518"}, + {file = "ujson-5.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:924f7318c31874d6bb44d9ee1900167ca32aa9b69389b98ecbde34c1698a250f"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a5b366812c90e69d0f379a53648be10a5db38f9d4ad212b60af00bd4048d0f00"}, + {file = "ujson-5.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:502bf475781e8167f0f9d0e41cd32879d120a524b22358e7f205294224c71126"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b91b5d0d9d283e085e821651184a647699430705b15bf274c7896f23fe9c9d8"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:129e39af3a6d85b9c26d5577169c21d53821d8cf68e079060602e861c6e5da1b"}, + {file = "ujson-5.10.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f77b74475c462cb8b88680471193064d3e715c7c6074b1c8c412cb526466efe9"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7ec0ca8c415e81aa4123501fee7f761abf4b7f386aad348501a26940beb1860f"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab13a2a9e0b2865a6c6db9271f4b46af1c7476bfd51af1f64585e919b7c07fd4"}, + {file = "ujson-5.10.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:57aaf98b92d72fc70886b5a0e1a1ca52c2320377360341715dd3933a18e827b1"}, + {file = "ujson-5.10.0-cp311-cp311-win32.whl", hash = "sha256:2987713a490ceb27edff77fb184ed09acdc565db700ee852823c3dc3cffe455f"}, + {file = "ujson-5.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:f00ea7e00447918ee0eff2422c4add4c5752b1b60e88fcb3c067d4a21049a720"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:98ba15d8cbc481ce55695beee9f063189dce91a4b08bc1d03e7f0152cd4bbdd5"}, + {file = "ujson-5.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9d2edbf1556e4f56e50fab7d8ff993dbad7f54bac68eacdd27a8f55f433578e"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6627029ae4f52d0e1a2451768c2c37c0c814ffc04f796eb36244cf16b8e57043"}, + {file = "ujson-5.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8ccb77b3e40b151e20519c6ae6d89bfe3f4c14e8e210d910287f778368bb3d1"}, + {file = 
"ujson-5.10.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3caf9cd64abfeb11a3b661329085c5e167abbe15256b3b68cb5d914ba7396f3"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6e32abdce572e3a8c3d02c886c704a38a1b015a1fb858004e03d20ca7cecbb21"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a65b6af4d903103ee7b6f4f5b85f1bfd0c90ba4eeac6421aae436c9988aa64a2"}, + {file = "ujson-5.10.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:604a046d966457b6cdcacc5aa2ec5314f0e8c42bae52842c1e6fa02ea4bda42e"}, + {file = "ujson-5.10.0-cp312-cp312-win32.whl", hash = "sha256:6dea1c8b4fc921bf78a8ff00bbd2bfe166345f5536c510671bccececb187c80e"}, + {file = "ujson-5.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:38665e7d8290188b1e0d57d584eb8110951a9591363316dd41cf8686ab1d0abc"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:618efd84dc1acbd6bff8eaa736bb6c074bfa8b8a98f55b61c38d4ca2c1f7f287"}, + {file = "ujson-5.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38d5d36b4aedfe81dfe251f76c0467399d575d1395a1755de391e58985ab1c2e"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67079b1f9fb29ed9a2914acf4ef6c02844b3153913eb735d4bf287ee1db6e557"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7d0e0ceeb8fe2468c70ec0c37b439dd554e2aa539a8a56365fd761edb418988"}, + {file = "ujson-5.10.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:59e02cd37bc7c44d587a0ba45347cc815fb7a5fe48de16bf05caa5f7d0d2e816"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2a890b706b64e0065f02577bf6d8ca3b66c11a5e81fb75d757233a38c07a1f20"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:621e34b4632c740ecb491efc7f1fcb4f74b48ddb55e65221995e74e2d00bbff0"}, + {file = "ujson-5.10.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b9500e61fce0cfc86168b248104e954fead61f9be213087153d272e817ec7b4f"}, + {file = "ujson-5.10.0-cp313-cp313-win32.whl", hash = "sha256:4c4fc16f11ac1612f05b6f5781b384716719547e142cfd67b65d035bd85af165"}, + {file = "ujson-5.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:4573fd1695932d4f619928fd09d5d03d917274381649ade4328091ceca175539"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a984a3131da7f07563057db1c3020b1350a3e27a8ec46ccbfbf21e5928a43050"}, + {file = "ujson-5.10.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:73814cd1b9db6fc3270e9d8fe3b19f9f89e78ee9d71e8bd6c9a626aeaeaf16bd"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61e1591ed9376e5eddda202ec229eddc56c612b61ac6ad07f96b91460bb6c2fb"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2c75269f8205b2690db4572a4a36fe47cd1338e4368bc73a7a0e48789e2e35a"}, + {file = "ujson-5.10.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7223f41e5bf1f919cd8d073e35b229295aa8e0f7b5de07ed1c8fddac63a6bc5d"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d4dc2fd6b3067c0782e7002ac3b38cf48608ee6366ff176bbd02cf969c9c20fe"}, + {file = "ujson-5.10.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:232cc85f8ee3c454c115455195a205074a56ff42608fd6b942aa4c378ac14dd7"}, + {file = 
"ujson-5.10.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:cc6139531f13148055d691e442e4bc6601f6dba1e6d521b1585d4788ab0bfad4"}, + {file = "ujson-5.10.0-cp38-cp38-win32.whl", hash = "sha256:e7ce306a42b6b93ca47ac4a3b96683ca554f6d35dd8adc5acfcd55096c8dfcb8"}, + {file = "ujson-5.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:e82d4bb2138ab05e18f089a83b6564fee28048771eb63cdecf4b9b549de8a2cc"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dfef2814c6b3291c3c5f10065f745a1307d86019dbd7ea50e83504950136ed5b"}, + {file = "ujson-5.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4734ee0745d5928d0ba3a213647f1c4a74a2a28edc6d27b2d6d5bd9fa4319e27"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47ebb01bd865fdea43da56254a3930a413f0c5590372a1241514abae8aa7c76"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dee5e97c2496874acbf1d3e37b521dd1f307349ed955e62d1d2f05382bc36dd5"}, + {file = "ujson-5.10.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7490655a2272a2d0b072ef16b0b58ee462f4973a8f6bbe64917ce5e0a256f9c0"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:ba17799fcddaddf5c1f75a4ba3fd6441f6a4f1e9173f8a786b42450851bd74f1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2aff2985cef314f21d0fecc56027505804bc78802c0121343874741650a4d3d1"}, + {file = "ujson-5.10.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:ad88ac75c432674d05b61184178635d44901eb749786c8eb08c102330e6e8996"}, + {file = "ujson-5.10.0-cp39-cp39-win32.whl", hash = "sha256:2544912a71da4ff8c4f7ab5606f947d7299971bdd25a45e008e467ca638d13c9"}, + {file = "ujson-5.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:3ff201d62b1b177a46f113bb43ad300b424b7847f9c5d38b1b4ad8f75d4a282a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:5b6fee72fa77dc172a28f21693f64d93166534c263adb3f96c413ccc85ef6e64"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:61d0af13a9af01d9f26d2331ce49bb5ac1fb9c814964018ac8df605b5422dcb3"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ecb24f0bdd899d368b715c9e6664166cf694d1e57be73f17759573a6986dd95a"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbd8fd427f57a03cff3ad6574b5e299131585d9727c8c366da4624a9069ed746"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:beeaf1c48e32f07d8820c705ff8e645f8afa690cca1544adba4ebfa067efdc88"}, + {file = "ujson-5.10.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:baed37ea46d756aca2955e99525cc02d9181de67f25515c468856c38d52b5f3b"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7663960f08cd5a2bb152f5ee3992e1af7690a64c0e26d31ba7b3ff5b2ee66337"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8640fb4072d36b08e95a3a380ba65779d356b2fee8696afeb7794cf0902d0a1"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78778a3aa7aafb11e7ddca4e29f46bc5139131037ad628cc10936764282d6753"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b0111b27f2d5c820e7f2dbad7d48e3338c824e7ac4d2a12da3dc6061cc39c8e6"}, + {file = "ujson-5.10.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c66962ca7565605b355a9ed478292da628b8f18c0f2793021ca4425abf8b01e5"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba43cc34cce49cf2d4bc76401a754a81202d8aa926d0e2b79f0ee258cb15d3a4"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ac56eb983edce27e7f51d05bc8dd820586c6e6be1c5216a6809b0c668bb312b8"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44bd4b23a0e723bf8b10628288c2c7c335161d6840013d4d5de20e48551773b"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c10f4654e5326ec14a46bcdeb2b685d4ada6911050aa8baaf3501e57024b804"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de4971a89a762398006e844ae394bd46991f7c385d7a6a3b93ba229e6dac17e"}, + {file = "ujson-5.10.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e1402f0564a97d2a52310ae10a64d25bcef94f8dd643fcf5d310219d915484f7"}, + {file = "ujson-5.10.0.tar.gz", hash = "sha256:b3cd8f3c5d8c7738257f1018880444f7b7d9b66232c64649f562d7ba86ad4bc1"}, +] + +[[package]] +name = "undictify" +version = "0.11.3" +description = "Type-checked function calls at runtime" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "undictify-0.11.3-py3-none-any.whl", hash = "sha256:4bfdc075b2f06ee027b05e241434c8efcbebf6c83fcc5b8d9d8def56dab4b5ff"}, + {file = "undictify-0.11.3.tar.gz", hash = "sha256:1481170ed8b9862c033e7549d817b90cead6002677c602d1bbdbf8ea15100098"}, +] + +[[package]] +name = "urllib3" +version = "2.4.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvloop" +version = "0.21.0" +description = "Fast implementation of asyncio event loop on top of libuv" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +markers = "sys_platform != \"win32\" and implementation_name == \"cpython\"" +files = [ + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, + {file = 
"uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, + {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, +] + +[package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] +docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] + +[[package]] +name = "websocket-client" +version = "1.8.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +groups = ["main"] 
+files = [ + {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, + {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "websockets" +version = "15.0.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = 
"sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, +] + +[[package]] +name = "werkzeug" +version = "3.1.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wsproto" +version = "1.2.0" +description = "WebSockets state-machine based protocol implementation" +optional = false +python-versions = ">=3.7.0" +groups = ["main"] +files = [ + {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, + {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, +] + +[package.dependencies] +h11 = ">=0.9.0,<1" + +[[package]] +name = "yarl" +version = "1.20.0" +description = "Yet another URL library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f1f6670b9ae3daedb325fa55fbe31c22c8228f6e0b513772c2e1c623caa6ab22"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85a231fa250dfa3308f3c7896cc007a47bc76e9e8e8595c20b7426cac4884c62"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a06701b647c9939d7019acdfa7ebbfbb78ba6aa05985bb195ad716ea759a569"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7595498d085becc8fb9203aa314b136ab0516c7abd97e7d74f7bb4eb95042abe"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af5607159085dcdb055d5678fc2d34949bd75ae6ea6b4381e784bbab1c3aa195"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95b50910e496567434cb77a577493c26bce0f31c8a305135f3bda6a2483b8e10"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b594113a301ad537766b4e16a5a6750fcbb1497dcc1bc8a4daae889e6402a634"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:083ce0393ea173cd37834eb84df15b6853b555d20c52703e21fbababa8c129d2"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1a350a652bbbe12f666109fbddfdf049b3ff43696d18c9ab1531fbba1c977a"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fb0caeac4a164aadce342f1597297ec0ce261ec4532bbc5a9ca8da5622f53867"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d88cc43e923f324203f6ec14434fa33b85c06d18d59c167a0637164863b8e995"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e52d6ed9ea8fd3abf4031325dc714aed5afcbfa19ee4a89898d663c9976eb487"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ce360ae48a5e9961d0c730cf891d40698a82804e85f6e74658fb175207a77cb2"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:06d06c9d5b5bc3eb56542ceeba6658d31f54cf401e8468512447834856fb0e61"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c27d98f4e5c4060582f44e58309c1e55134880558f1add7a87c1bc36ecfade19"}, + {file = "yarl-1.20.0-cp310-cp310-win32.whl", hash = "sha256:f4d3fa9b9f013f7050326e165c3279e22850d02ae544ace285674cb6174b5d6d"}, + {file = "yarl-1.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:bc906b636239631d42eb8a07df8359905da02704a868983265603887ed68c076"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fdb5204d17cb32b2de2d1e21c7461cabfacf17f3645e4b9039f210c5d3378bf3"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eaddd7804d8e77d67c28d154ae5fab203163bd0998769569861258e525039d2a"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:634b7ba6b4a85cf67e9df7c13a7fb2e44fa37b5d34501038d174a63eaac25ee2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d409e321e4addf7d97ee84162538c7258e53792eb7c6defd0c33647d754172e"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ea52f7328a36960ba3231c6677380fa67811b414798a6e071c7085c57b6d20a9"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8703517b924463994c344dcdf99a2d5ce9eca2b6882bb640aa555fb5efc706a"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:077989b09ffd2f48fb2d8f6a86c5fef02f63ffe6b1dd4824c76de7bb01e4f2e2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0acfaf1da020253f3533526e8b7dd212838fdc4109959a2c53cafc6db611bff2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4230ac0b97ec5eeb91d96b324d66060a43fd0d2a9b603e3327ed65f084e41f8"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a6a1e6ae21cdd84011c24c78d7a126425148b24d437b5702328e4ba640a8902"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:86de313371ec04dd2531f30bc41a5a1a96f25a02823558ee0f2af0beaa7ca791"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dd59c9dd58ae16eaa0f48c3d0cbe6be8ab4dc7247c3ff7db678edecbaf59327f"}, + {file = 
"yarl-1.20.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a0bc5e05f457b7c1994cc29e83b58f540b76234ba6b9648a4971ddc7f6aa52da"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c9471ca18e6aeb0e03276b5e9b27b14a54c052d370a9c0c04a68cefbd1455eb4"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40ed574b4df723583a26c04b298b283ff171bcc387bc34c2683235e2487a65a5"}, + {file = "yarl-1.20.0-cp311-cp311-win32.whl", hash = "sha256:db243357c6c2bf3cd7e17080034ade668d54ce304d820c2a58514a4e51d0cfd6"}, + {file = "yarl-1.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c12cd754d9dbd14204c328915e23b0c361b88f3cffd124129955e60a4fbfcfb"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e06b9f6cdd772f9b665e5ba8161968e11e403774114420737f7884b5bd7bdf6f"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b9ae2fbe54d859b3ade40290f60fe40e7f969d83d482e84d2c31b9bff03e359e"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d12b8945250d80c67688602c891237994d203d42427cb14e36d1a732eda480e"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b"}, + {file = "yarl-1.20.0-cp312-cp312-win32.whl", hash = "sha256:839de4c574169b6598d47ad61534e6981979ca2c820ccb77bf70f4311dd2cc64"}, + {file = "yarl-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d7dbbe44b443b0c4aa0971cb07dcb2c2060e4a9bf8d1301140a33a93c98e18c"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384"}, + {file = "yarl-1.20.0-cp313-cp313-win32.whl", hash = "sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62"}, + {file = "yarl-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de"}, + {file = 
"yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f"}, + {file = "yarl-1.20.0-cp313-cp313t-win32.whl", hash = "sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac"}, + {file = "yarl-1.20.0-cp313-cp313t-win_amd64.whl", hash = "sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:119bca25e63a7725b0c9d20ac67ca6d98fa40e5a894bd5d4686010ff73397914"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35d20fb919546995f1d8c9e41f485febd266f60e55383090010f272aca93edcc"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:484e7a08f72683c0f160270566b4395ea5412b4359772b98659921411d32ad26"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d8a3d54a090e0fff5837cd3cc305dd8a07d3435a088ddb1f65e33b322f66a94"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f0cf05ae2d3d87a8c9022f3885ac6dea2b751aefd66a4f200e408a61ae9b7f0d"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a884b8974729e3899d9287df46f015ce53f7282d8d3340fa0ed57536b440621c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8d8aa8dd89ffb9a831fedbcb27d00ffd9f4842107d52dc9d57e64cb34073d5c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4e88d6c3c8672f45a30867817e4537df1bbc6f882a91581faf1f6d9f0f1b5a"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdb77efde644d6f1ad27be8a5d67c10b7f769804fff7a966ccb1da5a4de4b656"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4ba5e59f14bfe8d261a654278a0f6364feef64a794bd456a8c9e823071e5061c"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d0bf955b96ea44ad914bc792c26a0edcd71b4668b93cbcd60f5b0aeaaed06c64"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:27359776bc359ee6eaefe40cb19060238f31228799e43ebd3884e9c589e63b20"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:04d9c7a1dc0a26efb33e1acb56c8849bd57a693b85f44774356c92d610369efa"}, + {file = 
"yarl-1.20.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:faa709b66ae0e24c8e5134033187a972d849d87ed0a12a0366bedcc6b5dc14a5"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44869ee8538208fe5d9342ed62c11cc6a7a1af1b3d0bb79bb795101b6e77f6e0"}, + {file = "yarl-1.20.0-cp39-cp39-win32.whl", hash = "sha256:b7fa0cb9fd27ffb1211cde944b41f5c67ab1c13a13ebafe470b1e206b8459da8"}, + {file = "yarl-1.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:d4fad6e5189c847820288286732075f213eabf81be4d08d6cc309912e62be5b7"}, + {file = "yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124"}, + {file = "yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.1" + +[metadata] +lock-version = "2.1" +python-versions = "^3.13" +content-hash = "61d5d168a102d274f2557b8dcccca6a4ac9f69825b2647ed52bbd62858d4d0c7" diff --git a/projects/background_jobs/pyproject.toml b/projects/renku_data_tasks/pyproject.toml similarity index 67% rename from projects/background_jobs/pyproject.toml rename to projects/renku_data_tasks/pyproject.toml index e9f8a995a..4bc5e716a 100644 --- a/projects/background_jobs/pyproject.toml +++ b/projects/renku_data_tasks/pyproject.toml @@ -1,82 +1,92 @@ -[tool.poetry] -name = "background_jobs" +[project] +name = "renku_data_tasks" version = "0.1.0" description = "" -authors = ['Swiss Data Science Center '] +authors = [ + { name = "Swiss Data Science Center", email = "contact@datascience.ch" }, +] license = "" +requires-python = ">=3.13" +dynamic = ["dependencies"] +[tool.poetry] packages = [ - { include = "renku_data_services/background_jobs", from = "../../bases" }, - { include = "renku_data_services/data_api", from = "../../bases" }, - { include = "renku_data_services/users", from = "../../components" }, - { include = "renku_data_services/errors", from = "../../components" }, + { include = "renku_data_services/data_tasks", from = "../../bases" }, + { include = "renku_data_services/app_config", from = "../../components" }, + { include = "renku_data_services/authn", from = "../../components" }, + { include = "renku_data_services/authz", from = "../../components" }, { include = "renku_data_services/base_api", from = "../../components" }, { include = "renku_data_services/base_models", from = "../../components" }, { include = "renku_data_services/base_orm", from = "../../components" }, - { include = "renku_data_services/users", from = "../../components" }, - { include = "renku_data_services/base_api", from = "../../components" }, - { include = "renku_data_services/message_queue", from = "../../components" }, + { include = "renku_data_services/crc", from = "../../components" }, + { include = "renku_data_services/connected_services", from = "../../components" }, { include = "renku_data_services/db_config", from = "../../components" }, + { include = "renku_data_services/errors", from = "../../components" }, + { include = "renku_data_services/git", from = "../../components" }, { include = "renku_data_services/k8s", from = "../../components" }, - { include = "renku_data_services/crc", from = "../../components" }, - { include = "renku_data_services/project", from = "../../components" }, - { include = "renku_data_services/authz", from = "../../components" }, - { include = "renku_data_services/storage", from = "../../components" }, + { include = "renku_data_services/k8s_watcher", from = 
"../../components" }, + { include = "renku_data_services/message_queue", from = "../../components" }, { include = "renku_data_services/namespace", from = "../../components" }, - { include = "renku_data_services/utils", from = "../../components" }, - { include = "renku_data_services/secrets", from = "../../components" }, - { include = "renku_data_services/authn", from = "../../components" }, - { include = "renku_data_services/app_config", from = "../../components" }, - { include = "renku_data_services/git", from = "../../components" }, - { include = "renku_data_services/connected_services", from = "../../components" }, + { include = "renku_data_services/platform", from = "../../components" }, + { include = "renku_data_services/project", from = "../../components" }, { include = "renku_data_services/repositories", from = "../../components" }, + { include = "renku_data_services/secrets", from = "../../components" }, { include = "renku_data_services/session", from = "../../components" }, - { include = "renku_data_services/platform", from = "../../components" }, + { include = "renku_data_services/storage", from = "../../components" }, + { include = "renku_data_services/users", from = "../../components" }, + { include = "renku_data_services/utils", from = "../../components" }, { include = "renku_data_services/data_connectors", from = "../../components" }, - { include = "renku_data_services/migrations", from = "../../components" }, { include = "renku_data_services/notebooks", from = "../../components" }, + # Note: poetry poly does not detect the migrations as dependencies, but they are. Don't remove these! + { include = "renku_data_services/migrations", from = "../../components" }, + { include = "renku_data_services/solr", from = "../../components" }, + { include = "renku_data_services/search", from = "../../components" }, + { include = "renku_data_services/metrics", from = "../../components" }, ] [tool.poetry.dependencies] -python = "^3.12" -alembic = "^1.14.0" -pydantic = {extras = ["email"], version = "^2.10.2"} -requests = "^2.32.3" -sqlalchemy = "^2.0.36" +python = "^3.13" +sanic = { extras = ["ext"], version = "^24.12.0" } +pydantic = { extras = ["email"], version = "^2.10.6" } +datamodel-code-generator = "^0.24.2" +sqlalchemy = { extras = ["asyncio"], version = "^2.0.38" } +alembic = "^1.14.1" asyncpg = "^0.30.0" -sanic-ext = "^23.6.0" -psycopg = { extras = ["binary"], version = "^3.2.3" } -authlib = "^1.3.2" -sanic = { extras = ["ext"], version = "^24.6.0" } -redis = "^5.2.0" -dataclasses-avroschema = "^0.65.4" -undictify = "^0.11.3" +pyjwt = { extras = ["crypto"], version = "^2.10.1" } tenacity = "^9.0.0" httpx = "<0.29" -python-gitlab = "^5.1.0" -pyjwt = { extras = ["crypto"], version = "^2.10.1" } kubernetes = "^31.0.0" python-ulid = "^3.0.0" +python-gitlab = "^5.6.0" +psycopg = { version = "^3.2.3", extras = ["binary"] } +urllib3 = "^2.3.0" deepmerge = "^2.0" -authzed = "^1.1.0" +authlib = "^1.5.0" +undictify = "^0.11.3" prometheus-sanic = "^3.0.0" -sentry-sdk = { version = "^2.19.0", extras = ["sanic"] } +sentry-sdk = { version = "^2.22.0", extras = ["sanic"] } +authzed = "^1.20.0" # see https://github.com/sanic-org/sanic/issues/2828 for setuptools dependency, remove when not needed anymore -setuptools = { version = "^75.6.0" } +setuptools = { version = "^75.8.2" } aiofile = "^3.9.0" # Not a direct dependency, it is needed by authzed. 
Was causing things to crash at startup because of
# google.protobuf.runtime_version.VersionError:
# Detected incompatible Protobuf Gencode/Runtime versions when loading authzed/api/v1/core.proto: gencode 5.28.2 runtime 5.27.3.
-protobuf = "^5.29.0"
+protobuf = "^5.29.3"
+cryptography = "^44.0.1"
+kubernetes-asyncio = "^32.0.0"
+marshmallow = "^3.26.1"
 escapism = "^1.0.1"
-kr8s = "^0.18.1"
-marshmallow = "^3.23.1"
-toml = "^0.10.2"
+kr8s = "^0.20.7"
+python-box="^7.0.1"
 werkzeug = "^3.1.3"
+toml = "^0.10.2"
+parsy = "^2.1"
+sanic-ext = "^23.12.0"
+posthog = "^3.21.0"
+markdown-code-runner = "^2.2.0"

 [tool.poetry.group.dev.dependencies]
-pyavro-gen = "^0.3.3"
-fakeredis = "^2.26.1"

 [build-system]
 requires = ["poetry-core>=1.0.0"]
diff --git a/projects/secrets_storage/Dockerfile b/projects/secrets_storage/Dockerfile
index a5c86ebf5..7a863b067 100644
--- a/projects/secrets_storage/Dockerfile
+++ b/projects/secrets_storage/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.12-bookworm as builder
+FROM python:3.13-bookworm AS builder
 ARG DEV_BUILD=false
 ARG USER_UID=1000
 ARG USER_GID=$USER_UID
@@ -13,7 +13,8 @@ RUN python3 -m pip install --user pipx && \
     /home/renku/.local/bin/pipx install virtualenv && \
     /home/renku/.local/bin/virtualenv env && \
     /home/renku/.local/bin/poetry self add poetry-multiproject-plugin && \
-    /home/renku/.local/bin/poetry self add poetry-polylith-plugin
+    /home/renku/.local/bin/poetry self add poetry-polylith-plugin && \
+    /home/renku/.local/bin/poetry self add poetry-plugin-export
 COPY --chown=$USER_UID:$USER_GID . .
 RUN if $DEV_BUILD ; then \
     /home/renku/.local/bin/poetry export -o requirements.txt --with dev; \
@@ -21,10 +22,10 @@ RUN if $DEV_BUILD ; then \
     /home/renku/.local/bin/poetry export -o requirements.txt; \
     fi && \
     env/bin/pip install -r requirements.txt
-RUN /home/renku/.local/bin/poetry build-project -f wheel -C projects/secrets_storage
+RUN /home/renku/.local/bin/poetry -C projects/secrets_storage build-project -f wheel --custom-temp-path=/tmp
 RUN env/bin/pip --no-cache-dir install projects/secrets_storage/dist/*.whl
-FROM python:3.12-slim-bookworm
+FROM python:3.13-slim-bookworm
 ARG USER_UID=1000
 ARG USER_GID=$USER_UID
 ENV prometheus_multiproc_dir=/prometheus
diff --git a/projects/secrets_storage/poetry.lock b/projects/secrets_storage/poetry.lock
index f9267c9a0..d0dfee28f 100644
--- a/projects/secrets_storage/poetry.lock
+++ b/projects/secrets_storage/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.

 [[package]]
 name = "aiofile"
@@ -6,6 +6,7 @@ version = "3.9.0"
 description = "Asynchronous file operations."
 optional = false
 python-versions = "<4,>=3.8"
+groups = ["main"]
 files = [
     {file = "aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa"},
     {file = "aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b"},
@@ -20,6 +21,7 @@ version = "24.1.0"
 description = "File support for asyncio."
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, @@ -27,22 +29,23 @@ files = [ [[package]] name = "alembic" -version = "1.14.0" +version = "1.15.2" description = "A database migration tool for SQLAlchemy." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "alembic-1.14.0-py3-none-any.whl", hash = "sha256:99bd884ca390466db5e27ffccff1d179ec5c05c965cfefc0607e69f9e411cb25"}, - {file = "alembic-1.14.0.tar.gz", hash = "sha256:b00892b53b3642d0b8dbedba234dbf1924b69be83a9a769d5a624b01094e304b"}, + {file = "alembic-1.15.2-py3-none-any.whl", hash = "sha256:2e76bd916d547f6900ec4bb5a90aeac1485d2c92536923d0b138c02b126edc53"}, + {file = "alembic-1.15.2.tar.gz", hash = "sha256:1c72391bbdeffccfe317eefba686cb9a3c078005478885413b95c3b26c57a8a7"}, ] [package.dependencies] Mako = "*" -SQLAlchemy = ">=1.3.0" -typing-extensions = ">=4" +SQLAlchemy = ">=1.4.0" +typing-extensions = ">=4.12" [package.extras] -tz = ["backports.zoneinfo"] +tz = ["tzdata"] [[package]] name = "annotated-types" @@ -50,6 +53,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -57,13 +61,14 @@ files = [ [[package]] name = "anyio" -version = "4.4.0" +version = "4.9.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "anyio-4.4.0-py3-none-any.whl", hash = "sha256:c1b2d8f46a8a812513012e1107cb0e68c17159a7a594208005a57dc776e1bdc7"}, - {file = "anyio-4.4.0.tar.gz", hash = "sha256:5aadc6a1bbb7cdb0bede386cac5e2940f5e2ff3aa20277e991cf028e0585ce94"}, + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, ] [package.dependencies] @@ -71,19 +76,20 @@ idna = ">=2.8" sniffio = ">=1.1" [package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] -trio = ["trio (>=0.23)"] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] +trio = ["trio (>=0.26.1)"] [[package]] name = "argcomplete" -version = "3.4.0" +version = "3.6.2" description = "Bash tab completion for argparse" optional = false 
python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "argcomplete-3.4.0-py3-none-any.whl", hash = "sha256:69a79e083a716173e5532e0fa3bef45f793f4e61096cf52b5a42c0211c8b8aa5"}, - {file = "argcomplete-3.4.0.tar.gz", hash = "sha256:c2abcdfe1be8ace47ba777d4fce319eb13bf8ad9dace8d085dcad6eded88057f"}, + {file = "argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591"}, + {file = "argcomplete-3.6.2.tar.gz", hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf"}, ] [package.extras] @@ -95,6 +101,7 @@ version = "0.3.1" description = "Helpers to use cachetools with async code." optional = false python-versions = ">=3.8,<4.0" +groups = ["main"] files = [ {file = "asyncache-0.3.1-py3-none-any.whl", hash = "sha256:ef20a1024d265090dd1e0785c961cf98b9c32cc7d9478973dcf25ac1b80011f5"}, {file = "asyncache-0.3.1.tar.gz", hash = "sha256:9a1e60a75668e794657489bdea6540ee7e3259c483517b934670db7600bf5035"}, @@ -109,6 +116,7 @@ version = "0.30.0" description = "An asyncio PostgreSQL driver" optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e"}, {file = "asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0"}, @@ -163,18 +171,19 @@ files = [ [package.extras] docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"] -gssauth = ["gssapi", "sspilib"] -test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi", "k5test", "mypy (>=1.8.0,<1.9.0)", "sspilib", "uvloop (>=0.15.3)"] +gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""] +test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""] [[package]] name = "authlib" -version = "1.3.2" +version = "1.6.0" description = "The ultimate Python library in building OAuth and OpenID Connect servers and clients." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "Authlib-1.3.2-py2.py3-none-any.whl", hash = "sha256:ede026a95e9f5cdc2d4364a52103f5405e75aa156357e831ef2bfd0bc5094dfc"}, - {file = "authlib-1.3.2.tar.gz", hash = "sha256:4b16130117f9eb82aa6eec97f6dd4673c3f960ac0283ccdae2897ee4bc030ba2"}, + {file = "authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d"}, + {file = "authlib-1.6.0.tar.gz", hash = "sha256:4367d32031b7af175ad3a323d571dc7257b7099d55978087ceae4a0d88cd3210"}, ] [package.dependencies] @@ -182,13 +191,14 @@ cryptography = "*" [[package]] name = "authzed" -version = "1.1.0" +version = "1.21.1" description = "Client library for SpiceDB." 
optional = false -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.9" +groups = ["main"] files = [ - {file = "authzed-1.1.0-py3-none-any.whl", hash = "sha256:1c37038655c55d054b5caf918d60d680262fda4bc2787dc83576b4424e358214"}, - {file = "authzed-1.1.0.tar.gz", hash = "sha256:6e1300ff75af1840acdb3e0b2bc0dec31a8cf631c4ac6fc1ac674b9ea02d043a"}, + {file = "authzed-1.21.1-py3-none-any.whl", hash = "sha256:9a830c0e9eefc506181f0d82c9a9f73405db46d50e8ecaedd4488486a2792959"}, + {file = "authzed-1.21.1.tar.gz", hash = "sha256:c354d19af5ef1a393381d5be670dd946916742573ae2bf3ac87becdbf44f093b"}, ] [package.dependencies] @@ -196,54 +206,50 @@ googleapis-common-protos = ">=1.65.0,<2.0.0" grpc-interceptor = ">=0.15.4,<0.16.0" grpcio = ">=1.63,<2.0" protobuf = ">=5.26,<6" +protovalidate = ">=0.7.1,<0.8.0" [[package]] -name = "avro-preprocessor" -version = "0.3.0" -description = "A preprocessor for Avro Schemata" +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" optional = false -python-versions = "*" +python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ - {file = "avro-preprocessor-0.3.0.tar.gz", hash = "sha256:0470941009f49a02f8d6f0357a459b10ad15c6d4e8470f4055074176e654d716"}, - {file = "avro_preprocessor-0.3.0-py3-none-any.whl", hash = "sha256:da402ca763a3304c29f2237ed0f3cc5024dce2e0f60da433e9f93900c28b8400"}, + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, ] -[package.dependencies] -networkx = ">=2.8.7" -pygments = ">=2.13.0" -requests = ">=2.28.1" -"ruamel.yaml" = ">=0.17.21" -"ruamel.yaml.clib" = ">=0.2.6" - [[package]] name = "black" -version = "24.4.2" +version = "25.1.0" description = "The uncompromising code formatter." 
optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dd1b5a14e417189db4c7b64a6540f31730713d173f0b63e55fabd52d61d8fdce"}, - {file = "black-24.4.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e537d281831ad0e71007dcdcbe50a71470b978c453fa41ce77186bbe0ed6021"}, - {file = "black-24.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaea3008c281f1038edb473c1aa8ed8143a5535ff18f978a318f10302b254063"}, - {file = "black-24.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:7768a0dbf16a39aa5e9a3ded568bb545c8c2727396d063bbaf847df05b08cd96"}, - {file = "black-24.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:257d724c2c9b1660f353b36c802ccece186a30accc7742c176d29c146df6e474"}, - {file = "black-24.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bdde6f877a18f24844e381d45e9947a49e97933573ac9d4345399be37621e26c"}, - {file = "black-24.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e151054aa00bad1f4e1f04919542885f89f5f7d086b8a59e5000e6c616896ffb"}, - {file = "black-24.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:7e122b1c4fb252fd85df3ca93578732b4749d9be076593076ef4d07a0233c3e1"}, - {file = "black-24.4.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:accf49e151c8ed2c0cdc528691838afd217c50412534e876a19270fea1e28e2d"}, - {file = "black-24.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:88c57dc656038f1ab9f92b3eb5335ee9b021412feaa46330d5eba4e51fe49b04"}, - {file = "black-24.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be8bef99eb46d5021bf053114442914baeb3649a89dc5f3a555c88737e5e98fc"}, - {file = "black-24.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:415e686e87dbbe6f4cd5ef0fbf764af7b89f9057b97c908742b6008cc554b9c0"}, - {file = "black-24.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf10f7310db693bb62692609b397e8d67257c55f949abde4c67f9cc574492cc7"}, - {file = "black-24.4.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:98e123f1d5cfd42f886624d84464f7756f60ff6eab89ae845210631714f6db94"}, - {file = "black-24.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48a85f2cb5e6799a9ef05347b476cce6c182d6c71ee36925a6c194d074336ef8"}, - {file = "black-24.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:b1530ae42e9d6d5b670a34db49a94115a64596bc77710b1d05e9801e62ca0a7c"}, - {file = "black-24.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37aae07b029fa0174d39daf02748b379399b909652a806e5708199bd93899da1"}, - {file = "black-24.4.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da33a1a5e49c4122ccdfd56cd021ff1ebc4a1ec4e2d01594fef9b6f267a9e741"}, - {file = "black-24.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef703f83fc32e131e9bcc0a5094cfe85599e7109f896fe8bc96cc402f3eb4b6e"}, - {file = "black-24.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:b9176b9832e84308818a99a561e90aa479e73c523b3f77afd07913380ae2eab7"}, - {file = "black-24.4.2-py3-none-any.whl", hash = "sha256:d36ed1124bb81b32f8614555b34cc4259c3fbc7eec17870e8ff8ded335b58d8c"}, - {file = "black-24.4.2.tar.gz", hash = "sha256:c872b53057f000085da66a19c55d68f6f8ddcac2642392ad3a355878406fbd4d"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, + {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, + {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, + {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, + {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, + {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, + {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, + {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, + {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, + {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, + {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, + {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, + {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, + {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, + {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, + {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, + {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, + {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, + {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, + {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, + {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, + {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, ] [package.dependencies] @@ -255,126 +261,159 @@ platformdirs = ">=2" [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "cachetools" -version = "5.3.3" +version = "5.5.2" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" 
+groups = ["main"] files = [ - {file = "cachetools-5.3.3-py3-none-any.whl", hash = "sha256:0abad1021d3f8325b2fc1d2e9c8b9c9d57b04c3932657a72465447332c24d945"}, - {file = "cachetools-5.3.3.tar.gz", hash = "sha256:ba29e2dfa0b8b556606f097407ed1aa62080ee108ab0dc5ec9d6a723a007d105"}, + {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"}, + {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] [[package]] name = "caio" -version = "0.9.17" +version = "0.9.24" description = "Asynchronous file IO for Linux MacOS or Windows." optional = false -python-versions = "<4,>=3.7" -files = [ - {file = "caio-0.9.17-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3f69395fdd45c115b2ef59732e3c8664722a2b51de2d6eedb3d354b2f5f3be3c"}, - {file = "caio-0.9.17-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3028b746e9ec7f6d6ebb386a7fd8caf0eebed5d6e6b4f18c8ef25861934b1673"}, - {file = "caio-0.9.17-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:079730a353bbde03796fab681e969472eace09ffbe5000e584868a7fe389ba6f"}, - {file = "caio-0.9.17-cp311-cp311-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:549caa51b475877fe32856a26fe937366ae7a1c23a9727005b441db9abb12bcc"}, - {file = "caio-0.9.17-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0ddb253b145a53ecca76381677ce465bc5efeaecb6aaf493fac43ae79659f0fb"}, - {file = "caio-0.9.17-cp312-cp312-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e320b0ea371c810359934f8e8fe81777c493cc5fb4d41de44277cbe7336e74"}, - {file = "caio-0.9.17-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:a39a49e279f82aa022f0786339d45d9550b5aa3e46eec7d08e0f351c503df0a5"}, - {file = "caio-0.9.17-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b3e96925b9f15f43e6ef1d42a83edfd937eb11a984cb6ef7c10527e963595497"}, - {file = "caio-0.9.17-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:fca916240597005d2b734f1442fa3c3cfb612bf46e0978b5232e5492a371de38"}, - {file = "caio-0.9.17-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40bd0afbd3491d1e407bcf74e3a9e9cc67a7f290ed29518325194184d63cc2b6"}, - {file = "caio-0.9.17-py3-none-any.whl", hash = "sha256:c55d4dc6b3a36f93237ecd6360e1c131c3808bc47d4191a130148a99b80bb311"}, - {file = "caio-0.9.17.tar.gz", hash = "sha256:8f30511526814d961aeef389ea6885273abe6c655f1e08abbadb95d12fdd9b4f"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "caio-0.9.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d80322126a97ba572412b17b2f086ff95195de2c4261deb19db6bfcdc9ef7540"}, + {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:37bc172349686139e8dc97fff7662c67b1837e18a67b99e8ef25585f2893d013"}, + {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:ad7f0902bf952237e120606252c14ab3cb05995c9f79f39154b5248744864832"}, + {file = "caio-0.9.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:925b9e3748ce1a79386dfb921c0aee450e43225534551abd1398b1c08f9ba29f"}, + {file = "caio-0.9.24-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:3b4dc0a8fb9a58ab40f967ad5a8a858cc0bfb2348a580b4142595849457f9c9a"}, + {file = 
"caio-0.9.24-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fa74d111b3b165bfad2e333367976bdf118bcf505a1cb44d3bcddea2849e3297"}, + {file = "caio-0.9.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae3566228383175265a7583107f21a7cb044a752ea29ba84fce7c1a49a05903"}, + {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:a306b0dda91cb4ca3170f066c114597f8ea41b3da578574a9d2b54f86963de68"}, + {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:8ee158e56128d865fb7d57a9c9c22fca4e8aa8d8664859c977a36fff3ccb3609"}, + {file = "caio-0.9.24-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d47ef8d76aca74c17cb07339a441c5530fc4b8dd9222dfb1e1abd7f9f9b814f"}, + {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:d15fc746c4bf0077d75df05939d1e97c07ccaa8e580681a77021d6929f65d9f4"}, + {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:9368eae0a9badd5f31264896c51b47431d96c0d46f1979018fb1d20c49f56156"}, + {file = "caio-0.9.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f0e5a645ef4e7bb7a81e10ae2a7aef14988cb2cb4354588c6bf6f6f3f6de72a"}, + {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_aarch64.whl", hash = "sha256:08304fa80af7771c78a5bcc923449c7ec8134d589b50d48c66320f85552c7ae2"}, + {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:5339ced0764e10242a50ccb21db7f0d9c359881db0f72fa2c5e45ed828ffacf7"}, + {file = "caio-0.9.24.tar.gz", hash = "sha256:5bcdecaea02a9aa8e3acf0364eff8ad9903d57d70cdb274a42270126290a77f1"}, ] [package.extras] -develop = ["aiomisc-pytest", "pytest", "pytest-cov"] +develop = ["aiomisc-pytest", "coveralls", "pylama[toml]", "pytest", "pytest-cov", "setuptools"] [[package]] -name = "casefy" -version = "0.1.7" -description = "Utilities for string case conversion." +name = "cel-python" +version = "0.2.0" +description = "Pure Python implementation of Google Common Expression Language" optional = false -python-versions = ">=3.6" +python-versions = "<4.0,>=3.8" +groups = ["main"] files = [ - {file = "casefy-0.1.7-py3-none-any.whl", hash = "sha256:ab05ff1c67f2a8e62d9f8986fa9a849416d61ac5413ec57d1f827b4f36589cf6"}, - {file = "casefy-0.1.7.tar.gz", hash = "sha256:6accce985a64b9edb2a610a29ac489d78fac80e52ff8f2d137e294f2f92b8027"}, + {file = "cel_python-0.2.0-py3-none-any.whl", hash = "sha256:478ff73def7b39d51e6982f95d937a57c2b088c491c578fe5cecdbd79f476f60"}, + {file = "cel_python-0.2.0.tar.gz", hash = "sha256:75de72a5cf223ec690b236f0cc24da267219e667bd3e7f8f4f20595fcc1c0c0f"}, ] +[package.dependencies] +jmespath = ">=1.0.1,<2.0.0" +lark = ">=0.12.0,<0.13.0" +python-dateutil = ">=2.9.0.post0,<3.0.0" +pyyaml = ">=6.0.1,<7.0.0" +types-python-dateutil = ">=2.9.0.20240316,<3.0.0.0" +types-pyyaml = ">=6.0.12.20240311,<7.0.0.0" + [[package]] name = "certifi" -version = "2024.6.2" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "certifi-2024.6.2-py3-none-any.whl", hash = "sha256:ddc6c8ce995e6987e7faf5e3f1b02b302836a0e5d98ece18392cb1a36c72ad56"}, - {file = "certifi-2024.6.2.tar.gz", hash = "sha256:3cd43f1c6fa7dedc5899d69d3ad0398fd018ad1a17fba83ddaf78aa46c747516"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] name = "cffi" -version = "1.16.0" +version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" -files = [ - {file = "cffi-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6b3d6606d369fc1da4fd8c357d026317fbb9c9b75d36dc16e90e84c26854b088"}, - {file = "cffi-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ac0f5edd2360eea2f1daa9e26a41db02dd4b0451b48f7c318e217ee092a213e9"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e61e3e4fa664a8588aa25c883eab612a188c725755afff6289454d6362b9673"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a72e8961a86d19bdb45851d8f1f08b041ea37d2bd8d4fd19903bc3083d80c896"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5b50bf3f55561dac5438f8e70bfcdfd74543fd60df5fa5f62d94e5867deca684"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7651c50c8c5ef7bdb41108b7b8c5a83013bfaa8a935590c5d74627c047a583c7"}, - {file = "cffi-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4108df7fe9b707191e55f33efbcb2d81928e10cea45527879a4749cbe472614"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:32c68ef735dbe5857c810328cb2481e24722a59a2003018885514d4c09af9743"}, - {file = "cffi-1.16.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:673739cb539f8cdaa07d92d02efa93c9ccf87e345b9a0b556e3ecc666718468d"}, - {file = "cffi-1.16.0-cp310-cp310-win32.whl", hash = "sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a"}, - {file = "cffi-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b84834d0cf97e7d27dd5b7f3aca7b6e9263c56308ab9dc8aae9784abb774d404"}, - {file = "cffi-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b8ebc27c014c59692bb2664c7d13ce7a6e9a629be20e54e7271fa696ff2b417"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee07e47c12890ef248766a6e55bd38ebfb2bb8edd4142d56db91b21ea68b7627"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8a9d3ebe49f084ad71f9269834ceccbf398253c9fac910c4fd7053ff1386936"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e70f54f1796669ef691ca07d046cd81a29cb4deb1e5f942003f401c0c4a2695d"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5bf44d66cdf9e893637896c7faa22298baebcd18d1ddb6d2626a6e39793a1d56"}, - {file = "cffi-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:7b78010e7b97fef4bee1e896df8a4bbb6712b7f05b7ef630f9d1da00f6444d2e"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c6a164aa47843fb1b01e941d385aab7215563bb8816d80ff3a363a9f8448a8dc"}, - {file = "cffi-1.16.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09f3ff613345df5e8c3667da1d918f9149bd623cd9070c983c013792a9a62eb"}, - {file = "cffi-1.16.0-cp311-cp311-win32.whl", hash = "sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab"}, - {file = "cffi-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:fa3a0128b152627161ce47201262d3140edb5a5c3da88d73a1b790a959126956"}, - {file = "cffi-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:68e7c44931cc171c54ccb702482e9fc723192e88d25a0e133edd7aff8fcd1f6e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abd808f9c129ba2beda4cfc53bde801e5bcf9d6e0f22f095e45327c038bfe68e"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88e2b3c14bdb32e440be531ade29d3c50a1a59cd4e51b1dd8b0865c54ea5d2e2"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcc8eb6d5902bb1cf6dc4f187ee3ea80a1eba0a89aba40a5cb20a5087d961357"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b7be2d771cdba2942e13215c4e340bfd76398e9227ad10402a8767ab1865d2e6"}, - {file = "cffi-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e715596e683d2ce000574bae5d07bd522c781a822866c20495e52520564f0969"}, - {file = "cffi-1.16.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2d92b25dbf6cae33f65005baf472d2c245c050b1ce709cc4588cdcdd5495b520"}, - {file = "cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b"}, - {file = "cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235"}, - {file = "cffi-1.16.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c9ef6ff37e974b73c25eecc13952c55bceed9112be2d9d938ded8e856138bcc"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a09582f178759ee8128d9270cd1344154fd473bb77d94ce0aeb2a93ebf0feaf0"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e760191dd42581e023a68b758769e2da259b5d52e3103c6060ddc02c9edb8d7b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80876338e19c951fdfed6198e70bc88f1c9758b94578d5a7c4c91a87af3cf31c"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a6a14b17d7e17fa0d207ac08642c8820f84f25ce17a442fd15e27ea18d67c59b"}, - {file = "cffi-1.16.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6602bc8dc6f3a9e02b6c22c4fc1e47aa50f8f8e6d3f78a5e16ac33ef5fefa324"}, - {file = "cffi-1.16.0-cp38-cp38-win32.whl", hash = "sha256:131fd094d1065b19540c3d72594260f118b231090295d8c34e19a7bbcf2e860a"}, - {file = "cffi-1.16.0-cp38-cp38-win_amd64.whl", hash = "sha256:31d13b0f99e0836b7ff893d37af07366ebc90b678b6664c955b54561fc36ef36"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:582215a0e9adbe0e379761260553ba11c58943e4bbe9c36430c4ca6ac74b15ed"}, - {file = "cffi-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b29ebffcf550f9da55bec9e02ad430c992a87e5f512cd63388abb76f1036d8d2"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dc9b18bf40cc75f66f40a7379f6a9513244fe33c0e8aa72e2d56b0196a7ef872"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cb4a35b3642fc5c005a6755a5d17c6c8b6bcb6981baf81cea8bfbc8903e8ba8"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b86851a328eedc692acf81fb05444bdf1891747c25af7529e39ddafaf68a4f3f"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c0f31130ebc2d37cdd8e44605fb5fa7ad59049298b3f745c74fa74c62fbfcfc4"}, - {file = "cffi-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8e709127c6c77446a8c0a8c8bf3c8ee706a06cd44b1e827c3e6a2ee6b8c098"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:748dcd1e3d3d7cd5443ef03ce8685043294ad6bd7c02a38d1bd367cfd968e000"}, - {file = "cffi-1.16.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8895613bcc094d4a1b2dbe179d88d7fb4a15cee43c052e8885783fac397d91fe"}, - {file = "cffi-1.16.0-cp39-cp39-win32.whl", hash = "sha256:ed86a35631f7bfbb28e108dd96773b9d5a6ce4811cf6ea468bb6a359b256b1e4"}, - {file = "cffi-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:3686dffb02459559c74dd3d81748269ffb0eb027c39a6fc99502de37d501faa8"}, - {file = "cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0"}, +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = 
"cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = 
"sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, ] [package.dependencies] @@ -382,112 +421,116 @@ pycparser = "*" [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = 
"sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = 
"charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -499,6 +542,8 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "platform_system == \"Windows\"" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, @@ -506,99 +551,69 @@ files = [ [[package]] name = "cryptography" -version = "44.0.0" +version = "44.0.2" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
 optional = false
 python-versions = "!=3.9.0,!=3.9.1,>=3.7"
-files = [
-    {file = "cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123"},
-    [... remaining cryptography 44.0.0 wheel hash entries (cp37-abi3, cp39-abi3, pp310) elided ...]
-    {file = "cryptography-44.0.0.tar.gz", hash = "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02"},
+groups = ["main"]
+files = [
+    {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"},
+    [... cryptography 44.0.2 wheel hash entries (cp37-abi3, cp39-abi3, pp310, pp311) elided ...]
+    {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"},
 ]

 [package.dependencies]
 cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}

 [package.extras]
-docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""]
 docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
-nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"]
-pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
+nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2) ; python_version >= \"3.8\""]
+pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
 sdist = ["build (>=1.0.0)"]
 ssh = ["bcrypt (>=3.1.5)"]
-test = ["certifi (>=2024)", "cryptography-vectors (==44.0.0)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
+test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
 test-randomorder = ["pytest-randomly"]

-[[package]]
-name = "dacite"
-version = "1.8.1"
-description = "Simple creation of data classes from dictionaries."
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "dacite-1.8.1-py3-none-any.whl", hash = "sha256:cc31ad6fdea1f49962ea42db9421772afe01ac5442380d9a99fcf3d188c61afe"},
-]
-
-[package.extras]
-dev = ["black", "coveralls", "mypy", "pre-commit", "pylint", "pytest (>=5)", "pytest-benchmark", "pytest-cov"]
-
-[[package]]
-name = "dataclasses-avroschema"
-version = "0.65.4"
-description = "Generate Avro Schemas from Python classes. Serialize/Deserialize python instances with avro schemas"
-optional = false
-python-versions = "<4.0,>=3.9"
-files = [
-    {file = "dataclasses_avroschema-0.65.4-py3-none-any.whl", hash = "sha256:f9a12541c73dfd79d68be4e873b0045b38fc03f31457e76102c91c0df75958d9"},
-    {file = "dataclasses_avroschema-0.65.4.tar.gz", hash = "sha256:d91c63b854b397595fb90946840fe02f29c1ca8cec000f3aa79f8f757aae0528"},
-]
-
-[package.dependencies]
-casefy = ">=0.1.7,<0.2.0"
-dacite = ">=1.8.0,<2.0.0"
-fastavro = ">=1.7.3,<2.0.0"
-inflection = ">=0.5.1,<0.6.0"
-python-dateutil = ">=2.7,<3.0"
-typing-extensions = ">=4.2.0,<5.0.0"
-
-[package.extras]
-cli = ["dc-avro (>=0.6.4)"]
-faker = ["faker (>=26.0.0,<31.0.0)"]
-faust = ["faust-streaming (>=0.10.11,<0.12.0)"]
-pydantic = ["pydantic[email] (>=2.4.2,<3.0.0)"]
-
 [[package]]
 name = "datamodel-code-generator"
 version = "0.24.2"
 description = "Datamodel Code Generator"
 optional = false
 python-versions = ">=3.7,<4.0"
+groups = ["main"]
 files = [
     {file = "datamodel_code_generator-0.24.2-py3-none-any.whl", hash = "sha256:582c30466def12600d7165c5f624bb63a7e944eeaf8320f282518daf9ccb566c"},
     {file = "datamodel_code_generator-0.24.2.tar.gz", hash = "sha256:d278c751038c8911efc82856ec549ac1e3e13134567387a4bb5ab7ddc6543162"},
@@ -626,6 +641,7 @@ version = "2.0"
 description = "A toolset for deeply merging Python dictionaries."
 optional = false
 python-versions = ">=3.8"
+groups = ["main"]
 files = [
     {file = "deepmerge-2.0-py3-none-any.whl", hash = "sha256:6de9ce507115cff0bed95ff0ce9ecc31088ef50cbdf09bc90a09349a318b3d00"},
     {file = "deepmerge-2.0.tar.gz", hash = "sha256:5c3d86081fbebd04dd5de03626a0607b809a98fb6ccba5770b62466fe940ff20"},
@@ -634,34 +650,49 @@ files = [

 [package.extras]
 dev = ["black", "build", "mypy", "pytest", "pyupgrade", "twine", "validate-pyproject[all]"]

+[[package]]
+name = "distro"
+version = "1.9.0"
+description = "Distro - an OS platform information API"
+optional = false
+python-versions = ">=3.6"
+groups = ["main"]
+files = [
+    {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"},
+    {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"},
+]
+
 [[package]]
 name = "dnspython"
-version = "2.6.1"
+version = "2.7.0"
 description = "DNS toolkit"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
+groups = ["main"]
 files = [
-    {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"},
-    {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"},
+    {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"},
+    {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"},
 ]

 [package.extras]
-dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
-dnssec = ["cryptography (>=41)"]
+dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"]
+dnssec = ["cryptography (>=43)"]
 doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"]
-doq = ["aioquic (>=0.9.25)"]
-idna = ["idna (>=3.6)"]
+doq = ["aioquic (>=1.0.0)"]
+idna = ["idna (>=3.7)"]
 trio = ["trio (>=0.23)"]
 wmi = ["wmi (>=1.5.1)"]

 [[package]]
 name = "durationpy"
-version = "0.7"
+version = "0.9"
 description = "Module for converting between datetime.timedelta and Go's Duration strings."
 optional = false
 python-versions = "*"
+groups = ["main"]
 files = [
-    {file = "durationpy-0.7.tar.gz", hash = "sha256:8447c43df4f1a0b434e70c15a38d77f5c9bd17284bfc1ff1d430f233d5083732"},
+    {file = "durationpy-0.9-py3-none-any.whl", hash = "sha256:e65359a7af5cedad07fb77a2dd3f390f8eb0b74cb845589fa6c057086834dd38"},
+    {file = "durationpy-0.9.tar.gz", hash = "sha256:fd3feb0a69a0057d582ef643c355c40d2fa1c942191f914d12203b1a01ac722a"},
 ]

@@ -670,6 +701,7 @@ version = "2.2.0"
 description = "A robust email address syntax and deliverability validation library."
 optional = false
 python-versions = ">=3.8"
+groups = ["main"]
 files = [
     {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"},
     {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"},
@@ -685,117 +717,19 @@ version = "1.0.1"
 description = "Simple, generic API for escaping strings."
 optional = false
 python-versions = "*"
+groups = ["main"]
 files = [
     {file = "escapism-1.0.1-py2.py3-none-any.whl", hash = "sha256:d28f19edc3cb1ffc36fa238956ecc068695477e748f57157c6dde00a6b77f229"},
     {file = "escapism-1.0.1.tar.gz", hash = "sha256:73256bdfb4f22230f0428fc6efecee61cdc4fad531b6f98b849cb9c80711e4ec"},
 ]

-[[package]]
-name = "factory-boy"
-version = "3.3.0"
-description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby."
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "factory_boy-3.3.0-py2.py3-none-any.whl", hash = "sha256:a2cdbdb63228177aa4f1c52f4b6d83fab2b8623bf602c7dedd7eb83c0f69c04c"},
-    {file = "factory_boy-3.3.0.tar.gz", hash = "sha256:bc76d97d1a65bbd9842a6d722882098eb549ec8ee1081f9fb2e8ff29f0c300f1"},
-]
-
-[package.dependencies]
-Faker = ">=0.7.0"
-
-[package.extras]
-dev = ["Django", "Pillow", "SQLAlchemy", "coverage", "flake8", "isort", "mongoengine", "sqlalchemy-utils", "tox", "wheel (>=0.32.0)", "zest.releaser[recommended]"]
-doc = ["Sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"]
-
-[[package]]
-name = "faker"
-version = "25.9.1"
-description = "Faker is a Python package that generates fake data for you."
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "Faker-25.9.1-py3-none-any.whl", hash = "sha256:f1dc27dc8035cb7e97e96afbb5fe1305eed6aeea53374702cbac96acfe851626"},
-    {file = "Faker-25.9.1.tar.gz", hash = "sha256:0e1cf7a8d3c94de91a65ab1e9cf7050903efae1e97901f8e5924a9f45147ae44"},
-]
-
-[package.dependencies]
-python-dateutil = ">=2.4"
-
-[[package]]
-name = "fakeredis"
-version = "2.26.1"
-description = "Python implementation of redis API, can be used for testing purposes."
-optional = false
-python-versions = "<4.0,>=3.7"
-files = [
-    {file = "fakeredis-2.26.1-py3-none-any.whl", hash = "sha256:68a5615d7ef2529094d6958677e30a6d30d544e203a5ab852985c19d7ad57e32"},
-    {file = "fakeredis-2.26.1.tar.gz", hash = "sha256:69f4daafe763c8014a6dbf44a17559c46643c95447b3594b3975251a171b806d"},
-]
-
-[package.dependencies]
-redis = {version = ">=4.3", markers = "python_full_version > \"3.8.0\""}
-sortedcontainers = ">=2,<3"
-
-[package.extras]
-bf = ["pyprobables (>=0.6,<0.7)"]
-cf = ["pyprobables (>=0.6,<0.7)"]
-json = ["jsonpath-ng (>=1.6,<2.0)"]
-lua = ["lupa (>=2.1,<3.0)"]
-probabilistic = ["pyprobables (>=0.6,<0.7)"]
-
-[[package]]
-name = "fastavro"
-version = "1.9.4"
-description = "Fast read/write of AVRO files"
-optional = false
-python-versions = ">=3.8"
-files = [
-    {file = "fastavro-1.9.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:60cb38f07462a7fb4e4440ed0de67d3d400ae6b3d780f81327bebde9aa55faef"},
-    [... fastavro 1.9.4 wheel hash entries (cp310-cp312, cp38, cp39) elided ...]
-    {file = "fastavro-1.9.4.tar.gz", hash = "sha256:56b8363e360a1256c94562393dc7f8611f3baf2b3159f64fb2b9c6b87b14e876"},
-]
-
-[package.extras]
-codecs = ["cramjam", "lz4", "zstandard"]
-lz4 = ["lz4"]
-snappy = ["cramjam"]
-zstandard = ["zstandard"]
-
 [[package]]
 name = "genson"
 version = "1.3.0"
 description = "GenSON is a powerful, user-friendly JSON Schema generator."
 optional = false
 python-versions = "*"
+groups = ["main"]
 files = [
     {file = "genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7"},
     {file = "genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37"},
@@ -803,13 +737,14 @@ files = [

 [[package]]
 name = "google-auth"
-version = "2.30.0"
+version = "2.39.0"
 description = "Google Authentication Library"
 optional = false
 python-versions = ">=3.7"
+groups = ["main"]
 files = [
-    {file = "google-auth-2.30.0.tar.gz", hash = "sha256:ab630a1320f6720909ad76a7dbdb6841cdf5c66b328d690027e4867bdfb16688"},
-    {file = "google_auth-2.30.0-py2.py3-none-any.whl", hash = "sha256:8df7da660f62757388b8a7f249df13549b3373f24388cb5d2f1dd91cc18180b5"},
+    {file = "google_auth-2.39.0-py2.py3-none-any.whl", hash = "sha256:0150b6711e97fb9f52fe599f55648950cc4540015565d8fbb31be2ad6e1548a2"},
+    {file = "google_auth-2.39.0.tar.gz", hash = "sha256:73222d43cdc35a3aeacbfdcaf73142a97839f10de930550d89ebfe1d0a00cde7"},
 ]

 [package.dependencies]
@@ -818,94 +753,97 @@ pyasn1-modules = ">=0.2.1"
 rsa = ">=3.1.4,<5"

 [package.extras]
-aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"]
-enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"]
-pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
+aiohttp = ["aiohttp (>=3.6.2,<4.0.0)", "requests (>=2.20.0,<3.0.0)"]
+enterprise-cert = ["cryptography", "pyopenssl"]
+pyjwt = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyjwt (>=2.0)"]
+pyopenssl = ["cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"]
 reauth = ["pyu2f (>=0.1.5)"]
-requests = ["requests (>=2.20.0,<3.0.0.dev0)"]
+requests = ["requests (>=2.20.0,<3.0.0)"]
+testing = ["aiohttp (<3.10.0)", "aiohttp (>=3.6.2,<4.0.0)", "aioresponses", "cryptography (<39.0.0) ; python_version < \"3.8\"", "cryptography (>=38.0.3)", "flask", "freezegun", "grpcio", "mock", "oauth2client", "packaging", "pyjwt (>=2.0)", "pyopenssl (<24.3.0)", "pyopenssl (>=20.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "pytest-localserver", "pyu2f (>=0.1.5)", "requests (>=2.20.0,<3.0.0)", "responses", "urllib3"]
+urllib3 = ["packaging", "urllib3"]

 [[package]]
 name = "googleapis-common-protos"
-version = "1.65.0"
+version = "1.70.0"
 description = "Common protobufs used in Google APIs"
 optional = false
 python-versions = ">=3.7"
+groups = ["main"]
 files = [
-    {file = "googleapis_common_protos-1.65.0-py2.py3-none-any.whl", hash = "sha256:2972e6c496f435b92590fd54045060867f3fe9be2c82ab148fc8885035479a63"},
-    {file = "googleapis_common_protos-1.65.0.tar.gz", hash = "sha256:334a29d07cddc3aa01dee4988f9afd9b2916ee2ff49d6b757155dc0d197852c0"},
+    {file = "googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8"},
+    {file = "googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257"},
 ]

 [package.dependencies]
-protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<6.0.0.dev0"
+protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<7.0.0"

 [package.extras]
-grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"]
+grpc = ["grpcio (>=1.44.0,<2.0.0)"]

 [[package]]
 name = "greenlet"
-version = "3.0.3"
+version = "3.2.1"
 description = "Lightweight in-process concurrent programming"
 optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"},
-    [... greenlet 3.0.3 wheel hash entries (cp310-cp312, cp37-cp39) elided ...]
-    {file = "greenlet-3.0.3.tar.gz", hash = "sha256:43374442353259554ce33599da8b692d5aa96f8976d567d4badf263371fbe491"},
+python-versions = ">=3.9"
+groups = ["main"]
+markers = "python_version == \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"
+files = [
+    {file = "greenlet-3.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:777c1281aa7c786738683e302db0f55eb4b0077c20f1dc53db8852ffaea0a6b0"},
+    [... greenlet 3.2.1 wheel hash entries (cp310-cp314, cp39) elided ...]
+    {file =
"greenlet-3.2.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4339b202ac20a89ccd5bde0663b4d00dc62dd25cb3fb14f7f3034dec1b0d9ece"}, + {file = "greenlet-3.2.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1a750f1046994b9e038b45ae237d68153c29a3a783075211fb1414a180c8324b"}, + {file = "greenlet-3.2.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:374ffebaa5fbd10919cd599e5cf8ee18bae70c11f9d61e73db79826c8c93d6f9"}, + {file = "greenlet-3.2.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8b89e5d44f55372efc6072f59ced5ed1efb7b44213dab5ad7e0caba0232c6545"}, + {file = "greenlet-3.2.1-cp39-cp39-win32.whl", hash = "sha256:b7503d6b8bbdac6bbacf5a8c094f18eab7553481a1830975799042f26c9e101b"}, + {file = "greenlet-3.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:e98328b8b8f160925d6b1c5b1879d8e64f6bd8cf11472b7127d579da575b77d9"}, + {file = "greenlet-3.2.1.tar.gz", hash = "sha256:9f4dd4b4946b14bb3bf038f81e1d2e535b7d94f1b2a59fdba1293cd9c1a0a4d7"}, ] [package.extras] @@ -918,6 +856,7 @@ version = "0.15.4" description = "Simplifies gRPC interceptors" optional = false python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ {file = "grpc-interceptor-0.15.4.tar.gz", hash = "sha256:1f45c0bcb58b6f332f37c637632247c9b02bc6af0fdceb7ba7ce8d2ebbfb0926"}, {file = "grpc_interceptor-0.15.4-py3-none-any.whl", hash = "sha256:0035f33228693ed3767ee49d937bac424318db173fef4d2d0170b3215f254d9d"}, @@ -931,61 +870,67 @@ testing = ["protobuf (>=4.21.9)"] [[package]] name = "grpcio" -version = "1.64.1" +version = "1.71.0" description = "HTTP/2-based RPC framework" optional = false -python-versions = ">=3.8" -files = [ - {file = "grpcio-1.64.1-cp310-cp310-linux_armv7l.whl", hash = "sha256:55697ecec192bc3f2f3cc13a295ab670f51de29884ca9ae6cd6247df55df2502"}, - {file = "grpcio-1.64.1-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:3b64ae304c175671efdaa7ec9ae2cc36996b681eb63ca39c464958396697daff"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:bac71b4b28bc9af61efcdc7630b166440bbfbaa80940c9a697271b5e1dabbc61"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c024ffc22d6dc59000faf8ad781696d81e8e38f4078cb0f2630b4a3cf231a90"}, - {file = "grpcio-1.64.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e7cd5c1325f6808b8ae31657d281aadb2a51ac11ab081ae335f4f7fc44c1721d"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0a2813093ddb27418a4c99f9b1c223fab0b053157176a64cc9db0f4557b69bd9"}, - {file = "grpcio-1.64.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2981c7365a9353f9b5c864595c510c983251b1ab403e05b1ccc70a3d9541a73b"}, - {file = "grpcio-1.64.1-cp310-cp310-win32.whl", hash = "sha256:1262402af5a511c245c3ae918167eca57342c72320dffae5d9b51840c4b2f86d"}, - {file = "grpcio-1.64.1-cp310-cp310-win_amd64.whl", hash = "sha256:19264fc964576ddb065368cae953f8d0514ecc6cb3da8903766d9fb9d4554c33"}, - {file = "grpcio-1.64.1-cp311-cp311-linux_armv7l.whl", hash = "sha256:58b1041e7c870bb30ee41d3090cbd6f0851f30ae4eb68228955d973d3efa2e61"}, - {file = "grpcio-1.64.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bbc5b1d78a7822b0a84c6f8917faa986c1a744e65d762ef6d8be9d75677af2ca"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:5841dd1f284bd1b3d8a6eca3a7f062b06f1eec09b184397e1d1d43447e89a7ae"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8caee47e970b92b3dd948371230fcceb80d3f2277b3bf7fbd7c0564e7d39068e"}, - {file = "grpcio-1.64.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73819689c169417a4f978e562d24f2def2be75739c4bed1992435d007819da1b"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6503b64c8b2dfad299749cad1b595c650c91e5b2c8a1b775380fcf8d2cbba1e9"}, - {file = "grpcio-1.64.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1de403fc1305fd96cfa75e83be3dee8538f2413a6b1685b8452301c7ba33c294"}, - {file = "grpcio-1.64.1-cp311-cp311-win32.whl", hash = "sha256:d4d29cc612e1332237877dfa7fe687157973aab1d63bd0f84cf06692f04c0367"}, - {file = "grpcio-1.64.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e56462b05a6f860b72f0fa50dca06d5b26543a4e88d0396259a07dc30f4e5aa"}, - {file = "grpcio-1.64.1-cp312-cp312-linux_armv7l.whl", hash = "sha256:4657d24c8063e6095f850b68f2d1ba3b39f2b287a38242dcabc166453e950c59"}, - {file = "grpcio-1.64.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:62b4e6eb7bf901719fce0ca83e3ed474ae5022bb3827b0a501e056458c51c0a1"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:ee73a2f5ca4ba44fa33b4d7d2c71e2c8a9e9f78d53f6507ad68e7d2ad5f64a22"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:198908f9b22e2672a998870355e226a725aeab327ac4e6ff3a1399792ece4762"}, - {file = "grpcio-1.64.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:39b9d0acaa8d835a6566c640f48b50054f422d03e77e49716d4c4e8e279665a1"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5e42634a989c3aa6049f132266faf6b949ec2a6f7d302dbb5c15395b77d757eb"}, - {file = "grpcio-1.64.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1a82e0b9b3022799c336e1fc0f6210adc019ae84efb7321d668129d28ee1efb"}, - {file = "grpcio-1.64.1-cp312-cp312-win32.whl", hash = "sha256:55260032b95c49bee69a423c2f5365baa9369d2f7d233e933564d8a47b893027"}, - {file = "grpcio-1.64.1-cp312-cp312-win_amd64.whl", hash = "sha256:c1a786ac592b47573a5bb7e35665c08064a5d77ab88a076eec11f8ae86b3e3f6"}, - {file = "grpcio-1.64.1-cp38-cp38-linux_armv7l.whl", hash = "sha256:a011ac6c03cfe162ff2b727bcb530567826cec85eb8d4ad2bfb4bd023287a52d"}, - {file = "grpcio-1.64.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:4d6dab6124225496010bd22690f2d9bd35c7cbb267b3f14e7a3eb05c911325d4"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:a5e771d0252e871ce194d0fdcafd13971f1aae0ddacc5f25615030d5df55c3a2"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3c1b90ab93fed424e454e93c0ed0b9d552bdf1b0929712b094f5ecfe7a23ad"}, - {file = "grpcio-1.64.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20405cb8b13fd779135df23fabadc53b86522d0f1cba8cca0e87968587f50650"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:0cc79c982ccb2feec8aad0e8fb0d168bcbca85bc77b080d0d3c5f2f15c24ea8f"}, - {file = "grpcio-1.64.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3a035c37ce7565b8f4f35ff683a4db34d24e53dc487e47438e434eb3f701b2a"}, - {file = "grpcio-1.64.1-cp38-cp38-win32.whl", hash = "sha256:1257b76748612aca0f89beec7fa0615727fd6f2a1ad580a9638816a4b2eb18fd"}, - {file = "grpcio-1.64.1-cp38-cp38-win_amd64.whl", hash = "sha256:0a12ddb1678ebc6a84ec6b0487feac020ee2b1659cbe69b80f06dbffdb249122"}, - {file = "grpcio-1.64.1-cp39-cp39-linux_armv7l.whl", hash = 
"sha256:75dbbf415026d2862192fe1b28d71f209e2fd87079d98470db90bebe57b33179"}, - {file = "grpcio-1.64.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e3d9f8d1221baa0ced7ec7322a981e28deb23749c76eeeb3d33e18b72935ab62"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5f8b75f64d5d324c565b263c67dbe4f0af595635bbdd93bb1a88189fc62ed2e5"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c84ad903d0d94311a2b7eea608da163dace97c5fe9412ea311e72c3684925602"}, - {file = "grpcio-1.64.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:940e3ec884520155f68a3b712d045e077d61c520a195d1a5932c531f11883489"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f10193c69fc9d3d726e83bbf0f3d316f1847c3071c8c93d8090cf5f326b14309"}, - {file = "grpcio-1.64.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac15b6c2c80a4d1338b04d42a02d376a53395ddf0ec9ab157cbaf44191f3ffdd"}, - {file = "grpcio-1.64.1-cp39-cp39-win32.whl", hash = "sha256:03b43d0ccf99c557ec671c7dede64f023c7da9bb632ac65dbc57f166e4970040"}, - {file = "grpcio-1.64.1-cp39-cp39-win_amd64.whl", hash = "sha256:ed6091fa0adcc7e4ff944090cf203a52da35c37a130efa564ded02b7aff63bcd"}, - {file = "grpcio-1.64.1.tar.gz", hash = "sha256:8d51dd1c59d5fa0f34266b80a3805ec29a1f26425c2a54736133f6d87fc4968a"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "grpcio-1.71.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:c200cb6f2393468142eb50ab19613229dcc7829b5ccee8b658a36005f6669fdd"}, + {file = "grpcio-1.71.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b2266862c5ad664a380fbbcdbdb8289d71464c42a8c29053820ee78ba0119e5d"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:0ab8b2864396663a5b0b0d6d79495657ae85fa37dcb6498a2669d067c65c11ea"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c30f393f9d5ff00a71bb56de4aa75b8fe91b161aeb61d39528db6b768d7eac69"}, + {file = "grpcio-1.71.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f250ff44843d9a0615e350c77f890082102a0318d66a99540f54769c8766ab73"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e6d8de076528f7c43a2f576bc311799f89d795aa6c9b637377cc2b1616473804"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:9b91879d6da1605811ebc60d21ab6a7e4bae6c35f6b63a061d61eb818c8168f6"}, + {file = "grpcio-1.71.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f71574afdf944e6652203cd1badcda195b2a27d9c83e6d88dc1ce3cfb73b31a5"}, + {file = "grpcio-1.71.0-cp310-cp310-win32.whl", hash = "sha256:8997d6785e93308f277884ee6899ba63baafa0dfb4729748200fcc537858a509"}, + {file = "grpcio-1.71.0-cp310-cp310-win_amd64.whl", hash = "sha256:7d6ac9481d9d0d129224f6d5934d5832c4b1cddb96b59e7eba8416868909786a"}, + {file = "grpcio-1.71.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:d6aa986318c36508dc1d5001a3ff169a15b99b9f96ef5e98e13522c506b37eef"}, + {file = "grpcio-1.71.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:d2c170247315f2d7e5798a22358e982ad6eeb68fa20cf7a820bb74c11f0736e7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:e6f83a583ed0a5b08c5bc7a3fe860bb3c2eac1f03f1f63e0bc2091325605d2b7"}, + {file = "grpcio-1.71.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be74ddeeb92cc87190e0e376dbc8fc7736dbb6d3d454f2fa1f5be1dee26b9d7"}, + {file = 
"grpcio-1.71.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd0dfbe4d5eb1fcfec9490ca13f82b089a309dc3678e2edabc144051270a66e"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a2242d6950dc892afdf9e951ed7ff89473aaf744b7d5727ad56bdaace363722b"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0fa05ee31a20456b13ae49ad2e5d585265f71dd19fbd9ef983c28f926d45d0a7"}, + {file = "grpcio-1.71.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3d081e859fb1ebe176de33fc3adb26c7d46b8812f906042705346b314bde32c3"}, + {file = "grpcio-1.71.0-cp311-cp311-win32.whl", hash = "sha256:d6de81c9c00c8a23047136b11794b3584cdc1460ed7cbc10eada50614baa1444"}, + {file = "grpcio-1.71.0-cp311-cp311-win_amd64.whl", hash = "sha256:24e867651fc67717b6f896d5f0cac0ec863a8b5fb7d6441c2ab428f52c651c6b"}, + {file = "grpcio-1.71.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:0ff35c8d807c1c7531d3002be03221ff9ae15712b53ab46e2a0b4bb271f38537"}, + {file = "grpcio-1.71.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:b78a99cd1ece4be92ab7c07765a0b038194ded2e0a26fd654591ee136088d8d7"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc1a1231ed23caac1de9f943d031f1bc38d0f69d2a3b243ea0d664fc1fbd7fec"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6beeea5566092c5e3c4896c6d1d307fb46b1d4bdf3e70c8340b190a69198594"}, + {file = "grpcio-1.71.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5170929109450a2c031cfe87d6716f2fae39695ad5335d9106ae88cc32dc84c"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:5b08d03ace7aca7b2fadd4baf291139b4a5f058805a8327bfe9aece7253b6d67"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f903017db76bf9cc2b2d8bdd37bf04b505bbccad6be8a81e1542206875d0e9db"}, + {file = "grpcio-1.71.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:469f42a0b410883185eab4689060a20488a1a0a00f8bbb3cbc1061197b4c5a79"}, + {file = "grpcio-1.71.0-cp312-cp312-win32.whl", hash = "sha256:ad9f30838550695b5eb302add33f21f7301b882937460dd24f24b3cc5a95067a"}, + {file = "grpcio-1.71.0-cp312-cp312-win_amd64.whl", hash = "sha256:652350609332de6dac4ece254e5d7e1ff834e203d6afb769601f286886f6f3a8"}, + {file = "grpcio-1.71.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:cebc1b34ba40a312ab480ccdb396ff3c529377a2fce72c45a741f7215bfe8379"}, + {file = "grpcio-1.71.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:85da336e3649a3d2171e82f696b5cad2c6231fdd5bad52616476235681bee5b3"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:f9a412f55bb6e8f3bb000e020dbc1e709627dcb3a56f6431fa7076b4c1aab0db"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47be9584729534660416f6d2a3108aaeac1122f6b5bdbf9fd823e11fe6fbaa29"}, + {file = "grpcio-1.71.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c9c80ac6091c916db81131d50926a93ab162a7e97e4428ffc186b6e80d6dda4"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:789d5e2a3a15419374b7b45cd680b1e83bbc1e52b9086e49308e2c0b5bbae6e3"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1be857615e26a86d7363e8a163fade914595c81fec962b3d514a4b1e8760467b"}, + {file = "grpcio-1.71.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:a76d39b5fafd79ed604c4be0a869ec3581a172a707e2a8d7a4858cb05a5a7637"}, + {file = "grpcio-1.71.0-cp313-cp313-win32.whl", hash = "sha256:74258dce215cb1995083daa17b379a1a5a87d275387b7ffe137f1d5131e2cfbb"}, + {file = "grpcio-1.71.0-cp313-cp313-win_amd64.whl", hash = "sha256:22c3bc8d488c039a199f7a003a38cb7635db6656fa96437a8accde8322ce2366"}, + {file = "grpcio-1.71.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c6a0a28450c16809f94e0b5bfe52cabff63e7e4b97b44123ebf77f448534d07d"}, + {file = "grpcio-1.71.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:a371e6b6a5379d3692cc4ea1cb92754d2a47bdddeee755d3203d1f84ae08e03e"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:39983a9245d37394fd59de71e88c4b295eb510a3555e0a847d9965088cdbd033"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9182e0063112e55e74ee7584769ec5a0b4f18252c35787f48738627e23a62b97"}, + {file = "grpcio-1.71.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693bc706c031aeb848849b9d1c6b63ae6bcc64057984bb91a542332b75aa4c3d"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20e8f653abd5ec606be69540f57289274c9ca503ed38388481e98fa396ed0b41"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8700a2a57771cc43ea295296330daaddc0d93c088f0a35cc969292b6db959bf3"}, + {file = "grpcio-1.71.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d35a95f05a8a2cbe8e02be137740138b3b2ea5f80bd004444e4f9a1ffc511e32"}, + {file = "grpcio-1.71.0-cp39-cp39-win32.whl", hash = "sha256:f9c30c464cb2ddfbc2ddf9400287701270fdc0f14be5f08a1e3939f1e749b455"}, + {file = "grpcio-1.71.0-cp39-cp39-win_amd64.whl", hash = "sha256:63e41b91032f298b3e973b3fa4093cbbc620c875e2da7b93e249d4728b54559a"}, + {file = "grpcio-1.71.0.tar.gz", hash = "sha256:2b85f7820475ad3edec209d3d89a7909ada16caab05d3f2e08a7e8ae3200a55c"}, ] [package.extras] -protobuf = ["grpcio-tools (>=1.64.1)"] +protobuf = ["grpcio-tools (>=1.71.0)"] [[package]] name = "h11" @@ -993,6 +938,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -1004,6 +950,7 @@ version = "1.3.0" description = "Pythonic HTML generation/templating (no template files)" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "html5tagger-1.3.0-py3-none-any.whl", hash = "sha256:ce14313515edffec8ed8a36c5890d023922641171b4e6e5774ad1a74998f5351"}, {file = "html5tagger-1.3.0.tar.gz", hash = "sha256:84fa3dfb49e5c83b79bbd856ab7b1de8e2311c3bb46a8be925f119e3880a8da9"}, @@ -1011,13 +958,14 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.8" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.8-py3-none-any.whl", hash = "sha256:5254cf149bcb5f75e9d1b2b9f729ea4a4b883d1ad7379fc632b727cec23674be"}, + {file = "httpcore-1.0.8.tar.gz", hash = "sha256:86e94505ed24ea06514883fd44d2bc02d90e77e7979c8eb71b90f41d364a1bad"}, ] [package.dependencies] @@ -1028,65 +976,74 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httptools" -version = "0.6.1" +version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = false python-versions = ">=3.8.0" -files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = 
"httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +groups = ["main"] +files = [ + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = 
"httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + {file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, ] [package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] +test = ["Cython (>=0.29.24)"] [[package]] name = "httpx" -version = "0.28.0" +version = "0.28.1" description = "The next generation HTTP client." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc"}, - {file = "httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0"}, + {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, + {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, ] [package.dependencies] @@ -1096,7 +1053,7 @@ httpcore = "==1.*" idna = "*" [package.extras] -brotli = ["brotli", "brotlicffi"] +brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] @@ -1104,13 +1061,14 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "httpx-ws" -version = "0.6.0" +version = "0.7.2" description = "WebSockets support for HTTPX" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "httpx_ws-0.6.0-py3-none-any.whl", hash = "sha256:437cfca94519a4e6ae06eb5573192df6c0da85c22b1a19cc1ea0b02b05a51d25"}, - {file = "httpx_ws-0.6.0.tar.gz", hash = "sha256:60218f531fb474a2143af38568f4b7d94ba356780973443365c8e2c87882bb8c"}, + {file = "httpx_ws-0.7.2-py3-none-any.whl", hash = "sha256:dd7bf9dbaa96dcd5cef1af3a7e1130cfac068bebecce25a74145022f5a8427a3"}, + {file = "httpx_ws-0.7.2.tar.gz", hash = "sha256:93edea6c8fc313464fc287bff7d2ad20e6196b7754c76f946f73b4af79886d4e"}, ] [package.dependencies] @@ -1121,21 +1079,26 @@ wsproto = "*" [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "inflect" version = "5.6.2" description = "Correctly generate plurals, singular nouns, ordinals, indefinite articles; convert numbers to words" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "inflect-5.6.2-py3-none-any.whl", hash = "sha256:b45d91a4a28a4e617ff1821117439b06eaa86e2a4573154af0149e9be6687238"}, {file = "inflect-5.6.2.tar.gz", hash = "sha256:aadc7ed73928f5e014129794bbac03058cca35d0a973a5fc4eb45c7fa26005f9"}, @@ -1143,18 +1106,7 @@ files = [ [package.extras] docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - -[[package]] -name = "inflection" -version = "0.5.1" -description = "A port of Ruby on Rails inflector to Python" -optional = false -python-versions = ">=3.5" -files = [ - {file = 
"inflection-0.5.1-py2.py3-none-any.whl", hash = "sha256:f38b2b640938a4f35ade69ac3d053042959b62a0f1076a5bbaa1b9526605a8a2"}, - {file = "inflection-0.5.1.tar.gz", hash = "sha256:1a29730d366e996aaacffb2f1f1cb9593dc38e2ddd30c91250c6dde09ea9b417"}, -] +testing = ["pygments", "pytest (>=6)", "pytest-black (>=0.3.7) ; platform_python_implementation != \"PyPy\"", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1) ; platform_python_implementation != \"PyPy\""] [[package]] name = "isort" @@ -1162,6 +1114,7 @@ version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" +groups = ["main"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -1172,13 +1125,14 @@ colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -1187,15 +1141,28 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jmespath" +version = "1.0.1" +description = "JSON Matching Expressions" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "jmespath-1.0.1-py3-none-any.whl", hash = "sha256:02e2e4cc71b5bcab88332eebf907519190dd9e6e82107fa7f83b1003a6252980"}, + {file = "jmespath-1.0.1.tar.gz", hash = "sha256:90261b206d6defd58fdd5e85f478bf633a2901798906be2ad389150c5c60edbe"}, +] + [[package]] name = "kr8s" -version = "0.18.1" +version = "0.20.7" description = "A Kubernetes API library" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "kr8s-0.18.1-py3-none-any.whl", hash = "sha256:192d659c70c7650e7641c3c69a656ac16e51672118468eef8224ea60009932c4"}, - {file = "kr8s-0.18.1.tar.gz", hash = "sha256:73c864c108e2f5159faab8dba9833011d586918f4520dfc64594df7b7907493f"}, + {file = "kr8s-0.20.7-py3-none-any.whl", hash = "sha256:e489b97ff513c167f427f479ad5420c78adffd1a6ce5033b079109374200c0c6"}, + {file = "kr8s-0.20.7.tar.gz", hash = "sha256:ac45e966beea0f6f92f635b3e61e64b8e27962b4825d77b814a663e819a8ec16"}, ] [package.dependencies] @@ -1203,7 +1170,7 @@ anyio = ">=3.7.0" asyncache = ">=0.3.1" cryptography = ">=35" httpx = ">=0.24.1" -httpx-ws = ">=0.5.2" +httpx-ws = ">=0.7.0" python-box = ">=7.0.1" python-jsonpath = ">=0.7.1" pyyaml = ">=6.0" @@ -1219,6 +1186,7 @@ version = "31.0.0" description = "Kubernetes python client" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "kubernetes-31.0.0-py2.py3-none-any.whl", hash = "sha256:bf141e2d380c8520eada8b351f4e319ffee9636328c137aa432bc486ca1200e1"}, {file = "kubernetes-31.0.0.tar.gz", hash = 
"sha256:28945de906c8c259c1ebe62703b56a03b714049372196f854105afe4e6d014c0"}, @@ -1240,15 +1208,33 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] +[[package]] +name = "lark" +version = "0.12.0" +description = "a modern parsing library" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "lark-0.12.0-py2.py3-none-any.whl", hash = "sha256:ed1d891cbcf5151ead1c1d14663bf542443e579e63a76ae175b01b899bd854ca"}, + {file = "lark-0.12.0.tar.gz", hash = "sha256:7da76fcfddadabbbbfd949bbae221efd33938451d90b1fefbbc423c3cccf48ef"}, +] + +[package.extras] +atomic-cache = ["atomicwrites"] +nearley = ["js2py"] +regex = ["regex"] + [[package]] name = "mako" -version = "1.3.5" +version = "1.3.10" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, + {file = "mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59"}, + {file = "mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28"}, ] [package.dependencies] @@ -1259,84 +1245,102 @@ babel = ["Babel"] lingua = ["lingua"] testing = ["pytest"] +[[package]] +name = "markdown-code-runner" +version = "2.2.0" +description = "Automatically execute code blocks within a Markdown file and update the output in-place" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "markdown_code_runner-2.2.0-py3-none-any.whl", hash = "sha256:d8812c48ad3fd4a3f3725dfcd5a1b7e5baf7216855eeea8a92c7fd9120717ac6"}, + {file = "markdown_code_runner-2.2.0.tar.gz", hash = "sha256:3c495998a437bc7d7a4b1a5ce518bce10cf5ba0fa69c569fee1e32c5238603c4"}, +] + +[package.extras] +test = ["coverage", "pre-commit", "pytest", "pytest-cov"] + [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] name = "marshmallow" -version = "3.23.1" +version = "3.26.1" description = "A lightweight library for converting complex datatypes to and from native Python datatypes." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "marshmallow-3.23.1-py3-none-any.whl", hash = "sha256:fece2eb2c941180ea1b7fcbd4a83c51bfdd50093fdd3ad2585ee5e1df2508491"}, - {file = "marshmallow-3.23.1.tar.gz", hash = "sha256:3a8dfda6edd8dcdbf216c0ede1d1e78d230a6dc9c5a088f58c4083b974a0d468"}, + {file = "marshmallow-3.26.1-py3-none-any.whl", hash = "sha256:3350409f20a70a7e4e11a27661187b77cdcaeb20abca41c1454fe33636bea09c"}, + {file = "marshmallow-3.26.1.tar.gz", hash = "sha256:e6d8affb6cb61d39d26402096dc0aee12d5a26d490a121f118d2e81dc0719dc6"}, ] [package.dependencies] @@ -1344,143 +1348,154 @@ packaging = ">=17.0" [package.extras] dev = ["marshmallow[tests]", "pre-commit (>=3.5,<5.0)", "tox"] -docs = ["alabaster (==1.0.0)", "autodocsumm (==0.2.14)", "sphinx (==8.1.3)", "sphinx-issues (==5.0.0)", "sphinx-version-warning (==1.1.2)"] +docs = ["autodocsumm (==0.2.14)", "furo (==2024.8.6)", "sphinx (==8.1.3)", "sphinx-copybutton (==0.5.2)", "sphinx-issues (==5.0.0)", "sphinxext-opengraph (==0.9.1)"] tests = ["pytest", "simplejson"] [[package]] -name = "multidict" -version = "6.0.5" -description = "multidict implementation" +name = "monotonic" +version = "1.6" +description = "An implementation of time.monotonic() for Python 2 & < 3.3" optional = false -python-versions = ">=3.7" +python-versions = "*" +groups = ["main"] files = [ - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:228b644ae063c10e7f324ab1ab6b548bdf6f8b47f3ec234fef1093bc2735e5f9"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:896ebdcf62683551312c30e20614305f53125750803b614e9e6ce74a96232604"}, - {file = "multidict-6.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:411bf8515f3be9813d06004cac41ccf7d1cd46dfe233705933dd163b60e37600"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:1d147090048129ce3c453f0292e7697d333db95e52616b3793922945804a433c"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:215ed703caf15f578dca76ee6f6b21b7603791ae090fbf1ef9d865571039ade5"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c6390cf87ff6234643428991b7359b5f59cc15155695deb4eda5c777d2b880f"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fd81c4ebdb4f214161be351eb5bcf385426bf023041da2fd9e60681f3cebae"}, - {file = "multidict-6.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cc2ad10255f903656017363cd59436f2111443a76f996584d1077e43ee51182"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6939c95381e003f54cd4c5516740faba40cf5ad3eeff460c3ad1d3e0ea2549bf"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:220dd781e3f7af2c2c1053da9fa96d9cf3072ca58f057f4c5adaaa1cab8fc442"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:766c8f7511df26d9f11cd3a8be623e59cca73d44643abab3f8c8c07620524e4a"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fe5d7785250541f7f5019ab9cba2c71169dc7d74d0f45253f8313f436458a4ef"}, - {file = "multidict-6.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c1c1496e73051918fcd4f58ff2e0f2f3066d1c76a0c6aeffd9b45d53243702cc"}, - {file = "multidict-6.0.5-cp310-cp310-win32.whl", hash = "sha256:7afcdd1fc07befad18ec4523a782cde4e93e0a2bf71239894b8d61ee578c1319"}, - {file = "multidict-6.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:99f60d34c048c5c2fabc766108c103612344c46e35d4ed9ae0673d33c8fb26e8"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f285e862d2f153a70586579c15c44656f888806ed0e5b56b64489afe4a2dbfba"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:53689bb4e102200a4fafa9de9c7c3c212ab40a7ab2c8e474491914d2305f187e"}, - {file = "multidict-6.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:612d1156111ae11d14afaf3a0669ebf6c170dbb735e510a7438ffe2369a847fd"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7be7047bd08accdb7487737631d25735c9a04327911de89ff1b26b81745bd4e3"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de170c7b4fe6859beb8926e84f7d7d6c693dfe8e27372ce3b76f01c46e489fcf"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04bde7a7b3de05732a4eb39c94574db1ec99abb56162d6c520ad26f83267de29"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85f67aed7bb647f93e7520633d8f51d3cbc6ab96957c71272b286b2f30dc70ed"}, - {file = "multidict-6.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425bf820055005bfc8aa9a0b99ccb52cc2f4070153e34b701acc98d201693733"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3eb1ceec286eba8220c26f3b0096cf189aea7057b6e7b7a2e60ed36b373b77f"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7901c05ead4b3fb75113fb1dd33eb1253c6d3ee37ce93305acd9d38e0b5f21a4"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = 
"sha256:e0e79d91e71b9867c73323a3444724d496c037e578a0e1755ae159ba14f4f3d1"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:29bfeb0dff5cb5fdab2023a7a9947b3b4af63e9c47cae2a10ad58394b517fddc"}, - {file = "multidict-6.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e030047e85cbcedbfc073f71836d62dd5dadfbe7531cae27789ff66bc551bd5e"}, - {file = "multidict-6.0.5-cp311-cp311-win32.whl", hash = "sha256:2f4848aa3baa109e6ab81fe2006c77ed4d3cd1e0ac2c1fbddb7b1277c168788c"}, - {file = "multidict-6.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:2faa5ae9376faba05f630d7e5e6be05be22913782b927b19d12b8145968a85ea"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:51d035609b86722963404f711db441cf7134f1889107fb171a970c9701f92e1e"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:cbebcd5bcaf1eaf302617c114aa67569dd3f090dd0ce8ba9e35e9985b41ac35b"}, - {file = "multidict-6.0.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2ffc42c922dbfddb4a4c3b438eb056828719f07608af27d163191cb3e3aa6cc5"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ceb3b7e6a0135e092de86110c5a74e46bda4bd4fbfeeb3a3bcec79c0f861e450"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79660376075cfd4b2c80f295528aa6beb2058fd289f4c9252f986751a4cd0496"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e4428b29611e989719874670fd152b6625500ad6c686d464e99f5aaeeaca175a"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d84a5c3a5f7ce6db1f999fb9438f686bc2e09d38143f2d93d8406ed2dd6b9226"}, - {file = "multidict-6.0.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76c0de87358b192de7ea9649beb392f107dcad9ad27276324c24c91774ca5271"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:79a6d2ba910adb2cbafc95dad936f8b9386e77c84c35bc0add315b856d7c3abb"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:92d16a3e275e38293623ebf639c471d3e03bb20b8ebb845237e0d3664914caef"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:fb616be3538599e797a2017cccca78e354c767165e8858ab5116813146041a24"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:14c2976aa9038c2629efa2c148022ed5eb4cb939e15ec7aace7ca932f48f9ba6"}, - {file = "multidict-6.0.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:435a0984199d81ca178b9ae2c26ec3d49692d20ee29bc4c11a2a8d4514c67eda"}, - {file = "multidict-6.0.5-cp312-cp312-win32.whl", hash = "sha256:9fe7b0653ba3d9d65cbe7698cca585bf0f8c83dbbcc710db9c90f478e175f2d5"}, - {file = "multidict-6.0.5-cp312-cp312-win_amd64.whl", hash = "sha256:01265f5e40f5a17f8241d52656ed27192be03bfa8764d88e8220141d1e4b3556"}, - {file = "multidict-6.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:19fe01cea168585ba0f678cad6f58133db2aa14eccaf22f88e4a6dccadfad8b3"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bf7a982604375a8d49b6cc1b781c1747f243d91b81035a9b43a2126c04766f5"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:107c0cdefe028703fb5dafe640a409cb146d44a6ae201e55b35a4af8e95457dd"}, - {file = 
"multidict-6.0.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:403c0911cd5d5791605808b942c88a8155c2592e05332d2bf78f18697a5fa15e"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aeaf541ddbad8311a87dd695ed9642401131ea39ad7bc8cf3ef3967fd093b626"}, - {file = "multidict-6.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e4972624066095e52b569e02b5ca97dbd7a7ddd4294bf4e7247d52635630dd83"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d946b0a9eb8aaa590df1fe082cee553ceab173e6cb5b03239716338629c50c7a"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b55358304d7a73d7bdf5de62494aaf70bd33015831ffd98bc498b433dfe5b10c"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:a3145cb08d8625b2d3fee1b2d596a8766352979c9bffe5d7833e0503d0f0b5e5"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d65f25da8e248202bd47445cec78e0025c0fe7582b23ec69c3b27a640dd7a8e3"}, - {file = "multidict-6.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c9bf56195c6bbd293340ea82eafd0071cb3d450c703d2c93afb89f93b8386ccc"}, - {file = "multidict-6.0.5-cp37-cp37m-win32.whl", hash = "sha256:69db76c09796b313331bb7048229e3bee7928eb62bab5e071e9f7fcc4879caee"}, - {file = "multidict-6.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:fce28b3c8a81b6b36dfac9feb1de115bab619b3c13905b419ec71d03a3fc1423"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76f067f5121dcecf0d63a67f29080b26c43c71a98b10c701b0677e4a065fbd54"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b82cc8ace10ab5bd93235dfaab2021c70637005e1ac787031f4d1da63d493c1d"}, - {file = "multidict-6.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5cb241881eefd96b46f89b1a056187ea8e9ba14ab88ba632e68d7a2ecb7aadf7"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8e94e6912639a02ce173341ff62cc1201232ab86b8a8fcc05572741a5dc7d93"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09a892e4a9fb47331da06948690ae38eaa2426de97b4ccbfafbdcbe5c8f37ff8"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55205d03e8a598cfc688c71ca8ea5f66447164efff8869517f175ea632c7cb7b"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37b15024f864916b4951adb95d3a80c9431299080341ab9544ed148091b53f50"}, - {file = "multidict-6.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2a1dee728b52b33eebff5072817176c172050d44d67befd681609b4746e1c2e"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:edd08e6f2f1a390bf137080507e44ccc086353c8e98c657e666c017718561b89"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:60d698e8179a42ec85172d12f50b1668254628425a6bd611aba022257cac1386"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:3d25f19500588cbc47dc19081d78131c32637c25804df8414463ec908631e453"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4cc0ef8b962ac7a5e62b9e826bd0cd5040e7d401bc45a6835910ed699037a461"}, - {file = "multidict-6.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:eca2e9d0cc5a889850e9bbd68e98314ada174ff6ccd1129500103df7a94a7a44"}, - {file = "multidict-6.0.5-cp38-cp38-win32.whl", hash = "sha256:4a6a4f196f08c58c59e0b8ef8ec441d12aee4125a7d4f4fef000ccb22f8d7241"}, - {file = "multidict-6.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:0275e35209c27a3f7951e1ce7aaf93ce0d163b28948444bec61dd7badc6d3f8c"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e7be68734bd8c9a513f2b0cfd508802d6609da068f40dc57d4e3494cefc92929"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1d9ea7a7e779d7a3561aade7d596649fbecfa5c08a7674b11b423783217933f9"}, - {file = "multidict-6.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ea1456df2a27c73ce51120fa2f519f1bea2f4a03a917f4a43c8707cf4cbbae1a"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf590b134eb70629e350691ecca88eac3e3b8b3c86992042fb82e3cb1830d5e1"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5c0631926c4f58e9a5ccce555ad7747d9a9f8b10619621f22f9635f069f6233e"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dce1c6912ab9ff5f179eaf6efe7365c1f425ed690b03341911bf4939ef2f3046"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0868d64af83169e4d4152ec612637a543f7a336e4a307b119e98042e852ad9c"}, - {file = "multidict-6.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:141b43360bfd3bdd75f15ed811850763555a251e38b2405967f8e25fb43f7d40"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7df704ca8cf4a073334e0427ae2345323613e4df18cc224f647f251e5e75a527"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6214c5a5571802c33f80e6c84713b2c79e024995b9c5897f794b43e714daeec9"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:cd6c8fca38178e12c00418de737aef1261576bd1b6e8c6134d3e729a4e858b38"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e02021f87a5b6932fa6ce916ca004c4d441509d33bbdbeca70d05dff5e9d2479"}, - {file = "multidict-6.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ebd8d160f91a764652d3e51ce0d2956b38efe37c9231cd82cfc0bed2e40b581c"}, - {file = "multidict-6.0.5-cp39-cp39-win32.whl", hash = "sha256:04da1bb8c8dbadf2a18a452639771951c662c5ad03aefe4884775454be322c9b"}, - {file = "multidict-6.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:d6f6d4f185481c9669b9447bf9d9cf3b95a0e9df9d169bbc17e363b7d5487755"}, - {file = "multidict-6.0.5-py3-none-any.whl", hash = "sha256:0d63c74e3d7ab26de115c49bffc92cc77ed23395303d496eae515d4204a625e7"}, - {file = "multidict-6.0.5.tar.gz", hash = "sha256:f7e301075edaf50500f0b341543c41194d8df3ae5caf4702f2095f3ca73dd8da"}, + {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, + {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, ] [[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
+name = "multidict" +version = "6.4.3" +description = "multidict implementation" optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5"}, + {file = "multidict-6.4.3-cp310-cp310-win32.whl", hash = "sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e"}, + {file = "multidict-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7"}, + {file = "multidict-6.4.3-cp311-cp311-win32.whl", hash = "sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378"}, + {file = "multidict-6.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde"}, + {file = 
"multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a"}, + {file = "multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124"}, + {file = "multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817"}, + {file = 
"multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8"}, + {file = "multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3"}, + {file = "multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4"}, + {file = "multidict-6.4.3-cp313-cp313t-win32.whl", hash = "sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5"}, + {file = "multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df"}, + {file = "multidict-6.4.3-cp39-cp39-win32.whl", hash = "sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f"}, + {file = "multidict-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897"}, + {file = "multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9"}, + {file = "multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec"}, ] [[package]] -name = "networkx" -version = "3.3" -description = "Python package for creating and manipulating graphs and networks" +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
optional = false -python-versions = ">=3.10" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "networkx-3.3-py3-none-any.whl", hash = "sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"}, - {file = "networkx-3.3.tar.gz", hash = "sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9"}, + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, ] -[package.extras] -default = ["matplotlib (>=3.6)", "numpy (>=1.23)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["myst-nb (>=1.0)", "numpydoc (>=1.7)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=2.0)", "pygraphviz (>=1.12)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] - [[package]] name = "oauthlib" version = "3.2.2" description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, @@ -1493,13 +1508,26 @@ signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] [[package]] name = "packaging" -version = "24.1" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "parsy" +version = "2.1" +description = "Easy-to-use parser combinators, for parsing in pure Python" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "parsy-2.1-py3-none-any.whl", hash = "sha256:8f18e7b11985e7802e7e3ecbd8291c6ca243d29820b1186e4c84605db4efffa0"}, + {file = "parsy-2.1.tar.gz", hash = "sha256:fd5dd18d7b0b61f8275ee88665f430a20c02cf5a82d88557f35330530186d7ac"}, ] [[package]] @@ -1508,6 +1536,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -1515,19 +1544,46 @@ files = [ [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.7" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, + {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.14.1)"] + +[[package]] +name = "posthog" +version = "3.25.0" +description = "Integrate PostHog into any python application." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "posthog-3.25.0-py2.py3-none-any.whl", hash = "sha256:85db78c13d1ecb11aed06fad53759c4e8fb3633442c2f3d0336bc0ce8a585d30"}, + {file = "posthog-3.25.0.tar.gz", hash = "sha256:9168f3e7a0a5571b6b1065c41b3c171fbc68bfe72c3ac0bfd6e3d2fcdb7df2ca"}, +] + +[package.dependencies] +backoff = ">=1.10.0" +distro = ">=1.5.0" +monotonic = ">=1.5" +python-dateutil = ">2.1" +requests = ">=2.7,<3.0" +six = ">=1.5" + +[package.extras] +dev = ["black", "django-stubs", "flake8", "flake8-print", "isort", "lxml", "mypy", "mypy-baseline", "pre-commit", "pydantic", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six"] +langchain = ["langchain (>=0.2.0)"] +sentry = ["django", "sentry-sdk"] +test = ["anthropic", "coverage", "django", "flake8", "freezegun (==1.5.1)", "langchain-anthropic (>=0.2.0)", "langchain-community (>=0.2.0)", "langchain-openai (>=0.2.0)", "langgraph", "mock (>=2.0.0)", "openai", "parameterized (>=0.8.1)", "pydantic", "pylint", "pytest", "pytest-asyncio", "pytest-timeout"] [[package]] name = "prometheus-client" @@ -1535,6 +1591,7 @@ version = "0.7.1" description = "Python client for the Prometheus monitoring system." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "prometheus_client-0.7.1.tar.gz", hash = "sha256:71cd24a2b3eb335cb800c7159f423df1bd4dcd5171b234be15e3f31ec9f622da"}, ] @@ -1548,6 +1605,7 @@ version = "3.0.0" description = "Exposes Prometheus monitoring metrics of Sanic apps." 
optional = false python-versions = ">=3.7,<4.0" +groups = ["main"] files = [ {file = "prometheus-sanic-3.0.0.tar.gz", hash = "sha256:06cfe8f9c843a1324fa801b9092f26470a63196b9e08fad0c0f12b49ddbf6c3c"}, {file = "prometheus_sanic-3.0.0-py3-none-any.whl", hash = "sha256:499110bf2a86f921b229083e0bcea4d489420abf6737e0d838cd234394fd91aa"}, @@ -1559,167 +1617,167 @@ sanic = ">=22.0.0" [[package]] name = "protobuf" -version = "5.29.0" +version = "5.29.4" description = "" optional = false python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "protobuf-5.29.4-cp310-abi3-win32.whl", hash = "sha256:13eb236f8eb9ec34e63fc8b1d6efd2777d062fa6aaa68268fb67cf77f6839ad7"}, + {file = "protobuf-5.29.4-cp310-abi3-win_amd64.whl", hash = "sha256:bcefcdf3976233f8a502d265eb65ea740c989bacc6c30a58290ed0e519eb4b8d"}, + {file = "protobuf-5.29.4-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:307ecba1d852ec237e9ba668e087326a67564ef83e45a0189a772ede9e854dd0"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:aec4962f9ea93c431d5714ed1be1c93f13e1a8618e70035ba2b0564d9e633f2e"}, + {file = "protobuf-5.29.4-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:d7d3f7d1d5a66ed4942d4fefb12ac4b14a29028b209d4bfb25c68ae172059922"}, + {file = "protobuf-5.29.4-cp38-cp38-win32.whl", hash = "sha256:1832f0515b62d12d8e6ffc078d7e9eb06969aa6dc13c13e1036e39d73bebc2de"}, + {file = "protobuf-5.29.4-cp38-cp38-win_amd64.whl", hash = "sha256:476cb7b14914c780605a8cf62e38c2a85f8caff2e28a6a0bad827ec7d6c85d68"}, + {file = "protobuf-5.29.4-cp39-cp39-win32.whl", hash = "sha256:fd32223020cb25a2cc100366f1dedc904e2d71d9322403224cdde5fdced0dabe"}, + {file = "protobuf-5.29.4-cp39-cp39-win_amd64.whl", hash = "sha256:678974e1e3a9b975b8bc2447fca458db5f93a2fb6b0c8db46b6675b5b5346812"}, + {file = "protobuf-5.29.4-py3-none-any.whl", hash = "sha256:3fde11b505e1597f71b875ef2fc52062b6a9740e5f7c8997ce878b6009145862"}, + {file = "protobuf-5.29.4.tar.gz", hash = "sha256:4f1dfcd7997b31ef8f53ec82781ff434a28bf71d9102ddde14d076adcfc78c99"}, +] + +[[package]] +name = "protovalidate" +version = "0.7.1" +description = "Protocol Buffer Validation for Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "protobuf-5.29.0-cp310-abi3-win32.whl", hash = "sha256:ea7fb379b257911c8c020688d455e8f74efd2f734b72dc1ea4b4d7e9fd1326f2"}, - {file = "protobuf-5.29.0-cp310-abi3-win_amd64.whl", hash = "sha256:34a90cf30c908f47f40ebea7811f743d360e202b6f10d40c02529ebd84afc069"}, - {file = "protobuf-5.29.0-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:c931c61d0cc143a2e756b1e7f8197a508de5365efd40f83c907a9febf36e6b43"}, - {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:85286a47caf63b34fa92fdc1fd98b649a8895db595cfa746c5286eeae890a0b1"}, - {file = "protobuf-5.29.0-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:0d10091d6d03537c3f902279fcf11e95372bdd36a79556311da0487455791b20"}, - {file = "protobuf-5.29.0-cp38-cp38-win32.whl", hash = "sha256:0cd67a1e5c2d88930aa767f702773b2d054e29957432d7c6a18f8be02a07719a"}, - {file = "protobuf-5.29.0-cp38-cp38-win_amd64.whl", hash = "sha256:e467f81fdd12ded9655cea3e9b83dc319d93b394ce810b556fb0f421d8613e86"}, - {file = "protobuf-5.29.0-cp39-cp39-win32.whl", hash = "sha256:17d128eebbd5d8aee80300aed7a43a48a25170af3337f6f1333d1fac2c6839ac"}, - {file = "protobuf-5.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:6c3009e22717c6cc9e6594bb11ef9f15f669b19957ad4087214d69e08a213368"}, - {file = "protobuf-5.29.0-py3-none-any.whl", hash = 
"sha256:88c4af76a73183e21061881360240c0cdd3c39d263b4e8fb570aaf83348d608f"}, - {file = "protobuf-5.29.0.tar.gz", hash = "sha256:445a0c02483869ed8513a585d80020d012c6dc60075f96fa0563a724987b1001"}, + {file = "protovalidate-0.7.1-py3-none-any.whl", hash = "sha256:6788b1baa10c2e9453c3a3eef5f87a3e9c871bc9a7110b506aefd764269c8b3e"}, + {file = "protovalidate-0.7.1.tar.gz", hash = "sha256:12bd7c126fc000c5cbee5bf0f4cd01e0ba0e353f585b0aaa68df03e788939412"}, ] +[package.dependencies] +cel-python = "*" +protobuf = "*" + [[package]] name = "psycopg" -version = "3.2.3" +version = "3.2.6" description = "PostgreSQL database adapter for Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "psycopg-3.2.3-py3-none-any.whl", hash = "sha256:644d3973fe26908c73d4be746074f6e5224b03c1101d302d9a53bf565ad64907"}, - {file = "psycopg-3.2.3.tar.gz", hash = "sha256:a5764f67c27bec8bfac85764d23c534af2c27b893550377e37ce59c12aac47a2"}, + {file = "psycopg-3.2.6-py3-none-any.whl", hash = "sha256:f3ff5488525890abb0566c429146add66b329e20d6d4835662b920cbbf90ac58"}, + {file = "psycopg-3.2.6.tar.gz", hash = "sha256:16fa094efa2698f260f2af74f3710f781e4a6f226efe9d1fd0c37f384639ed8a"}, ] [package.dependencies] -psycopg-binary = {version = "3.2.3", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} -typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} +psycopg-binary = {version = "3.2.6", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} tzdata = {version = "*", markers = "sys_platform == \"win32\""} [package.extras] -binary = ["psycopg-binary (==3.2.3)"] -c = ["psycopg-c (==3.2.3)"] -dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.11)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] +binary = ["psycopg-binary (==3.2.6) ; implementation_name != \"pypy\""] +c = ["psycopg-c (==3.2.6) ; implementation_name != \"pypy\""] +dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "isort-psycopg", "isort[colors] (>=6.0)", "mypy (>=1.14)", "pre-commit (>=4.0.1)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] pool = ["psycopg-pool"] -test = ["anyio (>=4.0)", "mypy (>=1.11)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] +test = ["anyio (>=4.0)", "mypy (>=1.14)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] [[package]] name = "psycopg-binary" -version = "3.2.3" +version = "3.2.6" description = "PostgreSQL database adapter for Python -- C optimisation distribution" optional = false python-versions = ">=3.8" -files = [ - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:965455eac8547f32b3181d5ec9ad8b9be500c10fe06193543efaaebe3e4ce70c"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:71adcc8bc80a65b776510bc39992edf942ace35b153ed7a9c6c573a6849ce308"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f73adc05452fb85e7a12ed3f69c81540a8875960739082e6ea5e28c373a30774"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8630943143c6d6ca9aefc88bbe5e76c90553f4e1a3b2dc339e67dc34aa86f7e"}, - {file = 
"psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bffb61e198a91f712cc3d7f2d176a697cb05b284b2ad150fb8edb308eba9002"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4fa2240c9fceddaa815a58f29212826fafe43ce80ff666d38c4a03fb036955"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:192a5f8496e6e1243fdd9ac20e117e667c0712f148c5f9343483b84435854c78"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64dc6e9ec64f592f19dc01a784e87267a64a743d34f68488924251253da3c818"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:79498df398970abcee3d326edd1d4655de7d77aa9aecd578154f8af35ce7bbd2"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:949551752930d5e478817e0b49956350d866b26578ced0042a61967e3fcccdea"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:80a2337e2dfb26950894c8301358961430a0304f7bfe729d34cc036474e9c9b1"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6d8f2144e0d5808c2e2aed40fbebe13869cd00c2ae745aca4b3b16a435edb056"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94253be2b57ef2fea7ffe08996067aabf56a1eb9648342c9e3bad9e10c46e045"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fda0162b0dbfa5eaed6cdc708179fa27e148cb8490c7d62e5cf30713909658ea"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c0419cdad8c70eaeb3116bb28e7b42d546f91baf5179d7556f230d40942dc78"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74fbf5dd3ef09beafd3557631e282f00f8af4e7a78fbfce8ab06d9cd5a789aae"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d784f614e4d53050cbe8abf2ae9d1aaacf8ed31ce57b42ce3bf2a48a66c3a5c"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4e76ce2475ed4885fe13b8254058be710ec0de74ebd8ef8224cf44a9a3358e5f"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5938b257b04c851c2d1e6cb2f8c18318f06017f35be9a5fe761ee1e2e344dfb7"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:257c4aea6f70a9aef39b2a77d0658a41bf05c243e2bf41895eb02220ac6306f3"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:06b5cc915e57621eebf2393f4173793ed7e3387295f07fed93ed3fb6a6ccf585"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:09baa041856b35598d335b1a74e19a49da8500acedf78164600694c0ba8ce21b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:48f8ca6ee8939bab760225b2ab82934d54330eec10afe4394a92d3f2a0c37dd6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5361ea13c241d4f0ec3f95e0bf976c15e2e451e9cc7ef2e5ccfc9d170b197a40"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb987f14af7da7c24f803111dbc7392f5070fd350146af3345103f76ea82e339"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0463a11b1cace5a6aeffaf167920707b912b8986a9c7920341c75e3686277920"}, - {file = 
"psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b7be9a6c06518967b641fb15032b1ed682fd3b0443f64078899c61034a0bca6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64a607e630d9f4b2797f641884e52b9f8e239d35943f51bef817a384ec1678fe"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fa33ead69ed133210d96af0c63448b1385df48b9c0247eda735c5896b9e6dbbf"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1f8b0d0e99d8e19923e6e07379fa00570be5182c201a8c0b5aaa9a4d4a4ea20b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:709447bd7203b0b2debab1acec23123eb80b386f6c29e7604a5d4326a11e5bd6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5e37d5027e297a627da3551a1e962316d0f88ee4ada74c768f6c9234e26346d9"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:261f0031ee6074765096a19b27ed0f75498a8338c3dcd7f4f0d831e38adf12d1"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:41fdec0182efac66b27478ac15ef54c9ebcecf0e26ed467eb7d6f262a913318b"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:07d019a786eb020c0f984691aa1b994cb79430061065a694cf6f94056c603d26"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c57615791a337378fe5381143259a6c432cdcbb1d3e6428bfb7ce59fff3fb5c"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8eb9a4e394926b93ad919cad1b0a918e9b4c846609e8c1cfb6b743683f64da0"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5905729668ef1418bd36fbe876322dcb0f90b46811bba96d505af89e6fbdce2f"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd65774ed7d65101b314808b6893e1a75b7664f680c3ef18d2e5c84d570fa393"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:700679c02f9348a0d0a2adcd33a0275717cd0d0aee9d4482b47d935023629505"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:96334bb64d054e36fed346c50c4190bad9d7c586376204f50bede21a913bf942"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9099e443d4cc24ac6872e6a05f93205ba1a231b1a8917317b07c9ef2b955f1f4"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1985ab05e9abebfbdf3163a16ebb37fbc5d49aff2bf5b3d7375ff0920bbb54cd"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:e90352d7b610b4693fad0feea48549d4315d10f1eba5605421c92bb834e90170"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:69320f05de8cdf4077ecd7fefdec223890eea232af0d58f2530cbda2871244a0"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4926ea5c46da30bec4a85907aa3f7e4ea6313145b2aa9469fdb861798daf1502"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c64c4cd0d50d5b2288ab1bcb26c7126c772bbdebdfadcd77225a77df01c4a57e"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05a1bdce30356e70a05428928717765f4a9229999421013f41338d9680d03a63"}, - {file = 
"psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad357e426b0ea5c3043b8ec905546fa44b734bf11d33b3da3959f6e4447d350"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:967b47a0fd237aa17c2748fdb7425015c394a6fb57cdad1562e46a6eb070f96d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:71db8896b942770ed7ab4efa59b22eee5203be2dfdee3c5258d60e57605d688c"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2773f850a778575dd7158a6dd072f7925b67f3ba305e2003538e8831fec77a1d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aeddf7b3b3f6e24ccf7d0edfe2d94094ea76b40e831c16eff5230e040ce3b76b"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:824c867a38521d61d62b60aca7db7ca013a2b479e428a0db47d25d8ca5067410"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:9994f7db390c17fc2bd4c09dca722fd792ff8a49bb3bdace0c50a83f22f1767d"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1303bf8347d6be7ad26d1362af2c38b3a90b8293e8d56244296488ee8591058e"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:842da42a63ecb32612bb7f5b9e9f8617eab9bc23bd58679a441f4150fcc51c96"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bb342a01c76f38a12432848e6013c57eb630103e7556cf79b705b53814c3949"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd40af959173ea0d087b6b232b855cfeaa6738f47cb2a0fd10a7f4fa8b74293f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9b60b465773a52c7d4705b0a751f7f1cdccf81dd12aee3b921b31a6e76b07b0e"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fc6d87a1c44df8d493ef44988a3ded751e284e02cdf785f746c2d357e99782a6"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f0b018e37608c3bfc6039a1dc4eb461e89334465a19916be0153c757a78ea426"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a29f5294b0b6360bfda69653697eff70aaf2908f58d1073b0acd6f6ab5b5a4f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:e56b1fd529e5dde2d1452a7d72907b37ed1b4f07fdced5d8fb1e963acfff6749"}, +groups = ["main"] +markers = "implementation_name != \"pypy\"" +files = [ + {file = "psycopg_binary-3.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1b639acb3e24243c23f75700bf6e3af7b76da92523ec7c3196a13aaf0b578453"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1b5c359173726b38d7acbb9f73270f269591d8031d099c1a70dd3f3d22b0e8a8"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3434efe7c00f505f4c1e531519dac6c701df738ba7a1328eac81118d80019132"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bca8d9643191b13193940bbf84d51ac5a747e965c230177258fb02b8043fb7a"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55fa40f11d37e6e5149a282a5fd7e0734ce55c623673bfba638480914fd1414c"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0690ac1061c655b1bcbe9284d07bf5276bc9c0d788a6c74aaf3b042e64984b83"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e9a4a9967ff650d2821d5fad6bec7b15f4c2072603e9fa3f89a39f351ade1fd3"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d6f2894cc7aee8a15fe591e8536911d9c015cb404432cf7bdac2797e54cb2ba8"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:05560c81312d7c2bee95a9860cd25198677f2320fb4a3527bc04e8cae7fcfb64"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4269cd23a485d6dd6eb6b10841c94551a53091cf0b1b6d5247a6a341f53f0d95"}, + {file = "psycopg_binary-3.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:7942f35a6f314608720116bcd9de240110ceadffd2ac5c34f68f74a31e52e46a"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7afe181f6b3eb714362e9b6a2dc2a589bff60471a1d8639fd231a4e426e01523"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34bb0fceba0773dc0bfb53224bb2c0b19dc97ea0a997a223615484cf02cae55c"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54120122d2779dcd307f49e1f921d757fe5dacdced27deab37f277eef0c52a5b"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:816aa556f63b2303e66ba6c8888a8b3f3e6e4e47049ec7a4d62c84ac60b091ca"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d19a0ba351eda9a59babf8c7c9d89c7bbc5b26bf096bc349b096bd0dd2482088"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6e197e01290ef818a092c877025fc28096adbb6d0743e313491a21aab31bd96"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:274794b4b29ef426e09086404446b61a146f5e756da71366c5a6d57abec31f7d"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:69845bdc0db519e1dfc27932cd3d5b1ecb3f72950af52a1987508ab0b52b3b55"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:66c3bed2caf0d1cabcb9365064de183b5209a7cbeaa131e79e68f350c9c963c2"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e3ae3201fe85c7f901349a2cf52f02ceca4cb97a5e2e2ac8b8a1c9a6eb747bed"}, + {file = "psycopg_binary-3.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:58f443b4df2adb59937c96775fadf4967f93d952fbcc82394446985faec11041"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f27a46ff0497e882e8c0286e8833c785b4d1a80f23e1bf606f4c90e5f9f3ce75"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b30ee4821ded7de48b8048b14952512588e7c5477b0a5965221e1798afba61a1"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e57edf3b1f5427f39660225b01f8e7b97f5cfab132092f014bf1638bc85d81d2"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c5172ce3e4ae7a4fd450070210f801e2ce6bc0f11d1208d29268deb0cda34de"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcfab3804c43571a6615e559cdc4c4115785d258a4dd71a721be033f5f5f378d"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8fa1c920cce16f1205f37b20c685c58b9656b170b8b4c93629100d342d0d118e"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2e118d818101c1608c6b5ba52a6c977614d8f05aa89467501172ba4d10588e11"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:763319a8bfeca77d31512da71f5a33459b9568a7621c481c3828c62f9c38f351"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2fbc05819560389dbece046966bc88e0f2ea77673497e274c4293b8b4c1d0703"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a57f99bb953b4bd6f32d0a9844664e7f6ca5ead9ba40e96635be3cd30794813"}, + {file = "psycopg_binary-3.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:5de6809e19a465dcb9c269675bded46a135f2d600cd99f0735afbb21ddad2af4"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:54af3fbf871baa2eb19df96fd7dc0cbd88e628a692063c3d1ab5cdd00aa04322"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ad5da1e4636776c21eaeacdec42f25fa4612631a12f25cd9ab34ddf2c346ffb9"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7956b9ea56f79cd86eddcfbfc65ae2af1e4fe7932fa400755005d903c709370"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e2efb763188008cf2914820dcb9fb23c10fe2be0d2c97ef0fac7cec28e281d8"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4b3aab3451679f1e7932270e950259ed48c3b79390022d3f660491c0e65e4838"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:849a370ac4e125f55f2ad37f928e588291a67ccf91fa33d0b1e042bb3ee1f986"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:566d4ace928419d91f1eb3227fc9ef7b41cf0ad22e93dd2c3368d693cf144408"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f1981f13b10de2f11cfa2f99a8738b35b3f0a0f3075861446894a8d3042430c0"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:36f598300b55b3c983ae8df06473ad27333d2fd9f3e2cfdb913b3a5aaa3a8bcf"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0f4699fa5fe1fffb0d6b2d14b31fd8c29b7ea7375f89d5989f002aaf21728b21"}, + {file = "psycopg_binary-3.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:afe697b8b0071f497c5d4c0f41df9e038391534f5614f7fb3a8c1ca32d66e860"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da5554553b8d9fb7ab6bb1a37cc53f20ada9024916c60f40c09ab1a675323f2f"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b7e3ccc43c395edba8039c9e407b01ed1844304c7f2f4aa99d34d04ed067c83"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8d55405efc8a96aa0ecb2d5d6af552d35c744f160b133fa690814a68d9a952c8"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:58d5cfb1687b69b3484a034d1aa6e5c11f0c1d46757e978ed59fab59ce83fd37"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3761c4107dab218c32ce4b10b1ae5ed686d41b882bfcb05f5bebc2be9488442f"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:45f1526e12cb480586c74670f46563d3090fc2a93e859ccf71efae61f04cef4b"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:b4d4fd4415d5219785fb082e28d84be4fbd90c3bff3d861877db0aa6b0edd70b"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:eb8a1e6b8130fee0b48107739e09553d50c6f031d0b3fcc33f885bb64fa01105"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:7adf1460c05f7366f0fe9cf2d24e46abca9eb621705322bbd0c3f3e3a5edb2b4"}, + {file = "psycopg_binary-3.2.6-cp38-cp38-win_amd64.whl", hash = "sha256:28505f52ceef60554b5ab3289bf5aed2e7e57fa8e9a59a979d82db944e256a6c"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:260c43c329e668606388cee78ec0dab083a25c2c6e6f9cf74a130fd5a27b0f87"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9870e51fad4684dbdec057fa757d65e61cb2acb16236836e9360044c2a1ec880"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:030e9c3082a931e972b029b3cef085784a3bf7f8e18367ae50d5b809aa6e1d87"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b60c9ed291fbd5e777c2c630dcfd10b7a87d68512b0757d5e7406d9c4895a82a"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e0f4a17a9c376c195e403b4826c18f325bd28f425231d36d1036258bf893e23"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac46da609624b16d961f604b3cbc3233ef43211ef1456a188f8c427109c9c3e1"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e77949b8e7014b85cee0bf6e9e041bcae7719b2693ebf59236368fb0b2a08814"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:532322d9ef6e7d178a4f344970b017110633bcc3dc1c3403efcef55aad612517"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:880c5fd76dcb50bdcc8f87359e5a6c7eb416697cc9aa02854c91223bd999c045"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3c0cddc7458b8416d77cd8829d0192466502f31d1fb853d58613cf13ac64f41c"}, + {file = "psycopg_binary-3.2.6-cp39-cp39-win_amd64.whl", hash = "sha256:ea158665676f42b19585dfe948071d3c5f28276f84a97522fb2e82c1d9194563"}, ] [[package]] name = "pyasn1" -version = "0.6.0" +version = "0.6.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "pyasn1-0.6.0-py2.py3-none-any.whl", hash = "sha256:cca4bb0f2df5504f02f6f8a775b6e416ff9b0b3b16f7ee80b5a3153d9b804473"}, - {file = "pyasn1-0.6.0.tar.gz", hash = "sha256:3a35ab2c4b5ef98e17dfdec8ab074046fbda76e281c5a706ccd82328cfc8f64c"}, + {file = "pyasn1-0.6.1-py3-none-any.whl", hash = "sha256:0d632f46f2ba09143da3a8afe9e33fb6f92fa2320ab7e886e2d0f7672af84629"}, + {file = "pyasn1-0.6.1.tar.gz", hash = "sha256:6f580d2bdd84365380830acf45550f2511469f673cb4a5ae3857a3170128b034"}, ] [[package]] name = "pyasn1-modules" -version = "0.4.0" +version = "0.4.2" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "pyasn1_modules-0.4.0-py3-none-any.whl", hash = "sha256:be04f15b66c206eed667e0bb5ab27e2b1855ea54a842e5037738099e8ca4ae0b"}, - {file = "pyasn1_modules-0.4.0.tar.gz", hash = 
"sha256:831dbcea1b177b28c9baddf4c6d1013c24c3accd14a1873fffaa6a2e905f17b6"}, + {file = "pyasn1_modules-0.4.2-py3-none-any.whl", hash = "sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a"}, + {file = "pyasn1_modules-0.4.2.tar.gz", hash = "sha256:677091de870a80aae844b1ca6134f54652fa2c8c5a52aa396440ac3106e941e6"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.7.0" - -[[package]] -name = "pyavro-gen" -version = "0.3.3" -description = "A typed class generator for Avro Schemata" -optional = false -python-versions = "*" -files = [ - {file = "pyavro-gen-0.3.3.tar.gz", hash = "sha256:0e2b71c7c3c147326f555ecffcb6b2d5af4f1760b42a85f53a4fe85879f30a69"}, - {file = "pyavro_gen-0.3.3-py3-none-any.whl", hash = "sha256:452f6acb178bf7d7d9eb3c78d1978bfeecefdb3fa2937a4baf3542ae28b6dc49"}, -] - -[package.dependencies] -avro-preprocessor = ">=0.1.12" -dataclasses-avroschema = ">=0.37.1" -factory-boy = ">=3.2.1" -faker = ">=15.1.1" -isort = ">=5.10.1" -networkx = ">=2.8.7" -pygments = ">=2.13.0" -pytz = ">=2022.5" -undictify = ">=0.11.3" +pyasn1 = ">=0.6.1,<0.7.0" [[package]] name = "pycparser" @@ -1727,6 +1785,8 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\"" files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -1734,157 +1794,146 @@ files = [ [[package]] name = "pydantic" -version = "2.10.2" +version = "2.11.3" description = "Data validation using Python type hints" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e"}, - {file = "pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa"}, + {file = "pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"}, + {file = "pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3"}, ] [package.dependencies] annotated-types = ">=0.6.0" email-validator = {version = ">=2.0.0", optional = true, markers = "extra == \"email\""} -pydantic-core = "2.27.1" +pydantic-core = "2.33.1" typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" [package.extras] email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] [[package]] name = "pydantic-core" -version = "2.27.1" +version = "2.33.1" description = "Core functionality for Pydantic validation and serialization" optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, - {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, - {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, - {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, - {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = 
"sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, - {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, - {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, - {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, - {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, - {file = 
"pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, - {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, - {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, - {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, - {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, - {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, - {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, - {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, - {file = 
"pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, - {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3077cfdb6125cc8dab61b155fdd714663e401f0e6883f9632118ec12cf42df26"}, + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ffab8b2908d152e74862d276cf5017c81a2f3719f14e8e3e8d6b83fda863927"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5183e4f6a2d468787243ebcd70cf4098c247e60d73fb7d68d5bc1e1beaa0c4db"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:398a38d323f37714023be1e0285765f0a27243a8b1506b7b7de87b647b517e48"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d3776f0001b43acebfa86f8c64019c043b55cc5a6a2e313d728b5c95b46969"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c566dd9c5f63d22226409553531f89de0cac55397f2ab8d97d6f06cfce6d947e"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d5f3acc81452c56895e90643a625302bd6be351e7010664151cc55b7b97f89"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3a07fadec2a13274a8d861d3d37c61e97a816beae717efccaa4b36dfcaadcde"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f99aeda58dce827f76963ee87a0ebe75e648c72ff9ba1174a253f6744f518f65"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:902dbc832141aa0ec374f4310f1e4e7febeebc3256f00dc359a9ac3f264a45dc"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fe44d56aa0b00d66640aa84a3cbe80b7a3ccdc6f0b1ca71090696a6d4777c091"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win32.whl", hash = "sha256:ed3eb16d51257c763539bde21e011092f127a2202692afaeaccb50db55a31383"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:694ad99a7f6718c1a498dc170ca430687a39894a60327f548e02a9c7ee4b6504"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win32.whl", hash = "sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_arm64.whl", hash = "sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d"}, + {file = 
"pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5ab77f45d33d264de66e1884fca158bc920cb5e27fd0764a72f72f5756ae8bdb"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7aaba1b4b03aaea7bb59e1b5856d734be011d3e6d98f5bcaa98cb30f375f2ad"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fb66263e9ba8fea2aa85e1e5578980d127fb37d7f2e292773e7bc3a38fb0c7b"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f2648b9262607a7fb41d782cc263b48032ff7a03a835581abbf7a3bec62bcf5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:723c5630c4259400818b4ad096735a829074601805d07f8cafc366d95786d331"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d100e3ae783d2167782391e0c1c7a20a31f55f8015f3293647544df3f9c67824"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177d50460bc976a0369920b6c744d927b0ecb8606fb56858ff542560251b19e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3edde68d1a1f9af1273b2fe798997b33f90308fb6d44d8550c89fc6a3647cf6"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a62c3c3ef6a7e2c45f7853b10b5bc4ddefd6ee3cd31024754a1a5842da7d598d"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:c91dbb0ab683fa0cd64a6e81907c8ff41d6497c346890e26b23de7ee55353f96"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f466e8bf0a62dc43e068c12166281c2eca72121dd2adc1040f3aa1e21ef8599"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win32.whl", hash = "sha256:ab0277cedb698749caada82e5d099dc9fed3f906a30d4c382d1a21725777a1e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win_amd64.whl", hash = "sha256:5773da0ee2d17136b1f1c6fbde543398d452a6ad2a7b54ea1033e2daa739b8d2"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c834f54f8f4640fd7e4b193f80eb25a0602bba9e19b3cd2fc7ffe8199f5ae02"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:049e0de24cf23766f12cc5cc71d8abc07d4a9deb9061b334b62093dedc7cb068"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a28239037b3d6f16916a4c831a5a0eadf856bdd6d2e92c10a0da3a59eadcf3e"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d3da303ab5f378a268fa7d45f37d7d85c3ec19769f28d2cc0c61826a8de21fe"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:25626fb37b3c543818c14821afe0fd3830bc327a43953bc88db924b68c5723f1"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3ab2d36e20fbfcce8f02d73c33a8a7362980cff717926bbae030b93ae46b56c7"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:2f9284e11c751b003fd4215ad92d325d92c9cb19ee6729ebd87e3250072cdcde"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", 
hash = "sha256:048c01eee07d37cbd066fc512b9d8b5ea88ceeb4e629ab94b3e56965ad655add"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5ccd429694cf26af7997595d627dd2637e7932214486f55b8a357edaac9dae8c"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7edbc454a29fc6aeae1e1eecba4f07b63b8d76e76a748532233c4c167b4cb9ea"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ad05b683963f69a1d5d2c2bdab1274a31221ca737dbbceaa32bcb67359453cdd"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df6a94bf9452c6da9b5d76ed229a5683d0306ccb91cca8e1eea883189780d568"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7965c13b3967909a09ecc91f21d09cfc4576bf78140b988904e94f130f188396"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3f1fdb790440a34f6ecf7679e1863b825cb5ffde858a9197f851168ed08371e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5277aec8d879f8d05168fdd17ae811dd313b8ff894aeeaf7cd34ad28b4d77e33"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8ab581d3530611897d863d1a649fb0644b860286b4718db919bfd51ece41f10b"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0483847fa9ad5e3412265c1bd72aad35235512d9ce9d27d81a56d935ef489672"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:de9e06abe3cc5ec6a2d5f75bc99b0bdca4f5c719a5b34026f8c57efbdecd2ee3"}, + {file = "pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df"}, ] [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - [[package]] name = "pyjwt" version = "2.10.1" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, @@ -1901,28 +1950,29 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "python-box" -version = "7.2.0" +version = "7.3.2" description = "Advanced Python dictionaries with dot notation access" optional = false -python-versions = ">=3.8" -files = [ - {file = "python_box-7.2.0-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:6bdeec791e25258351388b3029a3ec5da302bb9ed3be175493c43cdc6c47f5e3"}, - {file = "python_box-7.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c449f7b3756a71479fa9c61a86e344ac00ed782a66d7662590f0afa294249d18"}, - {file = "python_box-7.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:6b0d61f182d394106d963232854e495b51edc178faa5316a797be1178212d7e0"}, - {file = "python_box-7.2.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e2d752de8c1204255bf7b0c814c59ef48293c187a7e9fdcd2fefa28024b72032"}, - {file = "python_box-7.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a6c35ea356a386077935958a5debcd5b229b9a1b3b26287a52dfe1a7e65d99"}, - {file = "python_box-7.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:32ed58ec4d9e5475efe69f9c7d773dfea90a6a01979e776da93fd2b0a5d04429"}, - {file = "python_box-7.2.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:2a2d664c6a27f7515469b6f1e461935a2038ee130b7d194b4b4db4e85d363618"}, - {file = "python_box-7.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8a5a7365db1aaf600d3e8a2747fcf6833beb5d45439a54318548f02e302e3ec"}, - {file = "python_box-7.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:739f827056ea148cbea3122d4617c994e829b420b1331183d968b175304e3a4f"}, - {file = "python_box-7.2.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:2617ef3c3d199f55f63c908f540a4dc14ced9b18533a879e6171c94a6a436f23"}, - {file = "python_box-7.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffd866bed03087b1d8340014da8c3aaae19135767580641df1b4ae6fff6ac0aa"}, - {file = "python_box-7.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:9681f059e7e92bdf20782cd9ea6e533d4711fc7b8c57a462922a025d46add4d0"}, - {file = "python_box-7.2.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:6b59b1e2741c9ceecdf5a5bd9b90502c24650e609cd824d434fed3b6f302b7bb"}, - {file = "python_box-7.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e23fae825d809ae7520fdeac88bb52be55a3b63992120a00e381783669edf589"}, - {file = "python_box-7.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:573b1abdcb7bd745fa404444f060ee62fc35a74f067181e55dcb43cfe92f2827"}, - {file = "python_box-7.2.0-py3-none-any.whl", hash = "sha256:a3c90832dd772cb0197fdb5bc06123b6e1b846899a1b53d9c39450d27a584829"}, - {file = "python_box-7.2.0.tar.gz", hash = 
"sha256:551af20bdab3a60a2a21e3435120453c4ca32f7393787c3a5036e1d9fc6a0ede"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_box-7.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d136163294fd61a1554db7dd203f2e3035064798d30c17d67d948f0de5c572de"}, + {file = "python_box-7.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d72e96547d8e2c2c333909826e9fae338d9a7e4cde07d5c6058cdd468432c0"}, + {file = "python_box-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:3aa52e3b5cc50c80bb7ef4be3e41e81d095310f619454a7ffd61eef3209a6225"}, + {file = "python_box-7.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:32163b1cb151883de0da62b0cd3572610dc72ccf0762f2447baf1d2562e25bea"}, + {file = "python_box-7.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:064cb59b41e25aaf7dbd39efe53151a5f6797cc1cb3c68610f0f21a9d406d67e"}, + {file = "python_box-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:488f0fba9a6416c3334b602366dcd92825adb0811e07e03753dfcf0ed79cd6f7"}, + {file = "python_box-7.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:39009a2da5c20133718b24891a206592adbe09169856aedc450ad1600fc2e511"}, + {file = "python_box-7.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2a72e2f6fb97c7e472ff3272da207ecc615aa222e52e98352391428527c469"}, + {file = "python_box-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:9eead914b9fb7d98a1473f5027dcfe27d26b3a10ffa33b9ba22cf948a23cd280"}, + {file = "python_box-7.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1dfc3b9b073f3d7cad1fa90de98eaaa684a494d0574bbc0666f74fa8307fd6b6"}, + {file = "python_box-7.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ca4685a7f764b5a71b6e08535ce2a96b7964bb63d8cb4df10f6bb7147b6c54b"}, + {file = "python_box-7.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e143295f74d47a9ab24562ead2375c9be10629599b57f2e86717d3fff60f82a9"}, + {file = "python_box-7.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f3118ab3076b645c76133b8fac51deee30237cecdcafc3af664c4b9000f04db9"}, + {file = "python_box-7.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42a760074ba12ccc247796f43b6c61f686ada4b8349ab59e2a6303b27f3ae082"}, + {file = "python_box-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:ea436e7ff5f87bd728472f1e31a9e6e95572c81028c44a8e00097e0968955638"}, + {file = "python_box-7.3.2-py3-none-any.whl", hash = "sha256:fd7d74d5a848623f93b5221fd9fb00b8c00ff0e130fa87f396277aa188659c92"}, + {file = "python_box-7.3.2.tar.gz", hash = "sha256:028b9917129e67f311932d93347b8a4f1b500d7a5a2870ee3c035f4e7b19403b"}, ] [package.extras] @@ -1931,7 +1981,7 @@ msgpack = ["msgpack"] pyyaml = ["PyYAML"] ruamel-yaml = ["ruamel.yaml (>=0.17)"] toml = ["toml"] -tomli = ["tomli", "tomli-w"] +tomli = ["tomli ; python_version < \"3.11\"", "tomli-w"] yaml = ["ruamel.yaml (>=0.17)"] [[package]] @@ -1940,6 +1990,7 @@ version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, @@ -1950,13 +2001,14 @@ six = ">=1.5" [[package]] name = "python-gitlab" -version = "5.1.0" -description = "A 
python wrapper for the GitLab API" +version = "5.6.0" +description = "The python wrapper for the GitLab REST and GraphQL APIs." optional = false python-versions = ">=3.9.0" +groups = ["main"] files = [ - {file = "python_gitlab-5.1.0-py3-none-any.whl", hash = "sha256:c30cf547392ce66daaaf020839cfb6c15a91b26e2e7054d1b3f1b92e8dd65e7d"}, - {file = "python_gitlab-5.1.0.tar.gz", hash = "sha256:d5a10dae8328f32fb9214bd3f9dc199b4930cd496f81c9be42a0f8ff338aeb35"}, + {file = "python_gitlab-5.6.0-py3-none-any.whl", hash = "sha256:68980cd70929fc7f8f06d8a7b09bd046a6b79e1995c19d61249f046005099100"}, + {file = "python_gitlab-5.6.0.tar.gz", hash = "sha256:bc531e8ba3e5641b60409445d4919ace68a2c18cb0ec6d48fbced6616b954166"}, ] [package.dependencies] @@ -1970,13 +2022,14 @@ yaml = ["PyYaml (>=6.0.1)"] [[package]] name = "python-jsonpath" -version = "1.2.0" +version = "1.3.0" description = "JSONPath, JSON Pointer and JSON Patch for Python." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "python_jsonpath-1.2.0-py3-none-any.whl", hash = "sha256:3172c7b87098fced1ed84bd3492bd1a19ef1ad41d4f5b8a3e9a147c750ac08b3"}, - {file = "python_jsonpath-1.2.0.tar.gz", hash = "sha256:a29a84ec3ac38e5dcaa62ac2a215de72c4eb60cb1303e10700da980cf7873775"}, + {file = "python_jsonpath-1.3.0-py3-none-any.whl", hash = "sha256:ce586ec5bd934ce97bc2f06600b00437d9684138b77273ced5b70694a8ef3a76"}, + {file = "python_jsonpath-1.3.0.tar.gz", hash = "sha256:ea5eb4d9b1296c8c19cc53538eb0f20fc54128f84571559ee63539e57875fefe"}, ] [[package]] @@ -1985,6 +2038,7 @@ version = "3.0.0" description = "Universally unique lexicographically sortable identifier" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "python_ulid-3.0.0-py3-none-any.whl", hash = "sha256:e4c4942ff50dbd79167ad01ac725ec58f924b4018025ce22c858bfcff99a5e31"}, {file = "python_ulid-3.0.0.tar.gz", hash = "sha256:e50296a47dc8209d28629a22fc81ca26c00982c78934bd7766377ba37ea49a9f"}, @@ -1993,98 +2047,76 @@ files = [ [package.extras] pydantic = ["pydantic (>=2.0)"] -[[package]] -name = "pytz" -version = "2024.1" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, -] - [[package]] name = "pyyaml" -version = "6.0.1" +version = "6.0.2" description = "YAML parser and emitter for Python" optional = false -python-versions = ">=3.6" -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, - {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, - {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, - {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, - {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash 
= "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, - {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, - {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, - {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, - {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, - {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, - {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, - {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, - {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, - {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, - {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, -] - -[[package]] -name = "redis" -version = "5.2.0" -description = "Python client for Redis database and key-value store" -optional = false python-versions = ">=3.8" -files = [ - {file = "redis-5.2.0-py3-none-any.whl", hash = "sha256:ae174f2bb3b1bf2b09d54bf3e51fbc1469cf6c10aa03e21141f51969801a7897"}, - {file = "redis-5.2.0.tar.gz", hash = 
"sha256:0b1087665a771b1ff2e003aa5bdd354f15a70c9e25d5a7dbf9c722c16528a7b0"}, +groups = ["main"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = 
"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] -[package.extras] -hiredis = ["hiredis (>=3.0.0)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] - [[package]] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, @@ -2106,6 +2138,7 @@ version = "2.0.0" description = "OAuthlib authentication support for Requests." optional = false python-versions = ">=3.4" +groups = ["main"] files = [ {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, @@ -2124,6 +2157,7 @@ version = "1.0.0" description = "A utility belt for advanced users of python-requests" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, @@ -2134,104 +2168,29 @@ requests = ">=2.0.1,<3.0.0" [[package]] name = "rsa" -version = "4.9" +version = "4.9.1" description = "Pure-Python RSA implementation" optional = false -python-versions = ">=3.6,<4" +python-versions = "<4,>=3.6" +groups = ["main"] files = [ - {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, - {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, + {file = "rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762"}, + {file = "rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75"}, ] [package.dependencies] pyasn1 = ">=0.1.3" -[[package]] -name = "ruamel-yaml" -version = "0.18.6" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = 
"sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, - {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, -] - -[package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} - -[package.extras] -docs = ["mercurial (>5.7)", "ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] - -[[package]] -name = "ruamel-yaml-clib" -version = "0.2.8" -description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -optional = false -python-versions = ">=3.6" -files = [ - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, - {file = 
"ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, - {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = 
"sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, - {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, -] - [[package]] name = "sanic" -version = "24.6.0" +version = "24.12.0" description = "A web server and web framework that's written to go fast. Build fast. Run fast." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "sanic-24.6.0-py3-none-any.whl", hash = "sha256:e2c6b392e213d85d9843cf27c64e3f2dacb3ec5c31c8c7ade4c404cd3030e994"}, - {file = "sanic-24.6.0.tar.gz", hash = "sha256:2e0841e2c8c28e68a0e6fc570c42aafbbe3b385d7141b9f96997d9d6c17d7afb"}, + {file = "sanic-24.12.0-py3-none-any.whl", hash = "sha256:3c2a01ec0b6c5926e3efe34eac1b497d31ed989038fe213eb25ad0c98687d388"}, + {file = "sanic-24.12.0.tar.gz", hash = "sha256:09c23aa917616c1e60e44c66dfd7582cb9fd6503f78298c309945909f5839836"}, ] [package.dependencies] @@ -2249,30 +2208,31 @@ uvloop = {version = ">=0.15.0", markers = "sys_platform != \"win32\" and impleme websockets = ">=10.0" [package.extras] -all = ["autodocsumm (>=0.2.11)", "bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "sphinx (>=2.1.2)", "sphinx-rtd-theme (>=0.4.3)", "towncrier", "tox", "types-ujson", "uvicorn"] -dev = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "towncrier", "tox", "types-ujson", "uvicorn"] -docs = ["autodocsumm (>=0.2.11)", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "pygments", "sphinx (>=2.1.2)", "sphinx-rtd-theme (>=0.4.3)"] +all = ["autodocsumm (>=0.2.11)", "bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "sphinx (>=2.1.2)", "sphinx_rtd_theme (>=0.4.3)", "towncrier", "tox", "types-ujson 
; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] +dev = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "cryptography", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "towncrier", "tox", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] +docs = ["autodocsumm (>=0.2.11)", "docutils", "enum-tools[sphinx]", "m2r2", "mistune (<2.0.0)", "pygments", "sphinx (>=2.1.2)", "sphinx_rtd_theme (>=0.4.3)"] ext = ["sanic-ext"] http3 = ["aioquic"] -test = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "types-ujson", "uvicorn"] +test = ["bandit", "beautifulsoup4", "chardet (==3.*)", "coverage", "docutils", "mypy", "pygments", "pytest (>=8.2.2)", "pytest-benchmark", "pytest-sanic", "ruff", "sanic-testing (>=23.6.0)", "slotscheck (>=0.8.0,<1)", "types-ujson ; sys_platform != \"win32\" and implementation_name == \"cpython\"", "uvicorn"] [[package]] name = "sanic-ext" -version = "23.12.0" +version = "24.12.0" description = "Extend your Sanic installation with some core functionality." optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "sanic-ext-23.12.0.tar.gz", hash = "sha256:42fc41e7fafa58f3b790f685f3dd8a8de281460b4169d0e91f4e11b8747f845c"}, - {file = "sanic_ext-23.12.0-py3-none-any.whl", hash = "sha256:3ba2c143d7c41d89b87a11c6214b9d9b52c3994ff8ce3a03792b54ec5627e2c3"}, + {file = "sanic_ext-24.12.0-py3-none-any.whl", hash = "sha256:861f809f071770cf28acd5f13e97ed59985e07361b13b4b4540da1333730c83e"}, + {file = "sanic_ext-24.12.0.tar.gz", hash = "sha256:8f912f4c29f242bc638346d09b79f0c8896ff64e79bd0e7fa09eac4b6c0e23c8"}, ] [package.dependencies] pyyaml = ">=3.0.0" [package.extras] -dev = ["Jinja2", "black (>=21.4b2)", "coverage", "flake8 (>=3.7.7)", "isort (>=5.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "sanic-testing (>=22.9.0)", "tox"] -test = ["Jinja2", "coverage", "pytest", "pytest-asyncio", "pytest-cov", "sanic-testing (>=22.9.0)", "tox"] +dev = ["Jinja2", "black (>=21.4b2)", "coverage", "flake8 (>=3.7.7)", "isort (>=5.0.0)", "pytest", "pytest-asyncio", "pytest-cov", "sanic_testing (>=22.9.0)", "tox"] +test = ["Jinja2", "coverage", "pytest", "pytest-asyncio", "pytest-cov", "sanic_testing (>=22.9.0)", "tox"] [[package]] name = "sanic-routing" @@ -2280,6 +2240,7 @@ version = "23.12.0" description = "Core routing component for Sanic" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "sanic-routing-23.12.0.tar.gz", hash = "sha256:1dcadc62c443e48c852392dba03603f9862b6197fc4cba5bbefeb1ace0848b04"}, {file = "sanic_routing-23.12.0-py3-none-any.whl", hash = "sha256:1558a72afcb9046ed3134a5edae02fc1552cff08f0fff2e8d5de0877ea43ed73"}, @@ -2287,13 +2248,14 @@ files = [ [[package]] name = "sentry-sdk" -version = "2.19.0" +version = "2.26.1" description = "Python client for Sentry (https://sentry.io)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "sentry_sdk-2.19.0-py2.py3-none-any.whl", hash = "sha256:7b0b3b709dee051337244a09a30dbf6e95afe0d34a1f8b430d45e0982a7c125b"}, - {file = "sentry_sdk-2.19.0.tar.gz", hash = "sha256:ee4a4d2ae8bfe3cac012dcf3e4607975904c137e1738116549fc3dbbb6ff0e36"}, + {file = "sentry_sdk-2.26.1-py2.py3-none-any.whl", hash = 
"sha256:e99390e3f217d13ddcbaeaed08789f1ca614d663b345b9da42e35ad6b60d696a"}, + {file = "sentry_sdk-2.26.1.tar.gz", hash = "sha256:759e019c41551a21519a95e6cef6d91fb4af1054761923dadaee2e6eca9c02c7"}, ] [package.dependencies] @@ -2338,37 +2300,41 @@ sanic = ["sanic (>=0.8)"] sqlalchemy = ["sqlalchemy (>=1.2)"] starlette = ["starlette (>=0.19.1)"] starlite = ["starlite (>=1.48)"] +statsig = ["statsig (>=0.55.3)"] tornado = ["tornado (>=6)"] +unleash = ["UnleashClient (>=6.0.1)"] [[package]] name = "setuptools" -version = "75.6.0" +version = "75.9.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, - {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, + {file = "setuptools-75.9.1-py3-none-any.whl", hash = "sha256:0a6f876d62f4d978ca1a11ab4daf728d1357731f978543ff18ecdbf9fd071f73"}, + {file = "setuptools-75.9.1.tar.gz", hash = "sha256:b6eca2c3070cdc82f71b4cb4bb2946bc0760a210d11362278cf1ff394e6ea32c"}, ] [package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", 
"jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -2377,98 +2343,89 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, ] -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -optional = false -python-versions = "*" -files = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] - [[package]] name = "sqlalchemy" -version = "2.0.36" +version = "2.0.40" description = "Database Abstraction Library" optional = false python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = "sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, - {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, - {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, +groups = ["main"] +files = [ + {file = 
"SQLAlchemy-2.0.40-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ae9597cab738e7cc823f04a704fb754a9249f0b6695a6aeb63b74055cd417a96"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37a5c21ab099a83d669ebb251fddf8f5cee4d75ea40a5a1653d9c43d60e20867"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bece9527f5a98466d67fb5d34dc560c4da964240d8b09024bb21c1246545e04e"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:8bb131ffd2165fae48162c7bbd0d97c84ab961deea9b8bab16366543deeab625"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:9408fd453d5f8990405cc9def9af46bfbe3183e6110401b407c2d073c3388f47"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win32.whl", hash = "sha256:00a494ea6f42a44c326477b5bee4e0fc75f6a80c01570a32b57e89cf0fbef85a"}, + {file = "SQLAlchemy-2.0.40-cp37-cp37m-win_amd64.whl", hash = "sha256:c7b927155112ac858357ccf9d255dd8c044fd9ad2dc6ce4c4149527c901fa4c3"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1ea21bef99c703f44444ad29c2c1b6bd55d202750b6de8e06a955380f4725d7"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:afe63b208153f3a7a2d1a5b9df452b0673082588933e54e7c8aac457cf35e758"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8aae085ea549a1eddbc9298b113cffb75e514eadbb542133dd2b99b5fb3b6af"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ea9181284754d37db15156eb7be09c86e16e50fbe77610e9e7bee09291771a1"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5434223b795be5c5ef8244e5ac98056e290d3a99bdcc539b916e282b160dda00"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15d08d5ef1b779af6a0909b97be6c1fd4298057504eb6461be88bd1696cb438e"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win32.whl", hash = "sha256:cd2f75598ae70bcfca9117d9e51a3b06fe29edd972fdd7fd57cc97b4dbf3b08a"}, + {file = "sqlalchemy-2.0.40-cp310-cp310-win_amd64.whl", hash = "sha256:2cbafc8d39ff1abdfdda96435f38fab141892dc759a2165947d1a8fffa7ef596"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f6bacab7514de6146a1976bc56e1545bee247242fab030b89e5f70336fc0003e"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5654d1ac34e922b6c5711631f2da497d3a7bffd6f9f87ac23b35feea56098011"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35904d63412db21088739510216e9349e335f142ce4a04b69e2528020ee19ed4"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c7a80ed86d6aaacb8160a1caef6680d4ddd03c944d985aecee940d168c411d1"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:519624685a51525ddaa7d8ba8265a1540442a2ec71476f0e75241eb8263d6f51"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2ee5f9999a5b0e9689bed96e60ee53c3384f1a05c2dd8068cc2e8361b0df5b7a"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win32.whl", hash = "sha256:c0cae71e20e3c02c52f6b9e9722bca70e4a90a466d59477822739dc31ac18b4b"}, + {file = "sqlalchemy-2.0.40-cp311-cp311-win_amd64.whl", hash = "sha256:574aea2c54d8f1dd1699449f332c7d9b71c339e04ae50163a3eb5ce4c4325ee4"}, + {file = 
"sqlalchemy-2.0.40-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9d3b31d0a1c44b74d3ae27a3de422dfccd2b8f0b75e51ecb2faa2bf65ab1ba0d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:37f7a0f506cf78c80450ed1e816978643d3969f99c4ac6b01104a6fe95c5490a"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0bb933a650323e476a2e4fbef8997a10d0003d4da996aad3fd7873e962fdde4d"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6959738971b4745eea16f818a2cd086fb35081383b078272c35ece2b07012716"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:110179728e442dae85dd39591beb74072ae4ad55a44eda2acc6ec98ead80d5f2"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e8040680eaacdce4d635f12c55c714f3d4c7f57da2bc47a01229d115bd319191"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win32.whl", hash = "sha256:650490653b110905c10adac69408380688cefc1f536a137d0d69aca1069dc1d1"}, + {file = "sqlalchemy-2.0.40-cp312-cp312-win_amd64.whl", hash = "sha256:2be94d75ee06548d2fc591a3513422b873490efb124048f50556369a834853b0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:915866fd50dd868fdcc18d61d8258db1bf9ed7fbd6dfec960ba43365952f3b01"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a4c5a2905a9ccdc67a8963e24abd2f7afcd4348829412483695c59e0af9a705"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:55028d7a3ebdf7ace492fab9895cbc5270153f75442a0472d8516e03159ab364"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cfedff6878b0e0d1d0a50666a817ecd85051d12d56b43d9d425455e608b5ba0"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bb19e30fdae77d357ce92192a3504579abe48a66877f476880238a962e5b96db"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:16d325ea898f74b26ffcd1cf8c593b0beed8714f0317df2bed0d8d1de05a8f26"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win32.whl", hash = "sha256:a669cbe5be3c63f75bcbee0b266779706f1a54bcb1000f302685b87d1b8c1500"}, + {file = "sqlalchemy-2.0.40-cp313-cp313-win_amd64.whl", hash = "sha256:641ee2e0834812d657862f3a7de95e0048bdcb6c55496f39c6fa3d435f6ac6ad"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:50f5885bbed261fc97e2e66c5156244f9704083a674b8d17f24c72217d29baf5"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:cf0e99cdb600eabcd1d65cdba0d3c91418fee21c4aa1d28db47d095b1064a7d8"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe147fcd85aaed53ce90645c91ed5fca0cc88a797314c70dfd9d35925bd5d106"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf7cee56bd552385c1ee39af360772fbfc2f43be005c78d1140204ad6148438"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:4aeb939bcac234b88e2d25d5381655e8353fe06b4e50b1c55ecffe56951d18c2"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c268b5100cfeaa222c40f55e169d484efa1384b44bf9ca415eae6d556f02cb08"}, + {file = "sqlalchemy-2.0.40-cp38-cp38-win32.whl", hash = "sha256:46628ebcec4f23a1584fb52f2abe12ddb00f3bb3b7b337618b80fc1b51177aff"}, + {file = 
"sqlalchemy-2.0.40-cp38-cp38-win_amd64.whl", hash = "sha256:7e0505719939e52a7b0c65d20e84a6044eb3712bb6f239c6b1db77ba8e173a37"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c884de19528e0fcd9dc34ee94c810581dd6e74aef75437ff17e696c2bfefae3e"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1abb387710283fc5983d8a1209d9696a4eae9db8d7ac94b402981fe2fe2e39ad"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cfa124eda500ba4b0d3afc3e91ea27ed4754e727c7f025f293a22f512bcd4c9"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b6b28d303b9d57c17a5164eb1fd2d5119bb6ff4413d5894e74873280483eeb5"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b5a5bbe29c10c5bfd63893747a1bf6f8049df607638c786252cb9243b86b6706"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f0fda83e113bb0fb27dc003685f32a5dcb99c9c4f41f4fa0838ac35265c23b5c"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win32.whl", hash = "sha256:957f8d85d5e834397ef78a6109550aeb0d27a53b5032f7a57f2451e1adc37e98"}, + {file = "sqlalchemy-2.0.40-cp39-cp39-win_amd64.whl", hash = "sha256:1ffdf9c91428e59744f8e6f98190516f8e1d05eec90e936eb08b257332c5e870"}, + {file = "sqlalchemy-2.0.40-py3-none-any.whl", hash = "sha256:32587e2e1e359276957e6fe5dad089758bc042a971a8a09ae8ecf7a8fe23d07a"}, + {file = "sqlalchemy-2.0.40.tar.gz", hash = "sha256:d827099289c64589418ebbcaead0145cd19f4e3e8a93919a0100247af245fa00"}, ] [package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} +greenlet = {version = ">=1", optional = true, markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\") or extra == \"asyncio\""} typing-extensions = ">=4.6.0" [package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (!=0.4.17)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (>=1)"] +aioodbc = ["aioodbc", "greenlet (>=1)"] +aiosqlite = ["aiosqlite", "greenlet (>=1)", "typing_extensions (!=3.10.0.1)"] +asyncio = ["greenlet (>=1)"] +asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet (>=1)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] mssql = ["pyodbc"] mssql-pymssql = ["pymssql"] @@ -2479,7 +2436,7 @@ mysql-connector = ["mysql-connector-python"] oracle = ["cx_oracle (>=8)"] oracle-oracledb = ["oracledb (>=1.0.1)"] postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] +postgresql-asyncpg = ["asyncpg", "greenlet (>=1)"] postgresql-pg8000 = ["pg8000 (>=1.29.1)"] postgresql-psycopg = ["psycopg (>=3.0.7)"] postgresql-psycopg2binary = ["psycopg2-binary"] @@ -2490,13 +2447,14 @@ sqlcipher = ["sqlcipher3_binary"] [[package]] name = "tenacity" -version = 
"9.0.0" +version = "9.1.2" description = "Retry code until it succeeds" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, - {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, + {file = "tenacity-9.1.2-py3-none-any.whl", hash = "sha256:f77bf36710d8b73a50b2dd155c97b870017ad21afe6ab300326b0371b3b05138"}, + {file = "tenacity-9.1.2.tar.gz", hash = "sha256:1169d376c297e7de388d18b4481760d478b0e99a777cad3a9c86e556f4b697cb"}, ] [package.extras] @@ -2509,6 +2467,7 @@ version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] files = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, @@ -2520,6 +2479,7 @@ version = "1.1.1" description = "Human-readable HTML tracebacks for Python exceptions" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "tracerite-1.1.1-py3-none-any.whl", hash = "sha256:3a787a9ecb1a136ea9ce17e6328e414ec414a4f644130af4e1e330bec2dece29"}, {file = "tracerite-1.1.1.tar.gz", hash = "sha256:6400a35a187747189e4bb8d4a8e471bd86d14dbdcc94bcad23f4eda023f41356"}, @@ -2529,80 +2489,67 @@ files = [ html5tagger = ">=1.2.1" [[package]] -name = "types-cffi" -version = "1.16.0.20240331" -description = "Typing stubs for cffi" +name = "types-python-dateutil" +version = "2.9.0.20241206" +description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "types-cffi-1.16.0.20240331.tar.gz", hash = "sha256:b8b20d23a2b89cfed5f8c5bc53b0cb8677c3aac6d970dbc771e28b9c698f5dee"}, - {file = "types_cffi-1.16.0.20240331-py3-none-any.whl", hash = "sha256:a363e5ea54a4eb6a4a105d800685fde596bc318089b025b27dee09849fe41ff0"}, + {file = "types_python_dateutil-2.9.0.20241206-py3-none-any.whl", hash = "sha256:e248a4bc70a486d3e3ec84d0dc30eec3a5f979d6e7ee4123ae043eedbb987f53"}, + {file = "types_python_dateutil-2.9.0.20241206.tar.gz", hash = "sha256:18f493414c26ffba692a72369fea7a154c502646301ebfe3d56a04b3767284cb"}, ] -[package.dependencies] -types-setuptools = "*" - [[package]] -name = "types-pyopenssl" -version = "24.1.0.20240425" -description = "Typing stubs for pyOpenSSL" +name = "types-pyyaml" +version = "6.0.12.20250402" +description = "Typing stubs for PyYAML" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "types-pyOpenSSL-24.1.0.20240425.tar.gz", hash = "sha256:0a7e82626c1983dc8dc59292bf20654a51c3c3881bcbb9b337c1da6e32f0204e"}, - {file = "types_pyOpenSSL-24.1.0.20240425-py3-none-any.whl", hash = "sha256:f51a156835555dd2a1f025621e8c4fbe7493470331afeef96884d1d29bf3a473"}, + {file = "types_pyyaml-6.0.12.20250402-py3-none-any.whl", hash = "sha256:652348fa9e7a203d4b0d21066dfb00760d3cbd5a15ebb7cf8d33c88a49546681"}, + {file = "types_pyyaml-6.0.12.20250402.tar.gz", hash = "sha256:d7c13c3e6d335b6af4b0122a01ff1d270aba84ab96d1a1a1063ecba3e13ec075"}, ] -[package.dependencies] -cryptography = ">=35.0.0" -types-cffi = "*" - [[package]] -name = "types-redis" -version = "4.6.0.20241004" -description = "Typing stubs for redis" +name = 
"typing-extensions" +version = "4.13.2" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "types-redis-4.6.0.20241004.tar.gz", hash = "sha256:5f17d2b3f9091ab75384153bfa276619ffa1cf6a38da60e10d5e6749cc5b902e"}, - {file = "types_redis-4.6.0.20241004-py3-none-any.whl", hash = "sha256:ef5da68cb827e5f606c8f9c0b49eeee4c2669d6d97122f301d3a55dc6a63f6ed"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] -[package.dependencies] -cryptography = ">=35.0.0" -types-pyOpenSSL = "*" - [[package]] -name = "types-setuptools" -version = "70.0.0.20240524" -description = "Typing stubs for setuptools" +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "types-setuptools-70.0.0.20240524.tar.gz", hash = "sha256:e31fee7b9d15ef53980526579ac6089b3ae51a005a281acf97178e90ac71aff6"}, - {file = "types_setuptools-70.0.0.20240524-py3-none-any.whl", hash = "sha256:8f5379b9948682d72a9ab531fbe52932e84c4f38deda570255f9bae3edd766bc"}, + {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, ] -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] +[package.dependencies] +typing-extensions = ">=4.12.0" [[package]] name = "tzdata" -version = "2024.1" +version = "2025.2" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ - {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, - {file = "tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, ] [[package]] @@ -2611,6 +2558,8 @@ version = "5.10.0" description = "Ultra fast JSON encoder and decoder for Python" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "sys_platform != \"win32\" and implementation_name == \"cpython\"" files = [ {file = "ujson-5.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2601aa9ecdbee1118a1c2065323bda35e2c5a2cf0797ef4522d485f9d3ef65bd"}, {file = "ujson-5.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:348898dd702fc1c4f1051bc3aacbf894caa0927fe2c53e68679c073375f732cf"}, @@ -2698,6 +2647,7 @@ version = "0.11.3" description = "Type-checked function calls at runtime" optional = 
false python-versions = "*" +groups = ["main"] files = [ {file = "undictify-0.11.3-py3-none-any.whl", hash = "sha256:4bfdc075b2f06ee027b05e241434c8efcbebf6c83fcc5b8d9d8def56dab4b5ff"}, {file = "undictify-0.11.3.tar.gz", hash = "sha256:1481170ed8b9862c033e7549d817b90cead6002677c602d1bbdbf8ea15100098"}, @@ -2705,17 +2655,18 @@ files = [ [[package]] name = "urllib3" -version = "2.2.3" +version = "2.4.0" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] @@ -2726,6 +2677,8 @@ version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" +groups = ["main"] +markers = "sys_platform != \"win32\" and implementation_name == \"cpython\"" files = [ {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, @@ -2777,6 +2730,7 @@ version = "1.8.0" description = "WebSocket client for Python with low level API options" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "websocket_client-1.8.0-py3-none-any.whl", hash = "sha256:17b44cc997f5c498e809b22cdf2d9c7a9e71c02c8cc2b6c56e7c2d1239bfa526"}, {file = "websocket_client-1.8.0.tar.gz", hash = "sha256:3239df9f44da632f96012472805d40a23281a991027ce11d2f45a6f24ac4c3da"}, @@ -2789,83 +2743,81 @@ test = ["websockets"] [[package]] name = "websockets" -version = "12.0" +version = "15.0.1" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false -python-versions = ">=3.8" -files = [ - {file = "websockets-12.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d554236b2a2006e0ce16315c16eaa0d628dab009c33b63ea03f41c6107958374"}, - {file = "websockets-12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2d225bb6886591b1746b17c0573e29804619c8f755b5598d875bb4235ea639be"}, - {file = "websockets-12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb809e816916a3b210bed3c82fb88eaf16e8afcf9c115ebb2bacede1797d2547"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c588f6abc13f78a67044c6b1273a99e1cf31038ad51815b3b016ce699f0d75c2"}, - {file = "websockets-12.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5aa9348186d79a5f232115ed3fa9020eab66d6c3437d72f9d2c8ac0c6858c558"}, - {file = 
"websockets-12.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6350b14a40c95ddd53e775dbdbbbc59b124a5c8ecd6fbb09c2e52029f7a9f480"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:70ec754cc2a769bcd218ed8d7209055667b30860ffecb8633a834dde27d6307c"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e96f5ed1b83a8ddb07909b45bd94833b0710f738115751cdaa9da1fb0cb66e8"}, - {file = "websockets-12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4d87be612cbef86f994178d5186add3d94e9f31cc3cb499a0482b866ec477603"}, - {file = "websockets-12.0-cp310-cp310-win32.whl", hash = "sha256:befe90632d66caaf72e8b2ed4d7f02b348913813c8b0a32fae1cc5fe3730902f"}, - {file = "websockets-12.0-cp310-cp310-win_amd64.whl", hash = "sha256:363f57ca8bc8576195d0540c648aa58ac18cf85b76ad5202b9f976918f4219cf"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5d873c7de42dea355d73f170be0f23788cf3fa9f7bed718fd2830eefedce01b4"}, - {file = "websockets-12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3f61726cae9f65b872502ff3c1496abc93ffbe31b278455c418492016e2afc8f"}, - {file = "websockets-12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ed2fcf7a07334c77fc8a230755c2209223a7cc44fc27597729b8ef5425aa61a3"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e332c210b14b57904869ca9f9bf4ca32f5427a03eeb625da9b616c85a3a506c"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5693ef74233122f8ebab026817b1b37fe25c411ecfca084b29bc7d6efc548f45"}, - {file = "websockets-12.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e9e7db18b4539a29cc5ad8c8b252738a30e2b13f033c2d6e9d0549b45841c04"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6e2df67b8014767d0f785baa98393725739287684b9f8d8a1001eb2839031447"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bea88d71630c5900690fcb03161ab18f8f244805c59e2e0dc4ffadae0a7ee0ca"}, - {file = "websockets-12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dff6cdf35e31d1315790149fee351f9e52978130cef6c87c4b6c9b3baf78bc53"}, - {file = "websockets-12.0-cp311-cp311-win32.whl", hash = "sha256:3e3aa8c468af01d70332a382350ee95f6986db479ce7af14d5e81ec52aa2b402"}, - {file = "websockets-12.0-cp311-cp311-win_amd64.whl", hash = "sha256:25eb766c8ad27da0f79420b2af4b85d29914ba0edf69f547cc4f06ca6f1d403b"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0e6e2711d5a8e6e482cacb927a49a3d432345dfe7dea8ace7b5790df5932e4df"}, - {file = "websockets-12.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:dbcf72a37f0b3316e993e13ecf32f10c0e1259c28ffd0a85cee26e8549595fbc"}, - {file = "websockets-12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12743ab88ab2af1d17dd4acb4645677cb7063ef4db93abffbf164218a5d54c6b"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b645f491f3c48d3f8a00d1fce07445fab7347fec54a3e65f0725d730d5b99cb"}, - {file = "websockets-12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9893d1aa45a7f8b3bc4510f6ccf8db8c3b62120917af15e3de247f0780294b92"}, - {file = 
"websockets-12.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f38a7b376117ef7aff996e737583172bdf535932c9ca021746573bce40165ed"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f764ba54e33daf20e167915edc443b6f88956f37fb606449b4a5b10ba42235a5"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:1e4b3f8ea6a9cfa8be8484c9221ec0257508e3a1ec43c36acdefb2a9c3b00aa2"}, - {file = "websockets-12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9fdf06fd06c32205a07e47328ab49c40fc1407cdec801d698a7c41167ea45113"}, - {file = "websockets-12.0-cp312-cp312-win32.whl", hash = "sha256:baa386875b70cbd81798fa9f71be689c1bf484f65fd6fb08d051a0ee4e79924d"}, - {file = "websockets-12.0-cp312-cp312-win_amd64.whl", hash = "sha256:ae0a5da8f35a5be197f328d4727dbcfafa53d1824fac3d96cdd3a642fe09394f"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5f6ffe2c6598f7f7207eef9a1228b6f5c818f9f4d53ee920aacd35cec8110438"}, - {file = "websockets-12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9edf3fc590cc2ec20dc9d7a45108b5bbaf21c0d89f9fd3fd1685e223771dc0b2"}, - {file = "websockets-12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8572132c7be52632201a35f5e08348137f658e5ffd21f51f94572ca6c05ea81d"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:604428d1b87edbf02b233e2c207d7d528460fa978f9e391bd8aaf9c8311de137"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a9d160fd080c6285e202327aba140fc9a0d910b09e423afff4ae5cbbf1c7205"}, - {file = "websockets-12.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87b4aafed34653e465eb77b7c93ef058516cb5acf3eb21e42f33928616172def"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b2ee7288b85959797970114deae81ab41b731f19ebcd3bd499ae9ca0e3f1d2c8"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7fa3d25e81bfe6a89718e9791128398a50dec6d57faf23770787ff441d851967"}, - {file = "websockets-12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a571f035a47212288e3b3519944f6bf4ac7bc7553243e41eac50dd48552b6df7"}, - {file = "websockets-12.0-cp38-cp38-win32.whl", hash = "sha256:3c6cc1360c10c17463aadd29dd3af332d4a1adaa8796f6b0e9f9df1fdb0bad62"}, - {file = "websockets-12.0-cp38-cp38-win_amd64.whl", hash = "sha256:1bf386089178ea69d720f8db6199a0504a406209a0fc23e603b27b300fdd6892"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ab3d732ad50a4fbd04a4490ef08acd0517b6ae6b77eb967251f4c263011a990d"}, - {file = "websockets-12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1d9697f3337a89691e3bd8dc56dea45a6f6d975f92e7d5f773bc715c15dde28"}, - {file = "websockets-12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1df2fbd2c8a98d38a66f5238484405b8d1d16f929bb7a33ed73e4801222a6f53"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23509452b3bc38e3a057382c2e941d5ac2e01e251acce7adc74011d7d8de434c"}, - {file = "websockets-12.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e5fc14ec6ea568200ea4ef46545073da81900a2b67b3e666f04adf53ad452ec"}, - {file = 
"websockets-12.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46e71dbbd12850224243f5d2aeec90f0aaa0f2dde5aeeb8fc8df21e04d99eff9"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b81f90dcc6c85a9b7f29873beb56c94c85d6f0dac2ea8b60d995bd18bf3e2aae"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a02413bc474feda2849c59ed2dfb2cddb4cd3d2f03a2fedec51d6e959d9b608b"}, - {file = "websockets-12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bbe6013f9f791944ed31ca08b077e26249309639313fff132bfbf3ba105673b9"}, - {file = "websockets-12.0-cp39-cp39-win32.whl", hash = "sha256:cbe83a6bbdf207ff0541de01e11904827540aa069293696dd528a6640bd6a5f6"}, - {file = "websockets-12.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc4e7fa5414512b481a2483775a8e8be7803a35b30ca805afa4998a84f9fd9e8"}, - {file = "websockets-12.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:248d8e2446e13c1d4326e0a6a4e9629cb13a11195051a73acf414812700badbd"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f44069528d45a933997a6fef143030d8ca8042f0dfaad753e2906398290e2870"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e37d36f0d19f0a4413d3e18c0d03d0c268ada2061868c1e6f5ab1a6d575077"}, - {file = "websockets-12.0-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d829f975fc2e527a3ef2f9c8f25e553eb7bc779c6665e8e1d52aa22800bb38b"}, - {file = "websockets-12.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2c71bd45a777433dd9113847af751aae36e448bc6b8c361a566cb043eda6ec30"}, - {file = "websockets-12.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0bee75f400895aef54157b36ed6d3b308fcab62e5260703add87f44cee9c82a6"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:423fc1ed29f7512fceb727e2d2aecb952c46aa34895e9ed96071821309951123"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27a5e9964ef509016759f2ef3f2c1e13f403725a5e6a1775555994966a66e931"}, - {file = "websockets-12.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3181df4583c4d3994d31fb235dc681d2aaad744fbdbf94c4802485ececdecf2"}, - {file = "websockets-12.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b067cb952ce8bf40115f6c19f478dc71c5e719b7fbaa511359795dfd9d1a6468"}, - {file = "websockets-12.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:00700340c6c7ab788f176d118775202aadea7602c5cc6be6ae127761c16d6b0b"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e469d01137942849cff40517c97a30a93ae79917752b34029f0ec72df6b46399"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffefa1374cd508d633646d51a8e9277763a9b78ae71324183693959cf94635a7"}, - {file = "websockets-12.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba0cab91b3956dfa9f512147860783a1829a8d905ee218a9837c18f683239611"}, - {file = "websockets-12.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:2cb388a5bfb56df4d9a406783b7f9dbefb888c09b71629351cc6b036e9259370"}, - {file = "websockets-12.0-py3-none-any.whl", hash = "sha256:dc284bbc8d7c78a6c69e0c7325ab46ee5e40bb4d50e494d8131a07ef47500e9e"}, - {file = "websockets-12.0.tar.gz", hash = "sha256:81df9cbcbb6c260de1e007e58c011bfebe2dafc8435107b0537f393dd38c8b1b"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = 
"websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = 
"websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = 
"websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, ] [[package]] @@ -2874,6 +2826,7 @@ version = "3.1.3" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, @@ -2891,6 +2844,7 @@ version = "1.2.0" description = "WebSockets state-machine based protocol implementation" optional = false python-versions = ">=3.7.0" +groups = ["main"] files = [ {file = "wsproto-1.2.0-py3-none-any.whl", hash = "sha256:b9acddd652b585d75b20477888c56642fdade28bdfd3579aa24a4d2c037dd736"}, {file = "wsproto-1.2.0.tar.gz", hash = "sha256:ad565f26ecb92588a3e43bc3d96164de84cd9902482b130d0ddbaa9664a85065"}, @@ -2900,6 +2854,6 @@ files = [ h11 = ">=0.9.0,<1" [metadata] -lock-version = "2.0" -python-versions = "^3.12" -content-hash = "85bddfadbd7069d06668d1cf92d9b3145e787bb2cbbed3382dc0eb9dbdb15eb3" +lock-version = "2.1" +python-versions = "^3.13" +content-hash = "f026cd687c8ddf81d556e50abb84521956adac7e28d4b78602c7b04731a98b78" diff --git a/projects/secrets_storage/pyproject.toml b/projects/secrets_storage/pyproject.toml index 834c2b7d8..626b18bd4 100644 --- a/projects/secrets_storage/pyproject.toml +++ b/projects/secrets_storage/pyproject.toml @@ -1,10 +1,15 @@ -[tool.poetry] +[project] name = "secrets_storage" version = "0.1.0" description = "" -authors = ['Swiss Data Science Center '] -license = "Apache License 2.0" +authors = [ + { name = "Swiss Data Science Center", email = "contact@datascience.ch" }, +] +license = "" +requires-python = ">=3.13" +dynamic = ["dependencies"] +[tool.poetry] packages = [ { include = "renku_data_services/secrets_storage_api", from = "../../bases" }, { include = "renku_data_services/data_api", from = "../../bases" }, @@ -20,6 +25,7 @@ packages = [ { include = "renku_data_services/errors", from = "../../components" }, { include = "renku_data_services/git", from = "../../components" }, { include = "renku_data_services/k8s", from = "../../components" }, + { include = "renku_data_services/k8s_watcher", from = "../../components" }, { include = "renku_data_services/message_queue", from = "../../components" }, { include = "renku_data_services/migrations", from = "../../components" }, { include = "renku_data_services/namespace", from = "../../components" }, @@ -33,51 +39,53 @@ packages = [ { include = "renku_data_services/utils", from = "../../components" 
}, { include = "renku_data_services/data_connectors", from = "../../components" }, { include = "renku_data_services/notebooks", from = "../../components" }, + { include = "renku_data_services/solr", from = "../../components" }, + { include = "renku_data_services/search", from = "../../components" }, + { include = "renku_data_services/metrics", from = "../../components" }, ] [tool.poetry.dependencies] -python = "^3.12" -sanic = { extras = ["ext"], version = "^24.6.0" } -pydantic = "^2.10.2" +python = "^3.13" +sanic = { extras = ["ext"], version = "^24.12.0" } +pydantic = "^2.10.6" datamodel-code-generator = "^0.24.2" -sqlalchemy = { extras = ["asyncio"], version = "^2.0.36" } -alembic = "^1.14.0" +sqlalchemy = { extras = ["asyncio"], version = "^2.0.38" } +alembic = "^1.14.1" asyncpg = "^0.30.0" pyjwt = { extras = ["crypto"], version = "^2.10.1" } tenacity = "^9.0.0" httpx = "<0.29" kubernetes = "^31.0.0" python-ulid = "^3.0.0" -python-gitlab = "^5.1.0" +python-gitlab = "^5.6.0" psycopg = { version = "^3.2.3", extras = ["binary"] } -urllib3 = "^2.2.3" +urllib3 = "^2.3.0" deepmerge = "^2.0" -authlib = "^1.3.2" -redis = "^5.2.0" -dataclasses-avroschema = "^0.65.4" +authlib = "^1.5.0" undictify = "^0.11.3" -types-redis = "^4.6.0.20241004" prometheus-sanic = "^3.0.0" -sentry-sdk = { version = "^2.19.0", extras = ["sanic"] } -cryptography = "^44.0.0" -authzed = "^1.1.0" +sentry-sdk = { version = "^2.22.0", extras = ["sanic"] } +cryptography = "^44.0.1" +authzed = "^1.20.0" # see https://github.com/sanic-org/sanic/issues/2828 for setuptools dependency, remove when not needed anymore -setuptools = { version = "^75.6.0" } +setuptools = { version = "^75.8.2" } aiofile = "^3.9.0" # Not a direct dependency, it is needed by authzed. Was causing things to crash at startup beacuse of # google.protobuf.runtime_version.VersionError: # Detected incompatible Protobuf Gencode/Runtime versions when loading authzed/api/v1/core.proto: gencode 5.28.2 runtime 5.27.3. -protobuf = "^5.29.0" +protobuf = "^5.29.3" escapism = "^1.0.1" -kr8s = "^0.18.1" -marshmallow = "^3.23.1" +kr8s = "^0.20.7" +python-box = "^7.0.1" +marshmallow = "^3.26.1" toml = "^0.10.2" werkzeug = "^3.1.3" +parsy = "^2.1" +sanic-ext = "^24.12.0" +posthog = "^3.21.0" +markdown-code-runner = "^2.2.0" [tool.poetry.group.dev.dependencies] -pyavro-gen = "^0.3.3" -avro-preprocessor = "^0.3.0" -fakeredis = "^2.26.1" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/pyproject.toml b/pyproject.toml index dc45cd3c3..d2e069a8c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,13 +1,19 @@ -[tool.poetry] +[project] name = "renku-data-services" version = "0.0.1" description = "Collection of services that deal with data in Postgres." 
-authors = ["Swiss Data Science Center "] +authors = [ + { name = "Swiss Data Science Center", email = "contact@datascience.ch" }, +] readme = "README.md" license = "Apache License 2.0" +requires-python = ">=3.13" +dynamic = ["dependencies"] + +[tool.poetry] packages = [ { include = "renku_data_services/data_api", from = "bases" }, - { include = "renku_data_services/background_jobs", from = "bases" }, + { include = "renku_data_services/data_tasks", from = "bases" }, { include = "renku_data_services/authn", from = "components" }, { include = "renku_data_services/db_config", from = "components" }, { include = "renku_data_services/app_config", from = "components" }, @@ -33,73 +39,78 @@ packages = [ { include = "renku_data_services/notebooks", from = "components" }, { include = "renku_data_services/platform", from = "components" }, { include = "renku_data_services/data_connectors", from = "components" }, + { include = "renku_data_services/solr", from = "components" }, + { include = "renku_data_services/search", from = "components" }, + { include = "renku_data_services/metrics", from = "components" }, ] [tool.poetry.dependencies] -python = "^3.12" -sanic = { extras = ["ext"], version = "^24.6.0" } -pydantic = { extras = ["email"], version = "^2.10.2" } -datamodel-code-generator = "^0.24.2" -sqlalchemy = { extras = ["asyncio"], version = "^2.0.36" } -alembic = "^1.14.0" +python = "^3.13" +sanic = { extras = ["ext"], version = "^24.12.0" } +pydantic = { extras = ["email"], version = "^2.10.6" } +datamodel-code-generator = "^0.28.4" +sqlalchemy = { extras = ["asyncio"], version = "^2.0.38" } +alembic = "^1.14.1" asyncpg = "^0.30.0" pyjwt = { extras = ["crypto"], version = "^2.10.1" } tenacity = "^9.0.0" httpx = "<0.29" kubernetes = "^31.0.0" python-ulid = "^3.0.0" -python-gitlab = "^5.1.0" +python-gitlab = "^5.6.0" psycopg = { version = "^3.2.3", extras = ["binary"] } -urllib3 = "^2.2.3" +urllib3 = "^2.3.0" deepmerge = "^2.0" -authlib = "^1.3.2" -redis = "^5.2.0" -dataclasses-avroschema = "^0.65.4" +authlib = "^1.5.0" undictify = "^0.11.3" -types-redis = "^4.6.0.20241004" prometheus-sanic = "^3.0.0" prometheus_client = "^0.7.1" -kubernetes-asyncio = "^31.1.0" -marshmallow = "^3.23.1" +kubernetes-asyncio = "^32.0.0" +marshmallow = "^3.26.1" escapism = "^1.0.1" -sentry-sdk = { version = "^2.19.0", extras = ["sanic"] } -authzed = "^1.1.0" -cryptography = "^44.0.0" +sentry-sdk = { version = "^2.22.0", extras = ["sanic"] } +authzed = "^1.20.0" +cryptography = "^44.0.1" # see https://github.com/sanic-org/sanic/issues/2828 for setuptools dependency, remove when not needed anymore -setuptools = { version = "^75.6.0" } -kr8s = "^0.18.1" +setuptools = { version = "^75.8.2" } +kr8s = "^0.20.7" +python-box = "^7.0.1" werkzeug = "^3.1.3" toml = "^0.10.2" aiofiles = "^24.1.0" # Not a direct dependency, it is needed by authzed. Was causing things to crash at startup beacuse of # google.protobuf.runtime_version.VersionError: # Detected incompatible Protobuf Gencode/Runtime versions when loading authzed/api/v1/core.proto: gencode 5.28.2 runtime 5.27.3. 
-protobuf = "^5.29.0" +protobuf = "^5.29.3" +poetry = "^2.1.1" +parsy = "^2.1" +sanic-ext = "^24.12.0" +posthog = "^3.21.0" +markdown-code-runner = "^2.2.0" [tool.poetry.group.dev.dependencies] -bandit = "^1.8.0" -mypy = "~1.13" +bandit = "^1.8.3" +mypy = "~1.15.0" pytest = "^8.3.4" pytest-cov = "^6.0.0" -pre-commit = "^4.0.1" +pre-commit = "^4.1.0" sanic-testing = "^24.6.0" aiosqlite = "^0.20.0" -types-pyyaml = "^6.0.12.20240917" -schemathesis = "~3.29.2" -pytest-asyncio = "^0.21.1" +types-pyyaml = "^6.0.12.20241230" +schemathesis = "==3.39.7" +pytest-asyncio = "^0.21.2" pytest-postgresql = "^6.1.1" types-urllib3 = "^1.26.25.14" -pyavro-gen = "^0.3.3" -avro-preprocessor = "^0.3.0" -fakeredis = "^2.26.1" -ruff = "^0.8.1" -debugpy = "^1.8.9" -pytest-xdist = { version = "^3.5.0", extras = ["psutil"] } +ruff = "^0.8.6" +debugpy = "^1.8.12" +pytest-xdist = { version = "^3.6.1", extras = ["psutil"] } types-requests = "^2.32.0.20241016" types-toml = "^0.10.8.20240310" -types-aiofiles = "^24.1.0.20240626" +types-aiofiles = "^24.1.0.20241221" pytest-mock = "^3.14.0" uvloop = "^0.21.0" +syrupy = "^4.8.2" +ruamel-yaml = "^0.18.14" [build-system] requires = ["poetry-core"] @@ -111,10 +122,10 @@ target-version = "py311" output-format = "full" include = ["*.py", "*.pyi"] exclude = [ - "*/avro_models/*", ".devcontainer/", "components/renku_data_services/notebooks/cr_amalthea_session.py", "components/renku_data_services/notebooks/cr_jupyter_server.py", + "components/renku_data_services/session/cr_shipwright_buildrun.py", ] [tool.ruff.format] @@ -135,6 +146,11 @@ select = [ "UP", # flake8-simplify "SIM", + # unused-noqa https://docs.astral.sh/ruff/rules/unused-noqa/ + "RUF100", + # flake8-bugbear checks are pretty useful + # https://docs.astral.sh/ruff/rules/#flake8-bugbear-b + "B", ] ignore = [ "D105", @@ -153,8 +169,9 @@ ignore = [ [tool.ruff.lint.per-file-ignores] "test/*" = ["D"] "*/versions/*" = ["D", "E", "W"] -"apispec.py" = ["D", "E", "W", "I", "UP"] +"apispec.py" = ["D", "E", "W", "I", "UP", "F401"] "components/renku_data_services/notebooks/crs.py" = ["F401"] +"components/renku_data_services/session/crs.py" = ["F401"] [tool.ruff.lint.isort] known-first-party = ["renku_data_services", "test"] @@ -186,7 +203,7 @@ postgresql_password = "renku" asyncio_mode = "auto" [tool.mypy] -python_version = "3.12" +python_version = "3.13" mypy_path = ["components", "bases"] files = ["bases/**/*.py", "components/**/*.py"] namespace_packages = true @@ -227,7 +244,7 @@ module = [ "renku_data_services.notebooks.cr_amalthea_session", "renku_data_services.notebooks.cr_jupyter_server", "renku_data_services.platform.apispec", - "renku_data_services.message_queue.apispec", + "renku_data_services.search.apispec", ] ignore_errors = true @@ -237,8 +254,6 @@ module = [ "aiofiles.*", "authlib.*", "authzed.*", - "avro_preprocessor.*", - "dataclasses_avroschema", "deepmerge.*", "grpc.*", "grpcutil.*", @@ -246,12 +261,14 @@ module = [ "kubernetes_asyncio.*", "prometheus_client.*", "prometheus_sanic.*", - "pyavro_gen.*", "sanic_testing.*", "undictify.*", "urllib3.*", "escapism.*", "kr8s.*", + "parsy.*", + "posthog.*", + "markdown_code_runner" ] ignore_missing_imports = true diff --git a/registries.yaml b/registries.yaml new file mode 100644 index 000000000..aa620bf0f --- /dev/null +++ b/registries.yaml @@ -0,0 +1,4 @@ +mirrors: + "shipwright-registry.local:5000": + endpoint: + - http://k3d-shipwright-registry.local:5000 diff --git a/setup-k3d-cluster.sh b/setup-k3d-cluster.sh new file mode 100755 index 000000000..6fc224508 --- 
/dev/null +++ b/setup-k3d-cluster.sh @@ -0,0 +1,178 @@ +#!/usr/bin/env bash + +SHIPWRIGHT_VERSION=v0.14.0 +INTERNAL_RE="host\.k3d\.internal" +REGISTRY_NAME="dev-registry.local" +REGISTRY_PORT=5000 +REGISTRY_URI=$REGISTRY_NAME:$REGISTRY_PORT +K3D_REGISTRY_NAME="k3d-$REGISTRY_NAME" +CLUSTER_NAME="devel" + +function delete_all() { + k3d cluster delete $CLUSTER_NAME + k3d registry delete $REGISTRY_NAME +} + +function setup_registry() { + set +e + registry=$(k3d registry list | grep $REGISTRY_NAME) + + set -e + + if [[ "$registry" == "" ]] + then + echo "Creating registry $REGISTRY_URI" + k3d registry create $REGISTRY_NAME -p $REGISTRY_PORT + else + echo "Registry $REGISTRY_NAME already exists -> reusing." + fi +} + +function setup_cluster() { + set +e + cluster=$(k3d cluster list | grep $CLUSTER_NAME) + + set -e + + if [[ "$cluster" == "" ]] + then + echo "Creating cluster $CLUSTER_NAME" + k3d cluster create $CLUSTER_NAME --registry-use $K3D_REGISTRY_NAME:$REGISTRY_PORT --registry-config registries.yaml --agents 1 --k3s-arg --disable=metrics-server@server:0 + else + echo "Cluster $CLUSTER_NAME already exists -> reusing." + fi +} + +function setup_dns() { + echo "Updating the cluster DNS configuration to make the registry accessible" + + # Wait for the DNS to contain the internal entry + internal_added=false + + until [ $internal_added == true ] + do + + configmap=$(kubectl get configmaps --namespace kube-system coredns -o yaml) + if [[ $configmap =~ $INTERNAL_RE ]] + then + internal_added=true + fi + done + + # Add an entry to the DNS so that the registry API can be accessed + kubectl get configmaps --namespace kube-system coredns -o yaml | sed -e "s/\(host.k3d.internal\)/\\1 $REGISTRY_NAME/g" | kubectl apply -f - + # Restart the coredns pod to pick up the config change + coredns_pod=$(kubectl --namespace kube-system get pods | grep coredns | awk '{print $1}') + kubectl --namespace kube-system delete pod "$coredns_pod" --wait=true + # Wait for the replacement pod to become ready + coredns_pod=$(kubectl --namespace kube-system get pods | grep coredns | awk '{print $1}') + kubectl --namespace kube-system wait --for=condition=Ready "pod/$coredns_pod" +} + +function copy_image() { + # Copy the builder image to the local registry + echo "Copying image from source registry to $REGISTRY_URI" + kubectl create job copy-image --image quay.io/skopeo/stable:latest -- skopeo copy docker://paketobuildpacks/builder-jammy-base:latest docker://$REGISTRY_URI/paketobuildpacks/builder-jammy-base:latest --dest-tls-verify=false + kubectl wait --for=condition=complete job/copy-image + kubectl delete job copy-image +} + +function setup_shipwright() { + # Set up Tekton + curl --silent --location https://raw.githubusercontent.com/shipwright-io/build/$SHIPWRIGHT_VERSION/hack/install-tekton.sh | bash + + # Set up Shipwright + kubectl apply --filename https://github.com/shipwright-io/build/releases/download/$SHIPWRIGHT_VERSION/release.yaml --server-side + curl --silent --location https://raw.githubusercontent.com/shipwright-io/build/$SHIPWRIGHT_VERSION/hack/setup-webhook-cert.sh | bash + curl --silent --location https://raw.githubusercontent.com/shipwright-io/build/main/hack/storage-version-migration.sh | bash + + # Install Shipwright strategies + kubectl apply --filename https://github.com/shipwright-io/build/releases/download/$SHIPWRIGHT_VERSION/sample-strategies.yaml --server-side + + set -x +} + +function test_shipwright_build() { + cat < list[BaseException]: - return [ValueError("x is not set"), ValueError("y is not set")] - - with 
pytest.raises(BackgroundJobError) as exc_info: - await error_handler([err1(), err2()]) - - assert len(exc_info.value.errors) == 3 - - exc_str = str(exc_info.value) - assert errors.ValidationError.message in exc_str - assert "x is not set" in exc_str - assert "y is not set" in exc_str diff --git a/test/bases/renku_data_services/data_api/__snapshots__/test_projects.ambr b/test/bases/renku_data_services/data_api/__snapshots__/test_projects.ambr new file mode 100644 index 000000000..33c594a3e --- /dev/null +++ b/test/bases/renku_data_services/data_api/__snapshots__/test_projects.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_get_all_projects_with_pagination + list([ + dict({ + 'created_by': 'user', + 'is_template': False, + 'keywords': list([ + ]), + 'name': 'Project 1', + 'namespace': 'user.doe', + 'repositories': list([ + ]), + 'secrets_mount_directory': '/secrets', + 'slug': 'project-1', + 'visibility': 'private', + }), + ]) +# --- +# name: test_project_copy_basics + dict({ + 'created_by': 'user', + 'description': 'Template project', + 'is_template': False, + 'keywords': list([ + 'tag 1', + 'tag 2', + ]), + 'name': 'Renku Native Project', + 'namespace': 'user.doe', + 'repositories': list([ + 'http://repository-1.ch', + 'http://repository-2.ch', + ]), + 'secrets_mount_directory': '/secrets', + 'slug': 'project-slug', + 'visibility': 'public', + }) +# --- +# name: test_project_copy_basics.1 + dict({ + 'created_by': 'user', + 'description': 'Template project', + 'documentation': 'test documentation', + 'is_template': False, + 'keywords': list([ + 'tag 1', + 'tag 2', + ]), + 'name': 'Renku Native Project', + 'namespace': 'user.doe', + 'repositories': list([ + 'http://repository-1.ch', + 'http://repository-2.ch', + ]), + 'secrets_mount_directory': '/secrets', + 'slug': 'project-slug', + 'visibility': 'public', + }) +# --- +# name: test_project_copy_creates_new_build_and_environment_instances + dict({ + 'description': 'A session launcher.', + 'disk_storage': 42, + 'name': 'Launcher', + 'resource_class_id': 1, + }) +# --- +# name: test_project_copy_creates_new_build_and_environment_instances.1 + dict({ + 'build_parameters': dict({ + 'builder_variant': 'python', + 'frontend_variant': 'vscodium', + 'repository': 'https://github.com/some/repo', + }), + 'container_image': 'image:unknown-at-the-moment', + 'default_url': '/', + 'description': 'Generated environment for Launcher', + 'environment_image_source': 'build', + 'environment_kind': 'CUSTOM', + 'gid': 1000, + 'is_archived': False, + 'mount_directory': '/home/renku/work', + 'name': 'Launcher', + 'port': 8888, + 'uid': 1000, + 'working_directory': '/home/renku/work', + }) +# --- +# name: test_project_copy_creates_new_build_and_environment_instances.2 + dict({ + 'builder_variant': 'python', + 'frontend_variant': 'vscodium', + 'repository': 'https://github.com/some/repo', + }) +# --- diff --git a/test/bases/renku_data_services/data_api/__snapshots__/test_sessions.ambr b/test/bases/renku_data_services/data_api/__snapshots__/test_sessions.ambr new file mode 100644 index 000000000..a508d42ef --- /dev/null +++ b/test/bases/renku_data_services/data_api/__snapshots__/test_sessions.ambr @@ -0,0 +1,132 @@ +# serializer version: 1 +# name: test_get_all_session_environments + list([ + dict({ + 'args': list([ + '/entrypoint.sh jupyter server --ServerApp.ip=0.0.0.0 --ServerApp.port=8888 --ServerApp.base_url=$RENKU_BASE_URL_PATH --ServerApp.token="" --ServerApp.password="" --ServerApp.allow_remote_access=true --ContentsManager.allow_hidden=true 
--ServerApp.allow_origin=* --ServerApp.root_dir="/home/jovyan/work"', + ]), + 'command': list([ + 'sh', + '-c', + ]), + 'container_image': 'renku/renkulab-py:latest', + 'default_url': '/lab', + 'description': 'Standard python environment', + 'gid': 100, + 'is_archived': False, + 'mount_directory': '/home/jovyan/work', + 'name': 'Python/Jupyter', + 'port': 8888, + 'uid': 1000, + 'working_directory': '/home/jovyan/work', + }), + dict({ + 'args': list([ + '/entrypoint.sh jupyter server --ServerApp.ip=0.0.0.0 --ServerApp.port=8888 --ServerApp.base_url=$RENKU_BASE_URL_PATH --ServerApp.token="" --ServerApp.password="" --ServerApp.allow_remote_access=true --ContentsManager.allow_hidden=true --ServerApp.allow_origin=* --ServerApp.root_dir="/home/jovyan/work"', + ]), + 'command': list([ + 'sh', + '-c', + ]), + 'container_image': 'renku/renkulab-r:latest', + 'default_url': '/rstudio', + 'description': 'Standard R environment', + 'gid': 100, + 'is_archived': False, + 'mount_directory': '/home/jovyan/work', + 'name': 'Rstudio', + 'port': 8888, + 'uid': 1000, + 'working_directory': '/home/jovyan/work', + }), + dict({ + 'container_image': 'some_image:some_tag', + 'default_url': '/lab', + 'description': 'A session environment.', + 'gid': 1000, + 'is_archived': False, + 'name': 'Environment 1', + 'port': 8080, + 'uid': 1000, + }), + dict({ + 'container_image': 'some_image:some_tag', + 'default_url': '/lab', + 'description': 'A session environment.', + 'gid': 1000, + 'is_archived': False, + 'name': 'Environment 2', + 'port': 8080, + 'uid': 1000, + }), + dict({ + 'container_image': 'some_image:some_tag', + 'default_url': '/lab', + 'description': 'A session environment.', + 'gid': 1000, + 'is_archived': False, + 'name': 'Environment 3', + 'port': 8080, + 'uid': 1000, + }), + dict({ + 'container_image': 'some_image:some_tag', + 'default_url': '/lab', + 'description': 'A session environment.', + 'gid': 1000, + 'is_archived': True, + 'name': 'Environment 4', + 'port': 8080, + 'uid': 1000, + }), + ]) +# --- +# name: test_get_all_session_launchers + list([ + dict({ + 'description': 'A session launcher.', + 'environment': dict({ + 'container_image': 'some_image:some_tag', + 'default_url': '/lab', + 'environment_image_source': 'image', + 'environment_kind': 'CUSTOM', + 'gid': 1000, + 'is_archived': False, + 'name': 'Test', + 'port': 8080, + 'uid': 1000, + }), + 'name': 'Launcher 1', + }), + dict({ + 'description': 'A session launcher.', + 'environment': dict({ + 'container_image': 'some_image:some_tag', + 'default_url': '/lab', + 'environment_image_source': 'image', + 'environment_kind': 'CUSTOM', + 'gid': 1000, + 'is_archived': False, + 'name': 'Test', + 'port': 8080, + 'uid': 1000, + }), + 'name': 'Launcher 2', + }), + dict({ + 'description': 'A session launcher.', + 'environment': dict({ + 'container_image': 'some_image:some_tag', + 'default_url': '/lab', + 'environment_image_source': 'image', + 'environment_kind': 'CUSTOM', + 'gid': 1000, + 'is_archived': False, + 'name': 'Test', + 'port': 8080, + 'uid': 1000, + }), + 'name': 'Launcher 3', + }), + ]) +# --- diff --git a/test/bases/renku_data_services/data_api/__snapshots__/test_storage.ambr b/test/bases/renku_data_services/data_api/__snapshots__/test_storage.ambr new file mode 100644 index 000000000..e2c9ff941 --- /dev/null +++ b/test/bases/renku_data_services/data_api/__snapshots__/test_storage.ambr @@ -0,0 +1,18067 @@ +# serializer version: 1 +# name: test_storage_creation[payload0-201-s3] + dict({ + 'sensitive_fields': list([ + ]), + 'storage': dict({ + 
'configuration': dict({ + 'provider': 'AWS', + 'region': 'us-east-1', + 'type': 's3', + }), + 'name': 'mystorage', + 'project_id': '123456', + 'readonly': True, + 'source_path': 'bucket/myfolder', + 'storage_type': 's3', + 'target_path': 'my/target', + }), + }) +# --- +# name: test_storage_creation[payload1-201-s3] + dict({ + 'sensitive_fields': list([ + ]), + 'storage': dict({ + 'configuration': dict({ + 'provider': 'AWS', + 'region': 'us-east-1', + 'type': 's3', + }), + 'name': 'mystorage', + 'project_id': '123456', + 'readonly': True, + 'source_path': 'bucket/myfolder', + 'storage_type': 's3', + 'target_path': 'my/target', + }), + }) +# --- +# name: test_storage_creation[payload10-201-s3] + dict({ + 'sensitive_fields': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + AWS Secret Access Key (password). + + Leave blank for anonymous access or runtime credentials. + ''', + 'ispassword': False, + 'name': 'secret_access_key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + ]), + 'storage': dict({ + 'configuration': dict({ + 'provider': 'AWS', + 'secret_access_key': '', + 'type': 's3', + }), + 'name': 'mystorage', + 'project_id': '123456', + 'readonly': True, + 'source_path': 'bucket/myfolder', + 'storage_type': 's3', + 'target_path': 'my/target', + }), + }) +# --- +# name: test_storage_creation[payload2-201-s3] + dict({ + 'sensitive_fields': list([ + ]), + 'storage': dict({ + 'configuration': dict({ + 'provider': 'AWS', + 'region': 'us-east-2', + 'type': 's3', + }), + 'name': 'mystorage', + 'project_id': '123456', + 'readonly': True, + 'source_path': 'mybucket/myfolder', + 'storage_type': 's3', + 'target_path': 'my/target', + }), + }) +# --- +# name: test_storage_creation[payload3-201-s3] + dict({ + 'sensitive_fields': list([ + ]), + 'storage': dict({ + 'configuration': dict({ + 'provider': 'AWS', + 'type': 's3', + }), + 'name': 'mystorage', + 'project_id': '123456', + 'readonly': True, + 'source_path': 'giab', + 'storage_type': 's3', + 'target_path': 'my/target', + }), + }) +# --- +# name: test_storage_creation[payload4-201-s3] + dict({ + 'sensitive_fields': list([ + ]), + 'storage': dict({ + 'configuration': dict({ + 'provider': 'AWS', + 'region': 'us-east-2', + 'type': 's3', + }), + 'name': 'mystorage', + 'project_id': '123456', + 'readonly': False, + 'source_path': 'mybucket/myfolder', + 'storage_type': 's3', + 'target_path': 'my/target', + }), + }) +# --- +# name: test_storage_creation[payload5-201-s3] + dict({ + 'sensitive_fields': list([ + ]), + 'storage': dict({ + 'configuration': dict({ + 'endpoint': 'my.provider.com', + 'type': 's3', + }), + 'name': 'mystorage', + 'project_id': '123456', + 'readonly': True, + 'source_path': 'mybucket/myfolder', + 'storage_type': 's3', + 'target_path': 'my/target', + }), + }) +# --- +# name: test_storage_creation[payload6-201-azureblob] + dict({ + 'sensitive_fields': list([ + ]), + 'storage': dict({ + 'configuration': dict({ + 'type': 'azureblob', + }), + 'name': 'mystorage', + 'project_id': '123456', + 'readonly': True, + 'source_path': 'mycontainer/myfolder', + 'storage_type': 'azureblob', + 'target_path': 'my/target', + }), + }) +# --- +# name: test_storage_creation[payload7-201-azureblob] + dict({ + 'sensitive_fields': list([ + ]), + 'storage': dict({ + 'configuration': dict({ + 'account': 'myaccount', + 'type': 'azureblob', + }), + 'name': 'mystorage', + 'project_id': '123456', + 'readonly': True, + 'source_path': 'myfolder', + 'storage_type': 'azureblob', + 
'target_path': 'my/target', + }), + }) +# --- +# name: test_storage_creation[payload8-201-azureblob] + dict({ + 'sensitive_fields': list([ + ]), + 'storage': dict({ + 'configuration': dict({ + 'account': 'myaccount', + 'type': 'azureblob', + }), + 'name': 'mystorage', + 'project_id': '123456', + 'readonly': True, + 'source_path': 'myfolder', + 'storage_type': 'azureblob', + 'target_path': 'my/target', + }), + }) +# --- +# name: test_storage_creation[payload9-201-s3] + dict({ + 'sensitive_fields': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + AWS Secret Access Key (password). + + Leave blank for anonymous access or runtime credentials. + ''', + 'ispassword': False, + 'name': 'secret_access_key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + ]), + 'storage': dict({ + 'configuration': dict({ + 'provider': 'AWS', + 'secret_access_key': '', + 'type': 's3', + }), + 'name': 'mystorage', + 'project_id': '123456', + 'readonly': True, + 'source_path': 'bucket/myfolder', + 'storage_type': 's3', + 'target_path': 'my/target', + }), + }) +# --- +# name: test_storage_schema_patches + list([ + dict({ + 'description': 'Microsoft Azure Blob Storage', + 'name': 'azureblob', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Azure Storage Account Name. + + Set this to the Azure Storage Account Name in use. + + Leave blank to use SAS URL or Emulator, otherwise it needs to be set. + + If this is blank and if env_auth is set it will be read from the + environment variable `AZURE_STORAGE_ACCOUNT_NAME` if possible. + + ''', + 'ispassword': False, + 'name': 'account', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Read credentials from runtime (environment variables, CLI or MSI). + + See the [authentication docs](/azureblob#authentication) for full info. + ''', + 'ispassword': False, + 'name': 'env_auth', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Storage Account Shared Key. + + Leave blank to use SAS URL or Emulator. + ''', + 'ispassword': False, + 'name': 'key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + SAS URL for container level access only. + + Leave blank if using account/key or Emulator. + ''', + 'ispassword': False, + 'name': 'sas_url', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + ID of the service principal's tenant. Also called its directory ID. + + Set this if using + - Service principal with client secret + - Service principal with certificate + - User with username and password + + ''', + 'ispassword': False, + 'name': 'tenant', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The ID of the client in use. 
+ + Set this if using + - Service principal with client secret + - Service principal with certificate + - User with username and password + + ''', + 'ispassword': False, + 'name': 'client_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + One of the service principal's client secrets + + Set this if using + - Service principal with client secret + + ''', + 'ispassword': False, + 'name': 'client_secret', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Path to a PEM or PKCS12 certificate file including the private key. + + Set this if using + - Service principal with certificate + + ''', + 'ispassword': False, + 'name': 'client_certificate_path', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Password for the certificate file (optional). + + Optionally set this if using + - Service principal with certificate + + And the certificate has a password. + + ''', + 'ispassword': True, + 'name': 'client_certificate_password', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Send the certificate chain when using certificate auth. + + Specifies whether an authentication request will include an x5c header + to support subject name / issuer based authentication. When set to + true, authentication requests include the x5c header. + + Optionally set this if using + - Service principal with certificate + + ''', + 'ispassword': False, + 'name': 'client_send_certificate_chain', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + User name (usually an email address) + + Set this if using + - User with username and password + + ''', + 'ispassword': False, + 'name': 'username', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The user's password + + Set this if using + - User with username and password + + ''', + 'ispassword': True, + 'name': 'password', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Path to file containing credentials for use with a service principal. + + Leave blank normally. Needed only if you want to use a service principal instead of interactive login. + + $ az ad sp create-for-rbac --name "" \ + --role "Storage Blob Data Owner" \ + --scopes "/subscriptions//resourceGroups//providers/Microsoft.Storage/storageAccounts//blobServices/default/containers/" \ + > azure-principal.json + + See ["Create an Azure service principal"](https://docs.microsoft.com/en-us/cli/azure/create-an-azure-service-principal-azure-cli) and ["Assign an Azure role for access to blob data"](https://docs.microsoft.com/en-us/azure/storage/common/storage-auth-aad-rbac-cli) pages for more details. 
+ + It may be more convenient to put the credentials directly into the + rclone config file under the `client_id`, `tenant` and `client_secret` + keys instead of setting `service_principal_file`. + + ''', + 'ispassword': False, + 'name': 'service_principal_file', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Skip requesting Microsoft Entra instance metadata + + This should be set true only by applications authenticating in + disconnected clouds, or private clouds such as Azure Stack. + + It determines whether rclone requests Microsoft Entra instance + metadata from `https://login.microsoft.com/` before + authenticating. + + Setting this to true will skip this request, making you responsible + for ensuring the configured authority is valid and trustworthy. + + ''', + 'ispassword': False, + 'name': 'disable_instance_discovery', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use a managed service identity to authenticate (only works in Azure). + + When true, use a [managed service identity](https://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/) + to authenticate to Azure Storage instead of a SAS token or account key. + + If the VM(SS) on which this program is running has a system-assigned identity, it will + be used by default. If the resource has no system-assigned but exactly one user-assigned identity, + the user-assigned identity will be used by default. If the resource has multiple user-assigned + identities, the identity to use must be explicitly specified using exactly one of the msi_object_id, + msi_client_id, or msi_mi_res_id parameters. + ''', + 'ispassword': False, + 'name': 'use_msi', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Object ID of the user-assigned MSI to use, if any. + + Leave blank if msi_client_id or msi_mi_res_id specified. + ''', + 'ispassword': False, + 'name': 'msi_object_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Object ID of the user-assigned MSI to use, if any. + + Leave blank if msi_object_id or msi_mi_res_id specified. + ''', + 'ispassword': False, + 'name': 'msi_client_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Azure resource ID of the user-assigned MSI to use, if any. + + Leave blank if msi_client_id or msi_object_id specified. + ''', + 'ispassword': False, + 'name': 'msi_mi_res_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Uses local storage emulator if provided as 'true'. + + Leave blank if using real azure storage endpoint. 
+ ''', + 'ispassword': False, + 'name': 'use_emulator', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use Azure CLI tool az for authentication + + Set to use the [Azure CLI tool az](https://learn.microsoft.com/en-us/cli/azure/) + as the sole means of authentication. + + Setting this can be useful if you wish to use the az CLI on a host with + a System Managed Identity that you do not want to use. + + Don't set env_auth at the same time. + + ''', + 'ispassword': False, + 'name': 'use_az', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Endpoint for the service. + + Leave blank normally. + ''', + 'ispassword': False, + 'name': 'endpoint', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Cutoff for switching to chunked upload (<= 256 MiB) (deprecated).', + 'ispassword': False, + 'name': 'upload_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 4194304.0, + 'default_str': '4Mi', + 'exclusive': False, + 'help': ''' + Upload chunk size. + + Note that this is stored in memory and there may be up to + "--transfers" * "--azureblob-upload-concurrency" chunks stored at once + in memory. + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 16.0, + 'default_str': '16', + 'exclusive': False, + 'help': ''' + Concurrency for multipart uploads. + + This is the number of chunks of the same file that are uploaded + concurrently. + + If you are uploading small numbers of large files over high-speed + links and these uploads do not fully utilize your bandwidth, then + increasing this may help to speed up the transfers. + + In tests, upload speed increases almost linearly with upload + concurrency. For example to fill a gigabit pipe it may be necessary to + raise this to 64. Note that this will use more memory. + + Note that chunks are stored in memory and there may be up to + "--transfers" * "--azureblob-upload-concurrency" chunks stored at once + in memory. + ''', + 'ispassword': False, + 'name': 'upload_concurrency', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 8388608.0, + 'default_str': '8Mi', + 'exclusive': False, + 'help': ''' + Cutoff for switching to multipart copy. + + Any files larger than this that need to be server-side copied will be + copied in chunks of chunk_size using the put block list API. + + Files smaller than this limit will be copied with the Copy Blob API. + ''', + 'ispassword': False, + 'name': 'copy_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 512.0, + 'default_str': '512', + 'exclusive': False, + 'help': ''' + Concurrency for multipart copy. + + This is the number of chunks of the same file that are copied + concurrently. + + These chunks are not buffered in memory and Microsoft recommends + setting this value to greater than 1000 in the azcopy documentation. 
+ + https://learn.microsoft.com/en-us/azure/storage/common/storage-use-azcopy-optimize#increase-concurrency + + In tests, copy speed increases almost linearly with copy + concurrency. + ''', + 'ispassword': False, + 'name': 'copy_concurrency', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': ''' + Whether to use the Copy Blob API when copying to the same storage account. + + If true (the default) then rclone will use the Copy Blob API for + copies to the same storage account even when the size is above the + copy_cutoff. + + Rclone assumes that the same storage account means the same config + and does not check for the same storage account in different configs. + + There should be no need to change this value. + + ''', + 'ispassword': False, + 'name': 'use_copy_blob', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 5000.0, + 'default_str': '5000', + 'exclusive': False, + 'help': ''' + Size of blob list. + + This sets the number of blobs requested in each listing chunk. Default + is the maximum, 5000. "List blobs" requests are permitted 2 minutes + per megabyte to complete. If an operation is taking longer than 2 + minutes per megabyte on average, it will time out ( + [source](https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-blob-service-operations#exceptions-to-default-timeout-interval) + ). This can be used to limit the number of blobs items to return, to + avoid the time out. + ''', + 'ispassword': False, + 'name': 'list_chunk', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Access tier of blob: hot, cool, cold or archive. + + Archived blobs can be restored by setting access tier to hot, cool or + cold. Leave blank if you intend to use default access tier, which is + set at account level + + If there is no "access tier" specified, rclone doesn't apply any tier. + rclone performs "Set Tier" operation on blobs while uploading, if objects + are not modified, specifying "access tier" to new one will have no effect. + If blobs are in "archive tier" at remote, trying to perform data transfer + operations from remote will not be allowed. User should first restore by + tiering blob to "Hot", "Cool" or "Cold". + ''', + 'ispassword': False, + 'name': 'access_tier', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Delete archive tier blobs before overwriting. + + Archive tier blobs cannot be updated. So without this flag, if you + attempt to update an archive tier blob, then rclone will produce the + error: + + can't update archive tier blob without --azureblob-archive-tier-delete + + With this flag set then before rclone attempts to overwrite an archive + tier blob, it will delete the existing blob before uploading its + replacement. This has the potential for data loss if the upload fails + (unlike updating a normal blob) and also may cost more since deleting + archive tier blobs early may be chargable. 
+ + ''', + 'ispassword': False, + 'name': 'archive_tier_delete', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Don't store MD5 checksum with object metadata. + + Normally rclone will calculate the MD5 checksum of the input before + uploading it so it can add it to metadata on the object. This is great + for data integrity checking but can cause long delays for large files + to start uploading. + ''', + 'ispassword': False, + 'name': 'disable_checksum', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 60000000000.0, + 'default_str': '1m0s', + 'exclusive': False, + 'help': 'How often internal memory buffer pools will be flushed. (no longer used)', + 'ispassword': False, + 'name': 'memory_pool_flush_time', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Whether to use mmap buffers in internal memory pool. (no longer used)', + 'ispassword': False, + 'name': 'memory_pool_use_mmap', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 21078018.0, + 'default_str': 'Slash,BackSlash,Del,Ctl,RightPeriod,InvalidUtf8', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + The container and its blobs can be accessed only with an authorized request. + It's a default value. + ''', + 'value': '', + }), + dict({ + 'help': 'Blob data within this container can be read via anonymous request.', + 'value': 'blob', + }), + dict({ + 'help': 'Allow full public read access for container and blob data.', + 'value': 'container', + }), + ]), + 'exclusive': False, + 'help': 'Public access level of a container: blob or container.', + 'ispassword': False, + 'name': 'public_access', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Upload an empty object with a trailing slash when a new directory is created + + Empty folders are unsupported for bucket based remotes, this option + creates an empty object ending with "/", to persist the folder. + + This object also has the metadata "hdi_isfolder = true" to conform to + the Microsoft standard. + + ''', + 'ispassword': False, + 'name': 'directory_markers', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If set, don't attempt to check the container exists or create it. + + This can be useful when trying to minimise the number of transactions + rclone does if you know the container exists already. 
+ + ''', + 'ispassword': False, + 'name': 'no_check_container', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'If set, do not do HEAD before GET when getting objects.', + 'ispassword': False, + 'name': 'no_head_object', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'By default, the delete operation fails if a blob has snapshots', + 'value': '', + }), + dict({ + 'help': "Specify 'include' to remove the root blob and all its snapshots", + 'value': 'include', + }), + dict({ + 'help': "Specify 'only' to remove only the snapshots but keep the root blob.", + 'value': 'only', + }), + ]), + 'exclusive': True, + 'help': 'Set to specify how to deal with snapshots on blob deletion.', + 'ispassword': False, + 'name': 'delete_snapshots', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'azureblob', + }), + dict({ + 'description': 'Microsoft Azure Files', + 'name': 'azurefiles', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Azure Storage Account Name. + + Set this to the Azure Storage Account Name in use. + + Leave blank to use SAS URL or connection string, otherwise it needs to be set. + + If this is blank and if env_auth is set it will be read from the + environment variable `AZURE_STORAGE_ACCOUNT_NAME` if possible. + + ''', + 'ispassword': False, + 'name': 'account', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Azure Files Share Name. + + This is required and is the name of the share to access. + + ''', + 'ispassword': False, + 'name': 'share_name', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Read credentials from runtime (environment variables, CLI or MSI). + + See the [authentication docs](/azurefiles#authentication) for full info. + ''', + 'ispassword': False, + 'name': 'env_auth', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Storage Account Shared Key. + + Leave blank to use SAS URL or connection string. + ''', + 'ispassword': False, + 'name': 'key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + SAS URL. + + Leave blank if using account/key or connection string. 
+ ''', + 'ispassword': False, + 'name': 'sas_url', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Azure Files Connection String.', + 'ispassword': False, + 'name': 'connection_string', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + ID of the service principal's tenant. Also called its directory ID. + + Set this if using + - Service principal with client secret + - Service principal with certificate + - User with username and password + + ''', + 'ispassword': False, + 'name': 'tenant', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The ID of the client in use. + + Set this if using + - Service principal with client secret + - Service principal with certificate + - User with username and password + + ''', + 'ispassword': False, + 'name': 'client_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + One of the service principal's client secrets + + Set this if using + - Service principal with client secret + + ''', + 'ispassword': False, + 'name': 'client_secret', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Path to a PEM or PKCS12 certificate file including the private key. + + Set this if using + - Service principal with certificate + + ''', + 'ispassword': False, + 'name': 'client_certificate_path', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Password for the certificate file (optional). + + Optionally set this if using + - Service principal with certificate + + And the certificate has a password. + + ''', + 'ispassword': True, + 'name': 'client_certificate_password', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Send the certificate chain when using certificate auth. + + Specifies whether an authentication request will include an x5c header + to support subject name / issuer based authentication. When set to + true, authentication requests include the x5c header. 
+ + Optionally set this if using + - Service principal with certificate + + ''', + 'ispassword': False, + 'name': 'client_send_certificate_chain', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + User name (usually an email address) + + Set this if using + - User with username and password + + ''', + 'ispassword': False, + 'name': 'username', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The user's password + + Set this if using + - User with username and password + + ''', + 'ispassword': True, + 'name': 'password', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Path to file containing credentials for use with a service principal. + + Leave blank normally. Needed only if you want to use a service principal instead of interactive login. + + $ az ad sp create-for-rbac --name "" \ + --role "Storage Files Data Owner" \ + --scopes "/subscriptions//resourceGroups//providers/Microsoft.Storage/storageAccounts//blobServices/default/containers/" \ + > azure-principal.json + + See ["Create an Azure service principal"](https://docs.microsoft.com/en-us/cli/azure/create-an-azure-service-principal-azure-cli) and ["Assign an Azure role for access to files data"](https://docs.microsoft.com/en-us/azure/storage/common/storage-auth-aad-rbac-cli) pages for more details. + + **NB** this section needs updating for Azure Files - pull requests appreciated! + + It may be more convenient to put the credentials directly into the + rclone config file under the `client_id`, `tenant` and `client_secret` + keys instead of setting `service_principal_file`. + + ''', + 'ispassword': False, + 'name': 'service_principal_file', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use a managed service identity to authenticate (only works in Azure). + + When true, use a [managed service identity](https://docs.microsoft.com/en-us/azure/active-directory/managed-identities-azure-resources/) + to authenticate to Azure Storage instead of a SAS token or account key. + + If the VM(SS) on which this program is running has a system-assigned identity, it will + be used by default. If the resource has no system-assigned but exactly one user-assigned identity, + the user-assigned identity will be used by default. If the resource has multiple user-assigned + identities, the identity to use must be explicitly specified using exactly one of the msi_object_id, + msi_client_id, or msi_mi_res_id parameters. + ''', + 'ispassword': False, + 'name': 'use_msi', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Object ID of the user-assigned MSI to use, if any. + + Leave blank if msi_client_id or msi_mi_res_id specified. + ''', + 'ispassword': False, + 'name': 'msi_object_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Object ID of the user-assigned MSI to use, if any. 
+ + Leave blank if msi_object_id or msi_mi_res_id specified. + ''', + 'ispassword': False, + 'name': 'msi_client_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Azure resource ID of the user-assigned MSI to use, if any. + + Leave blank if msi_client_id or msi_object_id specified. + ''', + 'ispassword': False, + 'name': 'msi_mi_res_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Skip requesting Microsoft Entra instance metadata + This should be set true only by applications authenticating in + disconnected clouds, or private clouds such as Azure Stack. + It determines whether rclone requests Microsoft Entra instance + metadata from `https://login.microsoft.com/` before + authenticating. + Setting this to true will skip this request, making you responsible + for ensuring the configured authority is valid and trustworthy. + + ''', + 'ispassword': False, + 'name': 'disable_instance_discovery', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use Azure CLI tool az for authentication + Set to use the [Azure CLI tool az](https://learn.microsoft.com/en-us/cli/azure/) + as the sole means of authentication. + Setting this can be useful if you wish to use the az CLI on a host with + a System Managed Identity that you do not want to use. + Don't set env_auth at the same time. + + ''', + 'ispassword': False, + 'name': 'use_az', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Endpoint for the service. + + Leave blank normally. + ''', + 'ispassword': False, + 'name': 'endpoint', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 4194304.0, + 'default_str': '4Mi', + 'exclusive': False, + 'help': ''' + Upload chunk size. + + Note that this is stored in memory and there may be up to + "--transfers" * "--azurefile-upload-concurrency" chunks stored at once + in memory. + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 16.0, + 'default_str': '16', + 'exclusive': False, + 'help': ''' + Concurrency for multipart uploads. + + This is the number of chunks of the same file that are uploaded + concurrently. + + If you are uploading small numbers of large files over high-speed + links and these uploads do not fully utilize your bandwidth, then + increasing this may help to speed up the transfers. + + Note that chunks are stored in memory and there may be up to + "--transfers" * "--azurefile-upload-concurrency" chunks stored at once + in memory. + ''', + 'ispassword': False, + 'name': 'upload_concurrency', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 10737418240.0, + 'default_str': '10Gi', + 'exclusive': False, + 'help': ''' + Max size for streamed files. + + Azure files needs to know in advance how big the file will be. When + rclone doesn't know it uses this value instead. 
+ + This will be used when rclone is streaming data, the most common uses are: + + - Uploading files with `--vfs-cache-mode off` with `rclone mount` + - Using `rclone rcat` + - Copying files with unknown length + + You will need this much free space in the share as the file will be this size temporarily. + + ''', + 'ispassword': False, + 'name': 'max_stream_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 54634382.0, + 'default_str': 'Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,RightPeriod,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'azurefiles', + }), + dict({ + 'description': 'Backblaze B2', + 'name': 'b2', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Account ID or Application Key ID.', + 'ispassword': False, + 'name': 'account', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Application Key.', + 'ispassword': False, + 'name': 'key', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Endpoint for the service. + + Leave blank normally. + ''', + 'ispassword': False, + 'name': 'endpoint', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + A flag string for X-Bz-Test-Mode header for debugging. + + This is for debugging purposes only. Setting it to one of the strings + below will cause b2 to return specific errors: + + * "fail_some_uploads" + * "expire_some_account_authorization_tokens" + * "force_cap_exceeded" + + These will be set in the "X-Bz-Test-Mode" header which is documented + in the [b2 integrations checklist](https://www.backblaze.com/docs/cloud-storage-integration-checklist). + ''', + 'ispassword': False, + 'name': 'test_mode', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Include old versions in directory listings. + + Note that when using this no file write operations are permitted, + so you can't upload files or delete them. + ''', + 'ispassword': False, + 'name': 'versions', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '0001-01-01T00:00:00Z', + 'default_str': 'off', + 'exclusive': False, + 'help': ''' + Show file versions as they were at the specified time. + + Note that when using this no file write operations are permitted, + so you can't upload files or delete them. 
+ ''', + 'ispassword': False, + 'name': 'version_at', + 'required': False, + 'sensitive': False, + 'type': 'Time', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Permanently delete files on remote removal, otherwise hide files.', + 'ispassword': False, + 'name': 'hard_delete', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 209715200.0, + 'default_str': '200Mi', + 'exclusive': False, + 'help': ''' + Cutoff for switching to chunked upload. + + Files above this size will be uploaded in chunks of "--b2-chunk-size". + + This value should be set no larger than 4.657 GiB (== 5 GB). + ''', + 'ispassword': False, + 'name': 'upload_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 4294967296.0, + 'default_str': '4Gi', + 'exclusive': False, + 'help': ''' + Cutoff for switching to multipart copy. + + Any files larger than this that need to be server-side copied will be + copied in chunks of this size. + + The minimum is 0 and the maximum is 4.6 GiB. + ''', + 'ispassword': False, + 'name': 'copy_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 100663296.0, + 'default_str': '96Mi', + 'exclusive': False, + 'help': ''' + Upload chunk size. + + When uploading large files, chunk the file into this size. + + Must fit in memory. These chunks are buffered in memory and there + might a maximum of "--transfers" chunks in progress at once. + + 5,000,000 Bytes is the minimum size. + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 4.0, + 'default_str': '4', + 'exclusive': False, + 'help': ''' + Concurrency for multipart uploads. + + This is the number of chunks of the same file that are uploaded + concurrently. + + Note that chunks are stored in memory and there may be up to + "--transfers" * "--b2-upload-concurrency" chunks stored at once + in memory. + ''', + 'ispassword': False, + 'name': 'upload_concurrency', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Disable checksums for large (> upload cutoff) files. + + Normally rclone will calculate the SHA1 checksum of the input before + uploading it so it can add it to metadata on the object. This is great + for data integrity checking but can cause long delays for large files + to start uploading. + ''', + 'ispassword': False, + 'name': 'disable_checksum', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Custom endpoint for downloads. + + This is usually set to a Cloudflare CDN URL as Backblaze offers + free egress for data downloaded through the Cloudflare network. + Rclone works with private buckets by sending an "Authorization" header. + If the custom endpoint rewrites the requests for authentication, + e.g., in Cloudflare Workers, this header needs to be handled properly. + Leave blank if you want to use the endpoint provided by Backblaze. 
+ + The URL provided here SHOULD have the protocol and SHOULD NOT have + a trailing slash or specify the /file/bucket subpath as rclone will + request files with "{download_url}/file/{bucket_name}/{path}". + + Example: + > https://mysubdomain.mydomain.tld + (No trailing "/", "file" or "bucket") + ''', + 'ispassword': False, + 'name': 'download_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 604800000000000.0, + 'default_str': '1w', + 'exclusive': False, + 'help': ''' + Time before the public link authorization token will expire in s or suffix ms|s|m|h|d. + + This is used in combination with "rclone link" for making files + accessible to the public and sets the duration before the download + authorization token will expire. + + The minimum value is 1 second. The maximum value is one week. + ''', + 'ispassword': False, + 'name': 'download_auth_duration', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': 60000000000.0, + 'default_str': '1m0s', + 'exclusive': False, + 'help': 'How often internal memory buffer pools will be flushed. (no longer used)', + 'ispassword': False, + 'name': 'memory_pool_flush_time', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Whether to use mmap buffers in internal memory pool. (no longer used)', + 'ispassword': False, + 'name': 'memory_pool_use_mmap', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': '0', + 'exclusive': False, + 'help': ''' + Set the number of days deleted files should be kept when creating a bucket. + + On bucket creation, this parameter is used to create a lifecycle rule + for the entire bucket. + + If lifecycle is 0 (the default) it does not create a lifecycle rule so + the default B2 behaviour applies. This is to create versions of files + on delete and overwrite and to keep them indefinitely. + + If lifecycle is >0 then it creates a single rule setting the number of + days before a file that is deleted or overwritten is deleted + permanently. This is known as daysFromHidingToDeleting in the b2 docs. + + The minimum value for this parameter is 1 day. + + You can also enable hard_delete in the config also which will mean + deletions won't cause versions but overwrites will still cause + versions to be made. + + See: [rclone backend lifecycle](#lifecycle) for setting lifecycles after bucket creation. + + ''', + 'ispassword': False, + 'name': 'lifecycle', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 50438146.0, + 'default_str': 'Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'b2', + }), + dict({ + 'description': 'Box', + 'name': 'box', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '0', + 'default_str': '0', + 'exclusive': False, + 'help': 'Fill in for rclone to use a non root folder as its starting point.', + 'ispassword': False, + 'name': 'root_folder_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Box App config.json location + + Leave blank normally. + + Leading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`. + ''', + 'ispassword': False, + 'name': 'box_config_file', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Box App Primary Access Token + + Leave blank normally. 
+ ''', + 'ispassword': False, + 'name': 'access_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 'user', + 'default_str': 'user', + 'examples': list([ + dict({ + 'help': 'Rclone should act on behalf of a user.', + 'value': 'user', + }), + dict({ + 'help': 'Rclone should act on behalf of a service account.', + 'value': 'enterprise', + }), + ]), + 'exclusive': False, + 'help': '', + 'ispassword': False, + 'name': 'box_sub_type', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 52428800.0, + 'default_str': '50Mi', + 'exclusive': False, + 'help': 'Cutoff for switching to multipart upload (>= 50 MiB).', + 'ispassword': False, + 'name': 'upload_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 100.0, + 'default_str': '100', + 'exclusive': False, + 'help': 'Max number of times to try committing a multipart file.', + 'ispassword': False, + 'name': 'commit_retries', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 1000.0, + 'default_str': '1000', + 'exclusive': False, + 'help': 'Size of listing chunk 1-1000.', + 'ispassword': False, + 'name': 'list_chunk', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Only show items owned by the login (email address) passed in.', + 'ispassword': False, + 'name': 'owned_by', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Impersonate this user ID when using a service account. + + Setting this flag allows rclone, when using a JWT service account, to + act on behalf of another user by setting the as-user header. + + The user ID is the Box identifier for a user. User IDs can found for + any user via the GET /users endpoint, which is only available to + admins, or by calling the GET /users/me endpoint with an authenticated + user session. + + See: https://developer.box.com/guides/authentication/jwt/as-user/ + + ''', + 'ispassword': False, + 'name': 'impersonate', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 52535298.0, + 'default_str': 'Slash,BackSlash,Del,Ctl,RightSpace,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'box', + }), + dict({ + 'description': 'Cloudinary', + 'name': 'cloudinary', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Cloudinary Environment Name', + 'ispassword': False, + 'name': 'cloud_name', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Cloudinary API Key', + 'ispassword': False, + 'name': 'api_key', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Cloudinary API Secret', + 'ispassword': False, + 'name': 'api_secret', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Specify the API endpoint for environments out of the US', + 'ispassword': False, + 'name': 'upload_prefix', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Upload Preset to select asset manipulation on upload', + 'ispassword': False, + 'name': 'upload_preset', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 52543246.0, + 'default_str': 'Slash,LtGt,DoubleQuote,Question,Asterisk,Pipe,Hash,Percent,BackSlash,Del,Ctl,RightSpace,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': '0s', + 'exclusive': False, + 'help': 'Wait N seconds for eventual consistency of the databases that support the backend operation', + 'ispassword': False, + 'name': 'eventually_consistent_delay', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': 'Cloudinary handles media formats as a file attribute and strips it from the name, which is unlike most other file systems', + 'ispassword': False, + 'name': 'adjust_media_files_extensions', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': list([ + '3ds', + '3g2', + '3gp', + 'ai', + 'arw', + 'avi', + 'avif', + 'bmp', + 'bw', + 'cr2', + 'cr3', + 'djvu', + 'dng', + 'eps3', + 'fbx', + 'flif', + 'flv', + 'gif', + 'glb', + 'gltf', + 'hdp', + 'heic', + 'heif', + 'ico', + 'indd', + 'jp2', + 'jpe', + 'jpeg', + 'jpg', + 'jxl', + 'jxr', + 'm2ts', + 'mov', + 'mp4', + 'mpeg', + 'mts', + 'mxf', + 'obj', + 'ogv', + 'pdf', + 'ply', + 'png', + 'psd', + 'svg', + 'tga', + 'tif', + 'tiff', + 'ts', + 'u3ma', + 'usdz', + 'wdp', + 'webm', + 'webp', + 'wmv', + ]), + 'default_str': '[3ds 3g2 3gp ai arw avi avif bmp bw cr2 cr3 djvu dng eps3 fbx flif flv gif glb gltf hdp heic heif ico indd jp2 jpe jpeg jpg jxl jxr m2ts mov mp4 mpeg mts mxf obj ogv pdf ply png psd svg tga tif tiff ts u3ma usdz wdp webm webp wmv]', + 'exclusive': False, + 'help': 'Cloudinary supported media extensions', + 'ispassword': False, + 'name': 'media_extensions', + 'required': False, + 'sensitive': False, + 'type': 'stringArray', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'cloudinary', + }), + dict({ + 'description': 'DOI datasets', + 'name': 'doi', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'The DOI or the doi.org URL.', + 'ispassword': False, + 'name': 'doi', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Auto-detect provider', + 'value': 'auto', + }), + dict({ + 'help': 'Zenodo', + 'value': 'zenodo', + }), + dict({ + 'help': 'Dataverse', + 'value': 'dataverse', + }), + dict({ + 'help': 'Invenio', + 'value': 'invenio', + }), + ]), + 'exclusive': False, + 'help': ''' + DOI provider. + + The DOI provider can be set when rclone does not automatically recognize a supported DOI provider. + ''', + 'ispassword': False, + 'name': 'provider', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The URL of the DOI resolver API to use. + + The DOI resolver can be set for testing or for cases when the the canonical DOI resolver API cannot be used. + + Defaults to "https://doi.org/api". 
+ ''', + 'ispassword': False, + 'name': 'doi_resolver_api_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'doi', + }), + dict({ + 'description': 'Google Drive', + 'name': 'drive', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Full access all files, excluding Application Data Folder.', + 'value': 'drive', + }), + dict({ + 'help': 'Read-only access to file metadata and file contents.', + 'value': 'drive.readonly', + }), + dict({ + 'help': ''' + Access to files created by rclone only. + These are visible in the drive website. + File authorization is revoked when the user deauthorizes the app. + ''', + 'value': 'drive.file', + }), + dict({ + 'help': ''' + Allows read and write access to the Application Data folder. + This is not visible in the drive website. + ''', + 'value': 'drive.appfolder', + }), + dict({ + 'help': ''' + Allows read-only access to file metadata but + does not allow any access to read or download file content. + ''', + 'value': 'drive.metadata.readonly', + }), + ]), + 'exclusive': False, + 'help': 'Comma separated list of scopes that rclone should use when requesting access from drive.', + 'ispassword': False, + 'name': 'scope', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + ID of the root folder. + Leave blank normally. + + Fill in to access "Computers" folders (see docs), or for rclone to use + a non root folder as its starting point. + + ''', + 'ispassword': False, + 'name': 'root_folder_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Service Account Credentials JSON file path. + + Leave blank normally. + Needed only if you want use SA instead of interactive login. + + Leading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`. 
+ ''', + 'ispassword': False, + 'name': 'service_account_file', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Service Account Credentials JSON blob. + + Leave blank normally. + Needed only if you want use SA instead of interactive login. + ''', + 'ispassword': False, + 'name': 'service_account_credentials', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'ID of the Shared Drive (Team Drive).', + 'ispassword': False, + 'name': 'team_drive', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Only consider files owned by the authenticated user.', + 'ispassword': False, + 'name': 'auth_owner_only', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': ''' + Send files to the trash instead of deleting permanently. + + Defaults to true, namely sending files to the trash. + Use `--drive-use-trash=false` to delete files permanently instead. + ''', + 'ispassword': False, + 'name': 'use_trash', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Server side copy contents of shortcuts instead of the shortcut. + + When doing server side copies, normally rclone will copy shortcuts as + shortcuts. + + If this flag is used then rclone will copy the contents of shortcuts + rather than shortcuts themselves when doing server side copies. + ''', + 'ispassword': False, + 'name': 'copy_shortcut_content', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Skip google documents in all listings. + + If given, gdocs practically become invisible to rclone. + ''', + 'ispassword': False, + 'name': 'skip_gdocs', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Show all Google Docs including non-exportable ones in listings. + + If you try a server side copy on a Google Form without this flag, you + will get this error: + + No export formats found for "application/vnd.google-apps.form" + + However adding this flag will allow the form to be server side copied. + + Note that rclone doesn't add extensions to the Google Docs file names + in this mode. + + Do **not** use this flag when trying to download Google Docs - rclone + will fail to download them. + + ''', + 'ispassword': False, + 'name': 'show_all_gdocs', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Skip checksums on Google photos and videos only. + + Use this if you get checksum errors when transferring Google photos or + videos. + + Setting this flag will cause Google photos and videos to return a + blank checksums. + + Google photos are identified by being in the "photos" space. 
+ + Corrupted checksums are caused by Google modifying the image/video but + not updating the checksum. + ''', + 'ispassword': False, + 'name': 'skip_checksum_gphotos', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Only show files that are shared with me. + + Instructs rclone to operate on your "Shared with me" folder (where + Google Drive lets you access the files and folders others have shared + with you). + + This works both with the "list" (lsd, lsl, etc.) and the "copy" + commands (copy, sync, etc.), and with all other commands too. + ''', + 'ispassword': False, + 'name': 'shared_with_me', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Only show files that are in the trash. + + This will show trashed files in their original directory structure. + ''', + 'ispassword': False, + 'name': 'trashed_only', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Only show files that are starred.', + 'ispassword': False, + 'name': 'starred_only', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Deprecated: See export_formats.', + 'ispassword': False, + 'name': 'formats', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'docx,xlsx,pptx,svg', + 'default_str': 'docx,xlsx,pptx,svg', + 'exclusive': False, + 'help': 'Comma separated list of preferred formats for downloading Google docs.', + 'ispassword': False, + 'name': 'export_formats', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Comma separated list of preferred formats for uploading Google docs.', + 'ispassword': False, + 'name': 'import_formats', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Allow the filetype to change when uploading Google docs. + + E.g. file.doc to file.docx. This will confuse sync and reupload every time. + ''', + 'ispassword': False, + 'name': 'allow_import_name_change', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use file created date instead of modified date. + + Useful when downloading data and you want the creation date used in + place of the last modified date. + + **WARNING**: This flag may have some unexpected consequences. + + When uploading to your drive all files will be overwritten unless they + haven't been modified since their creation. And the inverse will occur + while downloading. This side effect can be avoided by using the + "--checksum" flag. + + This feature was implemented to retain photos capture date as recorded + by google photos. You will first need to check the "Create a Google + Photos folder" option in your google drive settings. 
You can then copy + or move the photos locally and use the date the image was taken + (created) set as the modification date. + ''', + 'ispassword': False, + 'name': 'use_created_date', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use date file was shared instead of modified date. + + Note that, as with "--drive-use-created-date", this flag may have + unexpected consequences when uploading/downloading files. + + If both this flag and "--drive-use-created-date" are set, the created + date is used. + ''', + 'ispassword': False, + 'name': 'use_shared_date', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 1000.0, + 'default_str': '1000', + 'exclusive': False, + 'help': 'Size of listing chunk 100-1000, 0 to disable.', + 'ispassword': False, + 'name': 'list_chunk', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Impersonate this user when using a service account.', + 'ispassword': False, + 'name': 'impersonate', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Deprecated: No longer needed.', + 'ispassword': False, + 'name': 'alternate_export', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 8388608.0, + 'default_str': '8Mi', + 'exclusive': False, + 'help': 'Cutoff for switching to chunked upload.', + 'ispassword': False, + 'name': 'upload_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 8388608.0, + 'default_str': '8Mi', + 'exclusive': False, + 'help': ''' + Upload chunk size. + + Must a power of 2 >= 256k. + + Making this larger will improve performance, but note that each chunk + is buffered in memory one per transfer. + + Reducing this will reduce memory usage but decrease performance. + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Set to allow files which return cannotDownloadAbusiveFile to be downloaded. + + If downloading a file returns the error "This file has been identified + as malware or spam and cannot be downloaded" with the error code + "cannotDownloadAbusiveFile" then supply this flag to rclone to + indicate you acknowledge the risks of downloading the file and rclone + will download it anyway. + + Note that if you are using service account it will need Manager + permission (not Content Manager) to for this flag to work. If the SA + does not have the right permission, Google will just ignore the flag. 
+ ''', + 'ispassword': False, + 'name': 'acknowledge_abuse', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Keep new head revision of each file forever.', + 'ispassword': False, + 'name': 'keep_revision_forever', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Show sizes as storage quota usage, not actual size. + + Show the size of a file as the storage quota used. This is the + current version plus any older versions that have been set to keep + forever. + + **WARNING**: This flag may have some unexpected consequences. + + It is not recommended to set this flag in your config - the + recommended usage is using the flag form --drive-size-as-quota when + doing rclone ls/lsl/lsf/lsjson/etc only. + + If you do use this flag for syncing (not recommended) then you will + need to use --ignore size also. + ''', + 'ispassword': False, + 'name': 'size_as_quota', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': -1.0, + 'default_str': 'off', + 'exclusive': False, + 'help': "If Object's are greater, use drive v2 API to download.", + 'ispassword': False, + 'name': 'v2_download_min_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 100000000.0, + 'default_str': '100ms', + 'exclusive': False, + 'help': 'Minimum time to sleep between API calls.', + 'ispassword': False, + 'name': 'pacer_min_sleep', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': 100.0, + 'default_str': '100', + 'exclusive': False, + 'help': 'Number of API calls to allow without sleeping.', + 'ispassword': False, + 'name': 'pacer_burst', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Deprecated: use --server-side-across-configs instead. + + Allow server-side operations (e.g. copy) to work across different drive configs. + + This can be useful if you wish to do a server-side copy between two + different Google drives. Note that this isn't enabled by default + because it isn't easy to tell if it will work between any two + configurations. + ''', + 'ispassword': False, + 'name': 'server_side_across_configs', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': ''' + Disable drive using http2. + + There is currently an unsolved issue with the google drive backend and + HTTP/2. HTTP/2 is therefore disabled by default for the drive backend + but can be re-enabled here. When the issue is solved this flag will + be removed. + + See: https://github.com/rclone/rclone/issues/3631 + + + ''', + 'ispassword': False, + 'name': 'disable_http2', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Make upload limit errors be fatal. + + At the time of writing it is only possible to upload 750 GiB of data to + Google Drive a day (this is an undocumented limit). 
When this limit is + reached Google Drive produces a slightly different error message. When + this flag is set it causes these errors to be fatal. These will stop + the in-progress sync. + + Note that this detection is relying on error message strings which + Google don't document so it may break in the future. + + See: https://github.com/rclone/rclone/issues/3857 + + ''', + 'ispassword': False, + 'name': 'stop_on_upload_limit', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Make download limit errors be fatal. + + At the time of writing it is only possible to download 10 TiB of data from + Google Drive a day (this is an undocumented limit). When this limit is + reached Google Drive produces a slightly different error message. When + this flag is set it causes these errors to be fatal. These will stop + the in-progress sync. + + Note that this detection is relying on error message strings which + Google don't document so it may break in the future. + + ''', + 'ispassword': False, + 'name': 'stop_on_download_limit', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If set skip shortcut files. + + Normally rclone dereferences shortcut files making them appear as if + they are the original file (see [the shortcuts section](#shortcuts)). + If this flag is set then rclone will ignore shortcut files completely. + + ''', + 'ispassword': False, + 'name': 'skip_shortcuts', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If set skip dangling shortcut files. + + If this is set then rclone will not show any dangling shortcuts in listings. + + ''', + 'ispassword': False, + 'name': 'skip_dangling_shortcuts', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Resource key for accessing a link-shared file. + + If you need to access files shared with a link like this + + https://drive.google.com/drive/folders/XXX?resourcekey=YYY&usp=sharing + + Then you will need to use the first part "XXX" as the "root_folder_id" + and the second part "YYY" as the "resource_key" otherwise you will get + 404 not found errors when trying to access the directory. + + See: https://developers.google.com/drive/api/guides/resource-keys + + This resource key requirement only applies to a subset of old files. + + Note also that opening the folder once in the web interface (with the + user you've authenticated rclone with) seems to be enough so that the + resource key is not needed. + + ''', + 'ispassword': False, + 'name': 'resource_key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': ''' + Work around a bug in Google Drive listing. + + Normally rclone will work around a bug in Google Drive when using + --fast-list (ListR) where the search "(A in parents) or (B in + parents)" returns nothing sometimes. 
See #3114, #4289 and + https://issuetracker.google.com/issues/149522397 + + Rclone detects this by finding no items in more than one directory + when listing and retries them as lists of individual directories. + + This means that if you have a lot of empty directories rclone will end + up listing them all individually and this can take many more API + calls. + + This flag allows the work-around to be disabled. This is **not** + recommended in normal use - only if you have a particular case you are + having trouble with like many empty directories. + + ''', + 'ispassword': False, + 'name': 'fast_list_bug_fix', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 1.0, + 'default_str': 'read', + 'examples': list([ + dict({ + 'help': 'Do not read or write the value', + 'value': 'off', + }), + dict({ + 'help': 'Read the value only', + 'value': 'read', + }), + dict({ + 'help': 'Write the value only', + 'value': 'write', + }), + dict({ + 'help': "If writing fails log errors only, don't fail the transfer", + 'value': 'failok', + }), + dict({ + 'help': 'Read and Write the value.', + 'value': 'read,write', + }), + ]), + 'exclusive': False, + 'help': ''' + Control whether owner should be read or written in metadata. + + Owner is a standard part of the file metadata so is easy to read. But it + isn't always desirable to set the owner from the metadata. + + Note that you can't set the owner on Shared Drives, and that setting + ownership will generate an email to the new owner (this can't be + disabled), and you can't transfer ownership to someone outside your + organization. + + ''', + 'ispassword': False, + 'name': 'metadata_owner', + 'required': False, + 'sensitive': False, + 'type': 'Bits', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': 'off', + 'examples': list([ + dict({ + 'help': 'Do not read or write the value', + 'value': 'off', + }), + dict({ + 'help': 'Read the value only', + 'value': 'read', + }), + dict({ + 'help': 'Write the value only', + 'value': 'write', + }), + dict({ + 'help': "If writing fails log errors only, don't fail the transfer", + 'value': 'failok', + }), + dict({ + 'help': 'Read and Write the value.', + 'value': 'read,write', + }), + ]), + 'exclusive': False, + 'help': ''' + Control whether permissions should be read or written in metadata. + + Reading permissions metadata from files can be done quickly, but it + isn't always desirable to set the permissions from the metadata. + + Note that rclone drops any inherited permissions on Shared Drives and + any owner permission on My Drives as these are duplicated in the owner + metadata. + + ''', + 'ispassword': False, + 'name': 'metadata_permissions', + 'required': False, + 'sensitive': False, + 'type': 'Bits', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': 'off', + 'examples': list([ + dict({ + 'help': 'Do not read or write the value', + 'value': 'off', + }), + dict({ + 'help': 'Read the value only', + 'value': 'read', + }), + dict({ + 'help': 'Write the value only', + 'value': 'write', + }), + dict({ + 'help': "If writing fails log errors only, don't fail the transfer", + 'value': 'failok', + }), + dict({ + 'help': 'Read and Write the value.', + 'value': 'read,write', + }), + ]), + 'exclusive': False, + 'help': ''' + Control whether labels should be read or written in metadata. + + Reading labels metadata from files takes an extra API transaction and + will slow down listings. 
It isn't always desirable to set the labels + from the metadata. + + The format of labels is documented in the drive API documentation at + https://developers.google.com/drive/api/reference/rest/v3/Label - + rclone just provides a JSON dump of this format. + + When setting labels, the label and fields must already exist - rclone + will not create them. This means that if you are transferring labels + from two different accounts you will have to create the labels in + advance and use the metadata mapper to translate the IDs between the + two accounts. + + ''', + 'ispassword': False, + 'name': 'metadata_labels', + 'required': False, + 'sensitive': False, + 'type': 'Bits', + }), + dict({ + 'advanced': True, + 'default': 16777216.0, + 'default_str': 'InvalidUtf8', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'examples': list([ + dict({ + 'help': 'Enter credentials in the next step.', + 'value': 'false', + }), + dict({ + 'help': 'Get GCP IAM credentials from the environment (env vars or IAM).', + 'value': 'true', + }), + ]), + 'exclusive': False, + 'help': ''' + Get IAM credentials from runtime (environment variables or instance meta data if no env vars). + + Only applies if service_account_file and service_account_credentials is blank. + ''', + 'ispassword': False, + 'name': 'env_auth', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'drive', + }), + dict({ + 'description': 'Dropbox', + 'name': 'dropbox', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 50331648.0, + 'default_str': '48Mi', + 'exclusive': False, + 'help': ''' + Upload chunk size (< 150Mi). + + Any files larger than this will be uploaded in chunks of this size. + + Note that chunks are buffered in memory (one at a time) so rclone can + deal with retries. 
Setting this larger will increase the speed + slightly (at most 10% for 128 MiB in tests) at the cost of using more + memory. It can be set smaller if you are tight on memory. + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Impersonate this user when using a business account. + + Note that if you want to use impersonate, you should make sure this + flag is set when running "rclone config" as this will cause rclone to + request the "members.read" scope which it won't normally. This is + needed to lookup a members email address into the internal ID that + dropbox uses in the API. + + Using the "members.read" scope will require a Dropbox Team Admin + to approve during the OAuth flow. + + You will have to use your own App (setting your own client_id and + client_secret) to use this option as currently rclone's default set of + permissions doesn't include "members.read". This can be added once + v1.55 or later is in use everywhere. + + ''', + 'ispassword': False, + 'name': 'impersonate', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Instructs rclone to work on individual shared files. + + In this mode rclone's features are extremely limited - only list (ls, lsl, etc.) + operations and read operations (e.g. downloading) are supported in this mode. + All other operations will be disabled. + ''', + 'ispassword': False, + 'name': 'shared_files', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Instructs rclone to work on shared folders. + + When this flag is used with no path only the List operation is supported and + all available shared folders will be listed. If you specify a path the first part + will be interpreted as the name of shared folder. Rclone will then try to mount this + shared to the root namespace. On success shared folder rclone proceeds normally. + The shared folder is now pretty much a normal folder and all normal operations + are supported. + + Note that we don't unmount the shared folder afterwards so the + --dropbox-shared-folders can be omitted after the first use of a particular + shared folder. + + See also --dropbox-root-namespace for an alternative way to work with shared + folders. + ''', + 'ispassword': False, + 'name': 'shared_folders', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 10000000.0, + 'default_str': '10ms', + 'exclusive': False, + 'help': 'Minimum time to sleep between API calls.', + 'ispassword': False, + 'name': 'pacer_min_sleep', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': 52469762.0, + 'default_str': 'Slash,BackSlash,Del,RightSpace,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Specify a different Dropbox namespace ID to use as the root for all paths.', + 'ispassword': False, + 'name': 'root_namespace', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': list([ + 'html', + 'md', + ]), + 'default_str': 'html,md', + 'exclusive': False, + 'help': ''' + Comma separated list of preferred formats for exporting files + + Certain Dropbox files can only be accessed by exporting them to another format. + These include Dropbox Paper documents. + + For each such file, rclone will choose the first format on this list that Dropbox + considers valid. If none is valid, it will choose Dropbox's default format. + + Known formats include: "html", "md" (markdown) + ''', + 'ispassword': False, + 'name': 'export_formats', + 'required': False, + 'sensitive': False, + 'type': 'CommaSepList', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Skip exportable files in all listings. + + If given, exportable files practically become invisible to rclone. + ''', + 'ispassword': False, + 'name': 'skip_exports', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Show all exportable files in listings. + + Adding this flag will allow all exportable files to be server side copied. + Note that rclone doesn't add extensions to the exportable file names in this mode. + + Do **not** use this flag when trying to download exportable files - rclone + will fail to download them. + + ''', + 'ispassword': False, + 'name': 'show_all_exports', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 'sync', + 'default_str': 'sync', + 'exclusive': False, + 'help': ''' + Upload file batching sync|async|off. + + This sets the batch mode used by rclone. + + For full info see [the main docs](https://rclone.org/dropbox/#batch-mode) + + This has 3 possible values + + - off - no batching + - sync - batch uploads and check completion (default) + - async - batch upload and don't check completion + + Rclone will close any outstanding batches when it exits which may make + a delay on quit. + + ''', + 'ispassword': False, + 'name': 'batch_mode', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': '0', + 'exclusive': False, + 'help': ''' + Max number of files in upload batch. + + This sets the batch size of files to upload. It has to be less than 1000. + + By default this is 0 which means rclone will calculate the batch size + depending on the setting of batch_mode. + + - batch_mode: async - default batch_size is 100 + - batch_mode: sync - default batch_size is the same as --transfers + - batch_mode: off - not in use + + Rclone will close any outstanding batches when it exits which may make + a delay on quit. + + Setting this is a great idea if you are uploading lots of small files + as it will make them a lot quicker. You can use --transfers 32 to + maximise throughput. 
+ + ''', + 'ispassword': False, + 'name': 'batch_size', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': '0s', + 'exclusive': False, + 'help': ''' + Max time to allow an idle upload batch before uploading. + + If an upload batch is idle for more than this long then it will be + uploaded. + + The default for this is 0 which means rclone will choose a sensible + default based on the batch_mode in use. + + - batch_mode: async - default batch_timeout is 10s + - batch_mode: sync - default batch_timeout is 500ms + - batch_mode: off - not in use + + ''', + 'ispassword': False, + 'name': 'batch_timeout', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': 600000000000.0, + 'default_str': '10m0s', + 'exclusive': False, + 'help': 'Max time to wait for a batch to finish committing. (no longer used)', + 'ispassword': False, + 'name': 'batch_commit_timeout', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'dropbox', + }), + dict({ + 'description': '1Fichier', + 'name': 'fichier', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Your API Key, get it from https://1fichier.com/console/params.pl.', + 'ispassword': False, + 'name': 'api_key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'If you want to download a shared folder, add this parameter.', + 'ispassword': False, + 'name': 'shared_folder', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'If you want to download a shared file that is password protected, add this parameter.', + 'ispassword': True, + 'name': 'file_password', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'If you want to list the files in a shared folder that is password protected, add this parameter.', + 'ispassword': True, + 'name': 'folder_password', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Set if you wish to use CDN download links.', + 'ispassword': False, + 'name': 'cdn', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 52666494.0, + 'default_str': 'Slash,LtGt,DoubleQuote,SingleQuote,BackQuote,Dollar,BackSlash,Del,Ctl,LeftSpace,RightSpace,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'fichier', + }), + dict({ + 'description': 'Enterprise File Fabric', + 'name': 'filefabric', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Storage Made Easy US', + 'value': 'https://storagemadeeasy.com', + }), + dict({ + 'help': 'Storage Made Easy EU', + 'value': 'https://eu.storagemadeeasy.com', + }), + dict({ + 'help': 'Connect to your Enterprise File Fabric', + 'value': 'https://yourfabric.smestorage.com', + }), + ]), + 'exclusive': False, + 'help': 'URL of the Enterprise File Fabric to connect to.', + 'ispassword': False, + 'name': 'url', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + ID of the root folder. + + Leave blank normally. + + Fill in to make rclone start with directory of a given ID. + + ''', + 'ispassword': False, + 'name': 'root_folder_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Permanent Authentication Token. + + A Permanent Authentication Token can be created in the Enterprise File + Fabric, on the users Dashboard under Security, there is an entry + you'll see called "My Authentication Tokens". Click the Manage button + to create one. + + These tokens are normally valid for several years. + + For more info see: https://docs.storagemadeeasy.com/organisationcloud/api-tokens + + ''', + 'ispassword': False, + 'name': 'permanent_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Session Token. + + This is a session token which rclone caches in the config file. It is + usually valid for 1 hour. + + Don't set this value - rclone will set it automatically. + + ''', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token expiry time. + + Don't set this value - rclone will set it automatically. + + ''', + 'ispassword': False, + 'name': 'token_expiry', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Version read from the file fabric. + + Don't set this value - rclone will set it automatically. + + ''', + 'ispassword': False, + 'name': 'version', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 50429954.0, + 'default_str': 'Slash,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'filefabric', + }), + dict({ + 'description': 'FileLu Cloud Storage', + 'name': 'filelu', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Your FileLu Rclone key from My Account', + 'ispassword': False, + 'name': 'key', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 536870910.0, + 'default_str': 'Slash,LtGt,DoubleQuote,SingleQuote,BackQuote,Dollar,Colon,Question,Asterisk,Pipe,Hash,Percent,BackSlash,CrLf,Del,Ctl,LeftSpace,LeftPeriod,LeftTilde,LeftCrLfHtVt,RightSpace,RightPeriod,RightCrLfHtVt,InvalidUtf8,Dot,SquareBracket,Semicolon,Exclamation', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'filelu', + }), + dict({ + 'description': 'Files.com', + 'name': 'filescom', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Your site subdomain (e.g. mysite) or custom domain (e.g. myfiles.customdomain.com).', + 'ispassword': False, + 'name': 'site', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'The username used to authenticate with Files.com.', + 'ispassword': False, + 'name': 'username', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'The password used to authenticate with Files.com.', + 'ispassword': True, + 'name': 'password', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'The API key used to authenticate with Files.com.', + 'ispassword': False, + 'name': 'api_key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 60923906.0, + 'default_str': 'Slash,BackSlash,Del,Ctl,RightSpace,RightCrLfHtVt,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'filescom', + }), + dict({ + 'description': 'FTP', + 'name': 'ftp', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + FTP host to connect to. + + E.g. "ftp.example.com". + ''', + 'ispassword': False, + 'name': 'host', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 'vscode', + 'default_str': 'vscode', + 'exclusive': False, + 'help': 'FTP username.', + 'ispassword': False, + 'name': 'user', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 21.0, + 'default_str': '21', + 'exclusive': False, + 'help': 'FTP port number.', + 'ispassword': False, + 'name': 'port', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'FTP password.', + 'ispassword': True, + 'name': 'pass', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use Implicit FTPS (FTP over TLS). + + When using implicit FTP over TLS the client connects using TLS + right from the start which breaks compatibility with + non-TLS-aware servers. This is usually served over port 990 rather + than port 21. Cannot be used in combination with explicit FTPS. + ''', + 'ispassword': False, + 'name': 'tls', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use Explicit FTPS (FTP over TLS). + + When using explicit FTP over TLS the client explicitly requests + security from the server in order to upgrade a plain text connection + to an encrypted one. Cannot be used in combination with implicit FTPS. + ''', + 'ispassword': False, + 'name': 'explicit_tls', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': '0', + 'exclusive': False, + 'help': ''' + Maximum number of FTP simultaneous connections, 0 for unlimited. + + Note that setting this is very likely to cause deadlocks so it should + be used with care. + + If you are doing a sync or copy then make sure concurrency is one more + than the sum of `--transfers` and `--checkers`. + + If you use `--check-first` then it just needs to be one more than the + maximum of `--checkers` and `--transfers`. + + So for `concurrency 3` you'd use `--checkers 2 --transfers 2 + --check-first` or `--checkers 1 --transfers 1`. 
+ + + ''', + 'ispassword': False, + 'name': 'concurrency', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Do not verify the TLS certificate of the server.', + 'ispassword': False, + 'name': 'no_check_certificate', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Disable using EPSV even if server advertises support.', + 'ispassword': False, + 'name': 'disable_epsv', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Disable using MLSD even if server advertises support.', + 'ispassword': False, + 'name': 'disable_mlsd', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Disable using UTF-8 even if server advertises support.', + 'ispassword': False, + 'name': 'disable_utf8', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Use MDTM to set modification time (VsFtpd quirk)', + 'ispassword': False, + 'name': 'writing_mdtm', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Use LIST -a to force listing of hidden files and folders. This will disable the use of MLSD.', + 'ispassword': False, + 'name': 'force_list_hidden', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 60000000000.0, + 'default_str': '1m0s', + 'exclusive': False, + 'help': ''' + Max time before closing idle connections. + + If no connections have been returned to the connection pool in the time + given, rclone will empty the connection pool. + + Set to 0 to keep connections indefinitely. + + ''', + 'ispassword': False, + 'name': 'idle_timeout', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': 60000000000.0, + 'default_str': '1m0s', + 'exclusive': False, + 'help': 'Maximum time to wait for a response to close.', + 'ispassword': False, + 'name': 'close_timeout', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': 32.0, + 'default_str': '32', + 'exclusive': False, + 'help': ''' + Size of TLS session cache for all control and data connections. + + TLS cache allows to resume TLS sessions and reuse PSK between connections. + Increase if default size is not enough resulting in TLS resumption errors. + Enabled by default. Use 0 to disable. 
+ ''', + 'ispassword': False, + 'name': 'tls_cache_size', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Disable TLS 1.3 (workaround for FTP servers with buggy TLS)', + 'ispassword': False, + 'name': 'disable_tls13', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 60000000000.0, + 'default_str': '1m0s', + 'exclusive': False, + 'help': 'Maximum time to wait for data connection closing status.', + 'ispassword': False, + 'name': 'shut_timeout', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Allow asking for FTP password when needed. + + If this is set and no password is supplied then rclone will ask for a password + + ''', + 'ispassword': False, + 'name': 'ask_password', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Socks 5 proxy host. + + Supports the format user:pass@host:port, user@host:port, host:port. + + Example: + + myUser:myPass@localhost:9005 + + ''', + 'ispassword': False, + 'name': 'socks_proxy', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + URL for HTTP CONNECT proxy + + Set this to a URL for an HTTP proxy which supports the HTTP CONNECT verb. + + ''', + 'ispassword': False, + 'name': 'http_proxy', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Don't check the upload is OK + + Normally rclone will try to check the upload exists after it has + uploaded a file to make sure the size and modification time are as + expected. + + This flag stops rclone doing these checks. This enables uploading to + folders which are write only. + + You will likely need to use the --inplace flag also if uploading to + a write only folder. + + ''', + 'ispassword': False, + 'name': 'no_check_upload', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 35749890.0, + 'default_str': 'Slash,Del,Ctl,RightSpace,Dot', + 'examples': list([ + dict({ + 'help': "ProFTPd can't handle '*' in file names", + 'value': 'Asterisk,Ctl,Dot,Slash', + }), + dict({ + 'help': "PureFTPd can't handle '[]' or '*' in file names", + 'value': 'BackSlash,Ctl,Del,Dot,RightSpace,Slash,SquareBracket', + }), + dict({ + 'help': "VsFTPd can't handle file names starting with dot", + 'value': 'Ctl,LeftPeriod,Slash', + }), + ]), + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'ftp', + }), + dict({ + 'description': 'Gofile', + 'name': 'gofile', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + API Access token + + You can get this from the web control panel. + ''', + 'ispassword': False, + 'name': 'access_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + ID of the root folder + + Leave this blank normally, rclone will fill it in automatically. + + If you want rclone to be restricted to a particular folder you can + fill it in - see the docs for more info. + + ''', + 'ispassword': False, + 'name': 'root_folder_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Account ID + + Leave this blank normally, rclone will fill it in automatically. + + ''', + 'ispassword': False, + 'name': 'account_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 1000.0, + 'default_str': '1000', + 'exclusive': False, + 'help': 'Number of items to list in each call', + 'ispassword': False, + 'name': 'list_chunk', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 323331982.0, + 'default_str': 'Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,LeftPeriod,RightPeriod,InvalidUtf8,Dot,Exclamation', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'gofile', + }), + dict({ + 'description': 'Google Cloud Storage (this is not Google Drive)', + 'name': 'google cloud storage', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. 
+ ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Project number. + + Optional - needed only for list/create/delete buckets - see your developer console. + ''', + 'ispassword': False, + 'name': 'project_number', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + User project. + + Optional - needed only for requester pays. + ''', + 'ispassword': False, + 'name': 'user_project', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Service Account Credentials JSON file path. + + Leave blank normally. + Needed only if you want use SA instead of interactive login. + + Leading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`. + ''', + 'ispassword': False, + 'name': 'service_account_file', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Service Account Credentials JSON blob. + + Leave blank normally. + Needed only if you want use SA instead of interactive login. + ''', + 'ispassword': False, + 'name': 'service_account_credentials', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Short-lived access token. + + Leave blank normally. + Needed only if you want use short-lived access token instead of interactive login. + ''', + 'ispassword': False, + 'name': 'access_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Access public buckets and objects without credentials. + + Set to 'true' if you just want to download files and don't configure credentials. + ''', + 'ispassword': False, + 'name': 'anonymous', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + Object owner gets OWNER access. + All Authenticated Users get READER access. + ''', + 'value': 'authenticatedRead', + }), + dict({ + 'help': ''' + Object owner gets OWNER access. + Project team owners get OWNER access. + ''', + 'value': 'bucketOwnerFullControl', + }), + dict({ + 'help': ''' + Object owner gets OWNER access. + Project team owners get READER access. + ''', + 'value': 'bucketOwnerRead', + }), + dict({ + 'help': ''' + Object owner gets OWNER access. + Default if left blank. + ''', + 'value': 'private', + }), + dict({ + 'help': ''' + Object owner gets OWNER access. + Project team members get access according to their roles. 
+ ''', + 'value': 'projectPrivate', + }), + dict({ + 'help': ''' + Object owner gets OWNER access. + All Users get READER access. + ''', + 'value': 'publicRead', + }), + ]), + 'exclusive': False, + 'help': 'Access Control List for new objects.', + 'ispassword': False, + 'name': 'object_acl', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + Project team owners get OWNER access. + All Authenticated Users get READER access. + ''', + 'value': 'authenticatedRead', + }), + dict({ + 'help': ''' + Project team owners get OWNER access. + Default if left blank. + ''', + 'value': 'private', + }), + dict({ + 'help': 'Project team members get access according to their roles.', + 'value': 'projectPrivate', + }), + dict({ + 'help': ''' + Project team owners get OWNER access. + All Users get READER access. + ''', + 'value': 'publicRead', + }), + dict({ + 'help': ''' + Project team owners get OWNER access. + All Users get WRITER access. + ''', + 'value': 'publicReadWrite', + }), + ]), + 'exclusive': False, + 'help': 'Access Control List for new buckets.', + 'ispassword': False, + 'name': 'bucket_acl', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Access checks should use bucket-level IAM policies. + + If you want to upload objects to a bucket with Bucket Policy Only set + then you will need to set this. + + When it is set, rclone: + + - ignores ACLs set on buckets + - ignores ACLs set on objects + - creates buckets with Bucket Policy Only set + + Docs: https://cloud.google.com/storage/docs/bucket-policy-only + + ''', + 'ispassword': False, + 'name': 'bucket_policy_only', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Empty for default location (US)', + 'value': '', + }), + dict({ + 'help': 'Multi-regional location for Asia', + 'value': 'asia', + }), + dict({ + 'help': 'Multi-regional location for Europe', + 'value': 'eu', + }), + dict({ + 'help': 'Multi-regional location for United States', + 'value': 'us', + }), + dict({ + 'help': 'Taiwan', + 'value': 'asia-east1', + }), + dict({ + 'help': 'Hong Kong', + 'value': 'asia-east2', + }), + dict({ + 'help': 'Tokyo', + 'value': 'asia-northeast1', + }), + dict({ + 'help': 'Osaka', + 'value': 'asia-northeast2', + }), + dict({ + 'help': 'Seoul', + 'value': 'asia-northeast3', + }), + dict({ + 'help': 'Mumbai', + 'value': 'asia-south1', + }), + dict({ + 'help': 'Delhi', + 'value': 'asia-south2', + }), + dict({ + 'help': 'Singapore', + 'value': 'asia-southeast1', + }), + dict({ + 'help': 'Jakarta', + 'value': 'asia-southeast2', + }), + dict({ + 'help': 'Sydney', + 'value': 'australia-southeast1', + }), + dict({ + 'help': 'Melbourne', + 'value': 'australia-southeast2', + }), + dict({ + 'help': 'Finland', + 'value': 'europe-north1', + }), + dict({ + 'help': 'Belgium', + 'value': 'europe-west1', + }), + dict({ + 'help': 'London', + 'value': 'europe-west2', + }), + dict({ + 'help': 'Frankfurt', + 'value': 'europe-west3', + }), + dict({ + 'help': 'Netherlands', + 'value': 'europe-west4', + }), + dict({ + 'help': 'Zürich', + 'value': 'europe-west6', + }), + dict({ + 'help': 'Warsaw', + 'value': 'europe-central2', + }), + dict({ + 'help': 'Iowa', + 'value': 'us-central1', + }), + 
dict({ + 'help': 'South Carolina', + 'value': 'us-east1', + }), + dict({ + 'help': 'Northern Virginia', + 'value': 'us-east4', + }), + dict({ + 'help': 'Oregon', + 'value': 'us-west1', + }), + dict({ + 'help': 'California', + 'value': 'us-west2', + }), + dict({ + 'help': 'Salt Lake City', + 'value': 'us-west3', + }), + dict({ + 'help': 'Las Vegas', + 'value': 'us-west4', + }), + dict({ + 'help': 'Montréal', + 'value': 'northamerica-northeast1', + }), + dict({ + 'help': 'Toronto', + 'value': 'northamerica-northeast2', + }), + dict({ + 'help': 'São Paulo', + 'value': 'southamerica-east1', + }), + dict({ + 'help': 'Santiago', + 'value': 'southamerica-west1', + }), + dict({ + 'help': 'Dual region: asia-northeast1 and asia-northeast2.', + 'value': 'asia1', + }), + dict({ + 'help': 'Dual region: europe-north1 and europe-west4.', + 'value': 'eur4', + }), + dict({ + 'help': 'Dual region: us-central1 and us-east1.', + 'value': 'nam4', + }), + ]), + 'exclusive': False, + 'help': 'Location for the newly created buckets.', + 'ispassword': False, + 'name': 'location', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Default', + 'value': '', + }), + dict({ + 'help': 'Multi-regional storage class', + 'value': 'MULTI_REGIONAL', + }), + dict({ + 'help': 'Regional storage class', + 'value': 'REGIONAL', + }), + dict({ + 'help': 'Nearline storage class', + 'value': 'NEARLINE', + }), + dict({ + 'help': 'Coldline storage class', + 'value': 'COLDLINE', + }), + dict({ + 'help': 'Archive storage class', + 'value': 'ARCHIVE', + }), + dict({ + 'help': 'Durable reduced availability storage class', + 'value': 'DURABLE_REDUCED_AVAILABILITY', + }), + ]), + 'exclusive': False, + 'help': 'The storage class to use when storing objects in Google Cloud Storage.', + 'ispassword': False, + 'name': 'storage_class', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Upload an empty object with a trailing slash when a new directory is created + + Empty folders are unsupported for bucket based remotes, this option creates an empty + object ending with "/", to persist the folder. + + ''', + 'ispassword': False, + 'name': 'directory_markers', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If set, don't attempt to check the bucket exists or create it. + + This can be useful when trying to minimise the number of transactions + rclone does if you know the bucket exists already. + + ''', + 'ispassword': False, + 'name': 'no_check_bucket', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If set this will decompress gzip encoded objects. + + It is possible to upload objects to GCS with "Content-Encoding: gzip" + set. Normally rclone will download these files as compressed objects. + + If this flag is set then rclone will decompress these files with + "Content-Encoding: gzip" as they are received. This means that rclone + can't check the size and hash but the file contents will be decompressed. 
+ + ''', + 'ispassword': False, + 'name': 'decompress', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Endpoint for the service. + + Leave blank normally. + ''', + 'ispassword': False, + 'name': 'endpoint', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 50348034.0, + 'default_str': 'Slash,CrLf,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'examples': list([ + dict({ + 'help': 'Enter credentials in the next step.', + 'value': 'false', + }), + dict({ + 'help': 'Get GCP IAM credentials from the environment (env vars or IAM).', + 'value': 'true', + }), + ]), + 'exclusive': False, + 'help': ''' + Get GCP IAM credentials from runtime (environment variables or instance meta data if no env vars). + + Only applies if service_account_file and service_account_credentials is blank. + ''', + 'ispassword': False, + 'name': 'env_auth', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'gcs', + }), + dict({ + 'description': 'Google Photos', + 'name': 'google photos', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Set to make the Google Photos backend read only. + + If you choose read only then rclone will only request read only access + to your photos, otherwise rclone will request full access. + ''', + 'ispassword': False, + 'name': 'read_only', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Set to read the size of media items. 
+ + Normally rclone does not read the size of media items since this takes + another transaction. This isn't necessary for syncing. However + rclone mount needs to know the size of files in advance of reading + them, so setting this flag when using rclone mount is recommended if + you want to read the media. + ''', + 'ispassword': False, + 'name': 'read_size', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 2000.0, + 'default_str': '2000', + 'exclusive': False, + 'help': 'Year limits the photos to be downloaded to those which are uploaded after the given year.', + 'ispassword': False, + 'name': 'start_year', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Also view and download archived media. + + By default, rclone does not request archived media. Thus, when syncing, + archived media is not visible in directory listings or transferred. + + Note that media in albums is always visible and synced, no matter + their archive status. + + With this flag, archived media are always visible in directory + listings and transferred. + + Without this flag, archived media will not be visible in directory + listings and won't be transferred. + ''', + 'ispassword': False, + 'name': 'include_archived', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Use the gphotosdl proxy for downloading the full resolution images + + The Google API will deliver images and video which aren't full + resolution, and/or have EXIF data missing. + + However if you use the gphotosdl proxy then you can download original, + unchanged images. + + This runs a headless browser in the background. + + Download the software from [gphotosdl](https://github.com/rclone/gphotosdl) + + First run with + + gphotosdl -login + + Then once you have logged into google photos close the browser window + and run + + gphotosdl + + Then supply the parameter `--gphotos-proxy "http://localhost:8282"` to make + rclone use the proxy. + + ''', + 'ispassword': False, + 'name': 'proxy', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 50348034.0, + 'default_str': 'Slash,CrLf,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': 'sync', + 'default_str': 'sync', + 'exclusive': False, + 'help': ''' + Upload file batching sync|async|off. + + This sets the batch mode used by rclone. + + This has 3 possible values + + - off - no batching + - sync - batch uploads and check completion (default) + - async - batch upload and don't check completion + + Rclone will close any outstanding batches when it exits which may make + a delay on quit. + + ''', + 'ispassword': False, + 'name': 'batch_mode', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': '0', + 'exclusive': False, + 'help': ''' + Max number of files in upload batch. + + This sets the batch size of files to upload. It has to be less than 50. 
+ + By default this is 0 which means rclone will calculate the batch size + depending on the setting of batch_mode. + + - batch_mode: async - default batch_size is 50 + - batch_mode: sync - default batch_size is the same as --transfers + - batch_mode: off - not in use + + Rclone will close any outstanding batches when it exits which may make + a delay on quit. + + Setting this is a great idea if you are uploading lots of small files + as it will make them a lot quicker. You can use --transfers 32 to + maximise throughput. + + ''', + 'ispassword': False, + 'name': 'batch_size', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': '0s', + 'exclusive': False, + 'help': ''' + Max time to allow an idle upload batch before uploading. + + If an upload batch is idle for more than this long then it will be + uploaded. + + The default for this is 0 which means rclone will choose a sensible + default based on the batch_mode in use. + + - batch_mode: async - default batch_timeout is 10s + - batch_mode: sync - default batch_timeout is 1s + - batch_mode: off - not in use + + ''', + 'ispassword': False, + 'name': 'batch_timeout', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': 600000000000.0, + 'default_str': '10m0s', + 'exclusive': False, + 'help': 'Max time to wait for a batch to finish committing. (no longer used)', + 'ispassword': False, + 'name': 'batch_commit_timeout', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'gphotos', + }), + dict({ + 'description': 'Hadoop distributed file system', + 'name': 'hdfs', + 'options': list([ + dict({ + 'advanced': False, + 'default': list([ + ]), + 'default_str': '', + 'exclusive': False, + 'help': ''' + Hadoop name nodes and ports. + + E.g. "namenode-1:8020,namenode-2:8020,..." to connect to host namenodes at port 8020. + ''', + 'ispassword': False, + 'name': 'namenode', + 'required': True, + 'sensitive': True, + 'type': 'CommaSepList', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Connect to hdfs as root.', + 'value': 'root', + }), + ]), + 'exclusive': False, + 'help': 'Hadoop user name.', + 'ispassword': False, + 'name': 'username', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Kerberos service principal name for the namenode. + + Enables KERBEROS authentication. Specifies the Service Principal Name + (SERVICE/FQDN) for the namenode. E.g. \"hdfs/namenode.hadoop.docker\" + for namenode running as service 'hdfs' with FQDN 'namenode.hadoop.docker'. + ''', + 'ispassword': False, + 'name': 'service_principal_name', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Ensure authentication, integrity and encryption enabled.', + 'value': 'privacy', + }), + ]), + 'exclusive': False, + 'help': ''' + Kerberos data transfer protection: authentication|integrity|privacy. 
+ + Specifies whether or not authentication, data signature integrity + checks, and wire encryption are required when communicating with + the datanodes. Possible values are 'authentication', 'integrity' + and 'privacy'. Used only with KERBEROS enabled. + ''', + 'ispassword': False, + 'name': 'data_transfer_protection', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 50430082.0, + 'default_str': 'Slash,Colon,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'hdfs', + }), + dict({ + 'description': 'HiDrive', + 'name': 'hidrive', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': 'rw', + 'default_str': 'rw', + 'examples': list([ + dict({ + 'help': 'Read and write access to resources.', + 'value': 'rw', + }), + dict({ + 'help': 'Read-only access to resources.', + 'value': 'ro', + }), + ]), + 'exclusive': False, + 'help': 'Access permissions that rclone should use when requesting access from HiDrive.', + 'ispassword': False, + 'name': 'scope_access', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'user', + 'default_str': 'user', + 'examples': list([ + dict({ + 'help': ''' + User-level access to management permissions. + This will be sufficient in most cases. 
+ ''', + 'value': 'user', + }), + dict({ + 'help': 'Extensive access to management permissions.', + 'value': 'admin', + }), + dict({ + 'help': 'Full access to management permissions.', + 'value': 'owner', + }), + ]), + 'exclusive': False, + 'help': 'User-level that rclone should use when requesting access from HiDrive.', + 'ispassword': False, + 'name': 'scope_role', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '/', + 'default_str': '/', + 'examples': list([ + dict({ + 'help': ''' + The topmost directory accessible by rclone. + This will be equivalent with "root" if rclone uses a regular HiDrive user account. + ''', + 'value': '/', + }), + dict({ + 'help': 'The topmost directory of the HiDrive user account', + 'value': 'root', + }), + dict({ + 'help': ''' + This specifies that there is no root-prefix for your paths. + When using this you will always need to specify paths to this remote with a valid parent e.g. "remote:/path/to/dir" or "remote:root/path/to/dir". + ''', + 'value': '', + }), + ]), + 'exclusive': False, + 'help': ''' + The root/parent folder for all paths. + + Fill in to use the specified folder as the parent for all paths given to the remote. + This way rclone can use any folder as its starting point. + ''', + 'ispassword': False, + 'name': 'root_prefix', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'https://api.hidrive.strato.com/2.1', + 'default_str': 'https://api.hidrive.strato.com/2.1', + 'exclusive': False, + 'help': ''' + Endpoint for the service. + + This is the URL that API-calls will be made to. + ''', + 'ispassword': False, + 'name': 'endpoint', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Do not fetch number of objects in directories unless it is absolutely necessary. + + Requests may be faster if the number of objects in subdirectories is not fetched. + ''', + 'ispassword': False, + 'name': 'disable_fetching_member_count', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 50331648.0, + 'default_str': '48Mi', + 'exclusive': False, + 'help': ''' + Chunksize for chunked uploads. + + Any files larger than the configured cutoff (or files of unknown size) will be uploaded in chunks of this size. + + The upper limit for this is 2147483647 bytes (about 2.000Gi). + That is the maximum amount of bytes a single upload-operation will support. + Setting this above the upper limit or to a negative value will cause uploads to fail. + + Setting this to larger values may increase the upload speed at the cost of using more memory. + It can be set to smaller values smaller to save on memory. + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 100663296.0, + 'default_str': '96Mi', + 'exclusive': False, + 'help': ''' + Cutoff/Threshold for chunked uploads. + + Any files larger than this will be uploaded in chunks of the configured chunksize. + + The upper limit for this is 2147483647 bytes (about 2.000Gi). + That is the maximum amount of bytes a single upload-operation will support. + Setting this above the upper limit will cause uploads to fail. 
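The hidrive `chunk_size`/`upload_cutoff` help above describes a simple rule: files at or below the cutoff go up in one request, larger ones in fixed-size chunks, with 2147483647 bytes as the hard ceiling for any single operation. A rough sketch of that arithmetic (the 48Mi/96Mi defaults are copied from the snapshot; the function itself is illustrative, not rclone's code):

    MAX_SINGLE_OP = 2_147_483_647  # documented upper bound for one upload operation

    def plan_hidrive_upload(
        size: int, chunk_size: int = 48 * 1024**2, upload_cutoff: int = 96 * 1024**2
    ) -> list[int]:
        """Return the byte size of each request used to upload a file of `size` bytes."""
        if not 0 < chunk_size <= MAX_SINGLE_OP or upload_cutoff > MAX_SINGLE_OP:
            raise ValueError("chunk_size/upload_cutoff outside the supported range")
        if size <= upload_cutoff:
            return [size]  # single-request upload
        full, rest = divmod(size, chunk_size)
        return [chunk_size] * full + ([rest] if rest else [])

    # plan_hidrive_upload(200 * 1024**2) -> four 48Mi chunks plus an 8Mi tail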
+ ''', + 'ispassword': False, + 'name': 'upload_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 4.0, + 'default_str': '4', + 'exclusive': False, + 'help': ''' + Concurrency for chunked uploads. + + This is the upper limit for how many transfers for the same file are running concurrently. + Setting this above to a value smaller than 1 will cause uploads to deadlock. + + If you are uploading small numbers of large files over high-speed links + and these uploads do not fully utilize your bandwidth, then increasing + this may help to speed up the transfers. + ''', + 'ispassword': False, + 'name': 'upload_concurrency', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 33554434.0, + 'default_str': 'Slash,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'hidrive', + }), + dict({ + 'description': 'HTTP', + 'name': 'http', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + URL of HTTP host to connect to. + + E.g. "https://example.com", or "https://user:pass@example.com" to use a username and password. + ''', + 'ispassword': False, + 'name': 'url', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': list([ + ]), + 'default_str': '', + 'exclusive': False, + 'help': ''' + Set HTTP headers for all transactions. + + Use this to set additional HTTP headers for all transactions. + + The input format is comma separated list of key,value pairs. Standard + [CSV encoding](https://godoc.org/encoding/csv) may be used. + + For example, to set a Cookie use 'Cookie,name=value', or '"Cookie","name=value"'. + + You can set multiple headers, e.g. '"Cookie","name=value","Authorization","xxx"'. + ''', + 'ispassword': False, + 'name': 'headers', + 'required': False, + 'sensitive': False, + 'type': 'CommaSepList', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Set this if the site doesn't end directories with /. + + Use this if your target website does not use / on the end of + directories. + + A / on the end of a path is how rclone normally tells the difference + between files and directories. If this flag is set, then rclone will + treat all files with Content-Type: text/html as directories and read + URLs from them rather than downloading them. + + Note that this may cause rclone to confuse genuine HTML files with + directories. + ''', + 'ispassword': False, + 'name': 'no_slash', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Don't use HEAD requests. + + HEAD requests are mainly used to find file sizes in dir listing. + If your site is being very slow to load then you can try this option. 
+ Normally rclone does a HEAD request for each potential file in a + directory listing to: + + - find its size + - check it really exists + - check to see if it is a directory + + If you set this option, rclone will not do the HEAD request. This will mean + that directory listings are much quicker, but rclone won't have the times or + sizes of any files, and some files that don't exist may be in the listing. + ''', + 'ispassword': False, + 'name': 'no_head', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Do not escape URL metacharacters in path names.', + 'ispassword': False, + 'name': 'no_escape', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'http', + }), + dict({ + 'description': 'iCloud Drive', + 'name': 'iclouddrive', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Apple ID.', + 'ispassword': False, + 'name': 'apple_id', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Password.', + 'ispassword': True, + 'name': 'password', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Trust token (internal use)', + 'ispassword': False, + 'name': 'trust_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'cookies (internal use only)', + 'ispassword': False, + 'name': 'cookies', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'd39ba9916b7251055b22c7f910e2ea796ee65e98b2ddecea8f5dde8d9d1a815d', + 'default_str': 'd39ba9916b7251055b22c7f910e2ea796ee65e98b2ddecea8f5dde8d9d1a815d', + 'exclusive': False, + 'help': 'Client id', + 'ispassword': False, + 'name': 'client_id', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 50438146.0, + 'default_str': 'Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
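The http backend's `headers` option above is a comma-separated list of key,value pairs with standard CSV quoting. A small parser sketch using Python's csv module (the helper is ours; rclone handles this parsing internally):

    import csv
    import io

    def parse_header_list(raw: str) -> dict[str, str]:
        """Parse 'Cookie,name=value' or '"Cookie","name=value","Authorization","xxx"'."""
        fields = next(csv.reader(io.StringIO(raw)))
        if len(fields) % 2:
            raise ValueError("headers must come in key,value pairs")
        return dict(zip(fields[::2], fields[1::2]))

    # parse_header_list('"Cookie","name=value","Authorization","xxx"')
    # -> {'Cookie': 'name=value', 'Authorization': 'xxx'}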
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'iclouddrive', + }), + dict({ + 'description': 'ImageKit.io', + 'name': 'imagekit', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'You can find your ImageKit.io URL endpoint in your [dashboard](https://imagekit.io/dashboard/developer/api-keys)', + 'ispassword': False, + 'name': 'endpoint', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'You can find your ImageKit.io public key in your [dashboard](https://imagekit.io/dashboard/developer/api-keys)', + 'ispassword': False, + 'name': 'public_key', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'You can find your ImageKit.io private key in your [dashboard](https://imagekit.io/dashboard/developer/api-keys)', + 'ispassword': False, + 'name': 'private_key', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'If you have configured `Restrict unsigned image URLs` in your dashboard settings, set this to true.', + 'ispassword': False, + 'name': 'only_signed', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Include old versions in directory listings.', + 'ispassword': False, + 'name': 'versions', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Tags to add to the uploaded files, e.g. "tag1,tag2".', + 'ispassword': False, + 'name': 'upload_tags', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 117553486.0, + 'default_str': 'Slash,LtGt,DoubleQuote,Dollar,Question,Hash,Percent,BackSlash,Del,Ctl,InvalidUtf8,Dot,SquareBracket', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'imagekit', + }), + dict({ + 'description': 'Internet Archive', + 'name': 'internetarchive', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + IAS3 Access Key. + + Leave blank for anonymous access. 
+ You can find one here: https://archive.org/account/s3.php + ''', + 'ispassword': False, + 'name': 'access_key_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + IAS3 Secret Key (password). + + Leave blank for anonymous access. + ''', + 'ispassword': False, + 'name': 'secret_access_key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'https://s3.us.archive.org', + 'default_str': 'https://s3.us.archive.org', + 'exclusive': False, + 'help': ''' + IAS3 Endpoint. + + Leave blank for default value. + ''', + 'ispassword': False, + 'name': 'endpoint', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'https://archive.org', + 'default_str': 'https://archive.org', + 'exclusive': False, + 'help': ''' + Host of InternetArchive Frontend. + + Leave blank for default value. + ''', + 'ispassword': False, + 'name': 'front_endpoint', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': list([ + ]), + 'default_str': '[]', + 'exclusive': False, + 'help': ''' + Metadata to be set on the IA item, this is different from file-level metadata that can be set using --metadata-set. + Format is key=value and the 'x-archive-meta-' prefix is automatically added. + ''', + 'ispassword': False, + 'name': 'item_metadata', + 'required': False, + 'sensitive': False, + 'type': 'stringArray', + }), + dict({ + 'advanced': False, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': ''' + Whether to trigger derive on the IA item or not. If set to false, the item will not be derived by IA upon upload. + The derive process produces a number of secondary files from an upload to make an upload more usable on the web. + Setting this to false is useful for uploading files that are already in a format that IA can display or reduce burden on IA's infrastructure. + ''', + 'ispassword': False, + 'name': 'item_derive', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': ''' + Don't ask the server to test against MD5 checksum calculated by rclone. + Normally rclone will calculate the MD5 checksum of the input before + uploading it so it can ask the server to check the object against checksum. + This is great for data integrity checking but can cause long delays for + large files to start uploading. + ''', + 'ispassword': False, + 'name': 'disable_checksum', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': '0s', + 'exclusive': False, + 'help': ''' + Timeout for waiting the server's processing tasks (specifically archive and book_op) to finish. + Only enable if you need to be guaranteed to be reflected after write operations. + 0 to disable waiting. No errors to be thrown in case of timeout. + ''', + 'ispassword': False, + 'name': 'wait_archive', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': 50446342.0, + 'default_str': 'Slash,LtGt,CrLf,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
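The internetarchive `item_metadata` option above takes key=value strings, and rclone adds the 'x-archive-meta-' prefix automatically. A sketch of that transformation (helper name and the example metadata keys are ours):

    def ia_item_meta_headers(pairs: list[str]) -> dict[str, str]:
        """Turn ['collection=test', ...] into x-archive-meta-* headers."""
        headers: dict[str, str] = {}
        for pair in pairs:
            key, sep, value = pair.partition("=")
            if not sep:
                raise ValueError(f"expected key=value, got {pair!r}")
            headers[f"x-archive-meta-{key}"] = value
        return headers

    # ia_item_meta_headers(["collection=opensource", "mediatype=texts"])
    # -> {'x-archive-meta-collection': 'opensource', 'x-archive-meta-mediatype': 'texts'}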
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'internetarchive', + }), + dict({ + 'description': 'Jottacloud', + 'name': 'jottacloud', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 10485760.0, + 'default_str': '10Mi', + 'exclusive': False, + 'help': 'Files bigger than this will be cached on disk to calculate the MD5 if required.', + 'ispassword': False, + 'name': 'md5_memory_limit', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Only show files that are in the trash. + + This will show trashed files in their original directory structure. + ''', + 'ispassword': False, + 'name': 'trashed_only', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Delete files permanently rather than putting them into the trash.', + 'ispassword': False, + 'name': 'hard_delete', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 10485760.0, + 'default_str': '10Mi', + 'exclusive': False, + 'help': "Files bigger than this can be resumed if the upload fail's.", + 'ispassword': False, + 'name': 'upload_resume_limit', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Avoid server side versioning by deleting files and recreating files instead of overwriting them.', + 'ispassword': False, + 'name': 'no_versions', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 50431886.0, + 'default_str': 'Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. 
+ + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'jottacloud', + }), + dict({ + 'description': 'Koofr, Digi Storage and other Koofr-compatible storage providers', + 'name': 'koofr', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Koofr, https://app.koofr.net/', + 'value': 'koofr', + }), + dict({ + 'help': 'Digi Storage, https://storage.rcs-rds.ro/', + 'value': 'digistorage', + }), + dict({ + 'help': 'Any other Koofr API compatible storage service', + 'value': 'other', + }), + ]), + 'exclusive': False, + 'help': 'Choose your storage provider.', + 'ispassword': False, + 'name': 'provider', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'The Koofr API endpoint to use.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'other', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Mount ID of the mount to use. + + If omitted, the primary mount is used. + ''', + 'ispassword': False, + 'name': 'mountid', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': ''' + Does the backend support setting modification time. + + Set this to false if you use a mount ID that points to a Dropbox or Amazon Drive backend. + ''', + 'ispassword': False, + 'name': 'setmtime', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Your user name.', + 'ispassword': False, + 'name': 'user', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Your password for rclone generate one at https://app.koofr.net/app/admin/preferences/password.', + 'ispassword': True, + 'name': 'password', + 'provider': 'koofr', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Your password for rclone generate one at https://storage.rcs-rds.ro/app/admin/preferences/password.', + 'ispassword': True, + 'name': 'password', + 'provider': 'digistorage', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': "Your password for rclone (generate one at your service's settings page).", + 'ispassword': True, + 'name': 'password', + 'provider': 'other', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 50438146.0, + 'default_str': 'Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. 
+ + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'koofr', + }), + dict({ + 'description': 'Linkbox', + 'name': 'linkbox', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Token from https://www.linkbox.to/admin/account', + 'ispassword': False, + 'name': 'token', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'linkbox', + }), + dict({ + 'description': 'Mail.ru Cloud', + 'name': 'mailru', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'User name (usually email).', + 'ispassword': False, + 'name': 'user', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Password. + + This must be an app password - rclone will not work with your normal + password. See the Configuration section in the docs for how to make an + app password. + + ''', + 'ispassword': True, + 'name': 'pass', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': True, + 'default_str': 'true', + 'examples': list([ + dict({ + 'help': 'Enable', + 'value': 'true', + }), + dict({ + 'help': 'Disable', + 'value': 'false', + }), + ]), + 'exclusive': False, + 'help': ''' + Skip full upload if there is another file with same data hash. + + This feature is called "speedup" or "put by hash". It is especially efficient + in case of generally available files like popular books, video or audio clips, + because files are searched by hash in all accounts of all mailru users. 
+ It is meaningless and ineffective if source file is unique or encrypted. + Please note that rclone may need local memory and disk space to calculate + content hash in advance and decide whether full upload is required. + Also, if rclone does not know file size in advance (e.g. in case of + streaming or partial uploads), it will not even try this optimization. + ''', + 'ispassword': False, + 'name': 'speedup_enable', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '*.mkv,*.avi,*.mp4,*.mp3,*.zip,*.gz,*.rar,*.pdf', + 'default_str': '*.mkv,*.avi,*.mp4,*.mp3,*.zip,*.gz,*.rar,*.pdf', + 'examples': list([ + dict({ + 'help': 'Empty list completely disables speedup (put by hash).', + 'value': '', + }), + dict({ + 'help': 'All files will be attempted for speedup.', + 'value': '*', + }), + dict({ + 'help': 'Only common audio/video files will be tried for put by hash.', + 'value': '*.mkv,*.avi,*.mp4,*.mp3', + }), + dict({ + 'help': 'Only common archives or PDF books will be tried for speedup.', + 'value': '*.zip,*.gz,*.rar,*.pdf', + }), + ]), + 'exclusive': False, + 'help': ''' + Comma separated list of file name patterns eligible for speedup (put by hash). + + Patterns are case insensitive and can contain '*' or '?' meta characters. + ''', + 'ispassword': False, + 'name': 'speedup_file_patterns', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 3221225472.0, + 'default_str': '3Gi', + 'examples': list([ + dict({ + 'help': 'Completely disable speedup (put by hash).', + 'value': '0', + }), + dict({ + 'help': 'Files larger than 1Gb will be uploaded directly.', + 'value': '1G', + }), + dict({ + 'help': 'Choose this option if you have less than 3Gb free on local disk.', + 'value': '3G', + }), + ]), + 'exclusive': False, + 'help': ''' + This option allows you to disable speedup (put by hash) for large files. + + Reason is that preliminary hashing can exhaust your RAM or disk space. + ''', + 'ispassword': False, + 'name': 'speedup_max_disk', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 33554432.0, + 'default_str': '32Mi', + 'examples': list([ + dict({ + 'help': 'Preliminary hashing will always be done in a temporary disk location.', + 'value': '0', + }), + dict({ + 'help': 'Do not dedicate more than 32Mb RAM for preliminary hashing.', + 'value': '32M', + }), + dict({ + 'help': 'You have at most 256Mb RAM free for hash calculations.', + 'value': '256M', + }), + ]), + 'exclusive': False, + 'help': 'Files larger than the size given below will always be hashed on disk.', + 'ispassword': False, + 'name': 'speedup_max_memory', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'examples': list([ + dict({ + 'help': 'Fail with error.', + 'value': 'true', + }), + dict({ + 'help': 'Ignore and continue.', + 'value': 'false', + }), + ]), + 'exclusive': False, + 'help': 'What should copy do if file checksum is mismatched or invalid.', + 'ispassword': False, + 'name': 'check_hash', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + HTTP user agent used internally by client. + + Defaults to "rclone/VERSION" or "--user-agent" provided on command line. 
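Per the mailru help above, `speedup_file_patterns` is a comma-separated list of case-insensitive globs using '*' and '?', and an empty list disables put-by-hash entirely. A sketch of the eligibility test (ours, built on fnmatch; the default pattern list is copied from the snapshot):

    from fnmatch import fnmatchcase

    DEFAULT_PATTERNS = "*.mkv,*.avi,*.mp4,*.mp3,*.zip,*.gz,*.rar,*.pdf"

    def speedup_eligible(name: str, patterns: str = DEFAULT_PATTERNS) -> bool:
        """True if `name` matches any pattern, compared case-insensitively."""
        if not patterns:  # empty list completely disables speedup
            return False
        lowered = name.lower()
        return any(fnmatchcase(lowered, p.strip().lower()) for p in patterns.split(","))

    # speedup_eligible("Lecture.MP4") -> True; speedup_eligible("notes.txt") -> False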
+ ''', + 'ispassword': False, + 'name': 'user_agent', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Comma separated list of internal maintenance flags. + + This option must not be used by an ordinary user. It is intended only to + facilitate remote troubleshooting of backend issues. Strict meaning of + flags is not documented and not guaranteed to persist between releases. + Quirks will be removed when the backend grows stable. + Supported quirks: atomicmkdir binlist unknowndirs + ''', + 'ispassword': False, + 'name': 'quirks', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 50440078.0, + 'default_str': 'Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'mailru', + }), + dict({ + 'description': 'Mega', + 'name': 'mega', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'User name.', + 'ispassword': False, + 'name': 'user', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Password.', + 'ispassword': True, + 'name': 'pass', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Output more debug from Mega. + + If this flag is set (along with -vv) it will print further debugging + information from the mega backend. + ''', + 'ispassword': False, + 'name': 'debug', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Delete files permanently rather than putting them into the trash. + + Normally the mega backend will put all deletions into the trash rather + than permanently deleting them. If you specify this then rclone will + permanently delete objects instead. + ''', + 'ispassword': False, + 'name': 'hard_delete', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use HTTPS for transfers. + + MEGA uses plain text HTTP connections by default. + Some ISPs throttle HTTP connections, this causes transfers to become very slow. + Enabling this will force MEGA to use HTTPS for all transfers. + HTTPS is normally not necessary since all data is already encrypted anyway. + Enabling it will increase CPU usage and add network overhead. 
+ ''', + 'ispassword': False, + 'name': 'use_https', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 50331650.0, + 'default_str': 'Slash,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'mega', + }), + dict({ + 'description': 'Akamai NetStorage', + 'name': 'netstorage', + 'options': list([ + dict({ + 'advanced': True, + 'default': 'https', + 'default_str': 'https', + 'examples': list([ + dict({ + 'help': 'HTTP protocol', + 'value': 'http', + }), + dict({ + 'help': 'HTTPS protocol', + 'value': 'https', + }), + ]), + 'exclusive': False, + 'help': ''' + Select between HTTP or HTTPS protocol. + + Most users should choose HTTPS, which is the default. + HTTP is provided primarily for debugging purposes. + ''', + 'ispassword': False, + 'name': 'protocol', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Domain+path of NetStorage host to connect to. + + Format should be `/` + ''', + 'ispassword': False, + 'name': 'host', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Set the NetStorage account name', + 'ispassword': False, + 'name': 'account', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Set the NetStorage account secret/G2O key for authentication. + + Please choose the 'y' option to set your own password then enter your secret. + ''', + 'ispassword': True, + 'name': 'secret', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'netstorage', + }), + dict({ + 'description': 'Microsoft OneDrive', + 'name': 'onedrive', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. 
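The `token`, `auth_url`, `token_url` and `client_credentials` options opening the onedrive entry here recur verbatim across the OAuth-based backends in this snapshot (google photos, hidrive, jottacloud, mailru, and others). A sketch of how a schema consumer that cannot run an interactive OAuth flow might filter them out; the name set is taken from this snapshot, the function itself is illustrative:

    from typing import Any

    OAUTH_OPTION_NAMES = {"token", "auth_url", "token_url", "client_credentials"}

    def strip_oauth_options(spec: list[dict[str, Any]]) -> None:
        """Drop the interactive-OAuth options from every provider, in place."""
        for provider in spec:
            provider["options"] = [
                o for o in provider["options"] if o["name"] not in OAUTH_OPTION_NAMES
            ]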
+ ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': 'global', + 'default_str': 'global', + 'examples': list([ + dict({ + 'help': 'Microsoft Cloud Global', + 'value': 'global', + }), + dict({ + 'help': 'Microsoft Cloud for US Government', + 'value': 'us', + }), + dict({ + 'help': 'Microsoft Cloud Germany (deprecated - try global region first).', + 'value': 'de', + }), + dict({ + 'help': 'Azure and Office 365 operated by Vnet Group in China', + 'value': 'cn', + }), + ]), + 'exclusive': False, + 'help': 'Choose national cloud region for OneDrive.', + 'ispassword': False, + 'name': 'region', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': -1.0, + 'default_str': 'off', + 'exclusive': False, + 'help': ''' + Cutoff for switching to chunked upload. + + Any files larger than this will be uploaded in chunks of chunk_size. + + This is disabled by default as uploading using single part uploads + causes rclone to use twice the storage on Onedrive business as when + rclone sets the modification time after the upload Onedrive creates a + new version. + + See: https://github.com/rclone/rclone/issues/1716 + + ''', + 'ispassword': False, + 'name': 'upload_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 10485760.0, + 'default_str': '10Mi', + 'exclusive': False, + 'help': ''' + Chunk size to upload files with - must be multiple of 320k (327,680 bytes). + + Above this size files will be chunked - must be multiple of 320k (327,680 bytes) and + should not exceed 250M (262,144,000 bytes) else you may encounter \"Microsoft.SharePoint.Client.InvalidClientQueryException: The request message is too big.\" + Note that the chunks will be buffered into memory. + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'The ID of the drive to use.', + 'ispassword': False, + 'name': 'drive_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'The type of the drive (personal | business | documentLibrary).', + 'ispassword': False, + 'name': 'drive_type', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + ID of the root folder. + + This isn't normally needed, but in special circumstances you might + know the folder ID that you wish to access but not be able to get + there through a path traversal. 
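The onedrive `chunk_size` help above pins two hard numbers: chunks must be a multiple of 320k (327,680 bytes) and should not exceed 250M (262,144,000 bytes) or SharePoint rejects the request. A tiny validator sketch for those constraints (ours, not rclone's):

    CHUNK_QUANTUM = 327_680      # 320k: required granularity
    CHUNK_MAX = 262_144_000      # 250M: request-size ceiling per the help text

    def validate_onedrive_chunk_size(size: int) -> None:
        if size <= 0 or size % CHUNK_QUANTUM:
            raise ValueError("chunk_size must be a positive multiple of 320k (327,680 bytes)")
        if size > CHUNK_MAX:
            raise ValueError("chunk_size should not exceed 250M (262,144,000 bytes)")

    # validate_onedrive_chunk_size(10 * 1024 * 1024) passes: 10Mi is 32 x 320k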
+ + ''', + 'ispassword': False, + 'name': 'root_folder_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': list([ + 'Files.Read', + 'Files.ReadWrite', + 'Files.Read.All', + 'Files.ReadWrite.All', + 'Sites.Read.All', + 'offline_access', + ]), + 'default_str': 'Files.Read Files.ReadWrite Files.Read.All Files.ReadWrite.All Sites.Read.All offline_access', + 'examples': list([ + dict({ + 'help': 'Read and write access to all resources', + 'value': 'Files.Read Files.ReadWrite Files.Read.All Files.ReadWrite.All Sites.Read.All offline_access', + }), + dict({ + 'help': 'Read only access to all resources', + 'value': 'Files.Read Files.Read.All Sites.Read.All offline_access', + }), + dict({ + 'help': ''' + Read and write access to all resources, without the ability to browse SharePoint sites. + Same as if disable_site_permission was set to true + ''', + 'value': 'Files.Read Files.ReadWrite Files.Read.All Files.ReadWrite.All offline_access', + }), + ]), + 'exclusive': False, + 'help': ''' + Set scopes to be requested by rclone. + + Choose or manually enter a custom space separated list with all scopes, that rclone should request. + + ''', + 'ispassword': False, + 'name': 'access_scopes', + 'required': False, + 'sensitive': False, + 'type': 'SpaceSepList', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + ID of the service principal's tenant. Also called its directory ID. + + Set this if using + - Client Credential flow + + ''', + 'ispassword': False, + 'name': 'tenant', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Disable the request for Sites.Read.All permission. + + If set to true, you will no longer be able to search for a SharePoint site when + configuring drive ID, because rclone will not request Sites.Read.All permission. + Set it to true if your organization didn't assign Sites.Read.All permission to the + application, and your organization disallows users to consent app permission + request on their own. + ''', + 'ispassword': False, + 'name': 'disable_site_permission', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Set to make OneNote files show up in directory listings. + + By default, rclone will hide OneNote files in directory listings because + operations like "Open" and "Update" won't work on them. But this + behaviour may also prevent you from deleting them. If you want to + delete OneNote files or otherwise want them to show up in directory + listing, set this option. + ''', + 'ispassword': False, + 'name': 'expose_onenote_files', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Deprecated: use --server-side-across-configs instead. + + Allow server-side operations (e.g. copy) to work across different onedrive configs. + + This will work if you are copying between two OneDrive *Personal* drives AND the files to + copy are already shared between them. 
Additionally, it should also function for a user who + has access permissions both between Onedrive for *business* and *SharePoint* under the *same + tenant*, and between *SharePoint* and another *SharePoint* under the *same tenant*. In other + cases, rclone will fall back to normal copy (which will be slightly slower). + ''', + 'ispassword': False, + 'name': 'server_side_across_configs', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 1000.0, + 'default_str': '1000', + 'exclusive': False, + 'help': 'Size of listing chunk.', + 'ispassword': False, + 'name': 'list_chunk', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Remove all versions on modifying operations. + + Onedrive for business creates versions when rclone uploads new files + overwriting an existing one and when it sets the modification time. + + These versions take up space out of the quota. + + This flag checks for versions after file upload and setting + modification time and removes all but the last version. + + **NB** Onedrive personal can't currently delete versions so don't use + this flag there. + + ''', + 'ispassword': False, + 'name': 'no_versions', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Permanently delete files on removal. + + Normally files will get sent to the recycle bin on deletion. Setting + this flag causes them to be permanently deleted. Use with care. + + OneDrive personal accounts do not support the permanentDelete API, + it only applies to OneDrive for Business and SharePoint document libraries. + + ''', + 'ispassword': False, + 'name': 'hard_delete', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 'anonymous', + 'default_str': 'anonymous', + 'examples': list([ + dict({ + 'help': ''' + Anyone with the link has access, without needing to sign in. + This may include people outside of your organization. + Anonymous link support may be disabled by an administrator. + ''', + 'value': 'anonymous', + }), + dict({ + 'help': ''' + Anyone signed into your organization (tenant) can use the link to get access. + Only available in OneDrive for Business and SharePoint. + ''', + 'value': 'organization', + }), + ]), + 'exclusive': False, + 'help': 'Set the scope of the links created by the link command.', + 'ispassword': False, + 'name': 'link_scope', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'view', + 'default_str': 'view', + 'examples': list([ + dict({ + 'help': 'Creates a read-only link to the item.', + 'value': 'view', + }), + dict({ + 'help': 'Creates a read-write link to the item.', + 'value': 'edit', + }), + dict({ + 'help': 'Creates an embeddable link to the item.', + 'value': 'embed', + }), + ]), + 'exclusive': False, + 'help': 'Set the type of the links created by the link command.', + 'ispassword': False, + 'name': 'link_type', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Set the password for links created by the link command. + + At the time of writing this only works with OneDrive personal paid accounts. 
+ + ''', + 'ispassword': False, + 'name': 'link_password', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'auto', + 'default_str': 'auto', + 'examples': list([ + dict({ + 'help': 'Rclone chooses the best hash', + 'value': 'auto', + }), + dict({ + 'help': 'QuickXor', + 'value': 'quickxor', + }), + dict({ + 'help': 'SHA1', + 'value': 'sha1', + }), + dict({ + 'help': 'SHA256', + 'value': 'sha256', + }), + dict({ + 'help': 'CRC32', + 'value': 'crc32', + }), + dict({ + 'help': "None - don't use any hashes", + 'value': 'none', + }), + ]), + 'exclusive': False, + 'help': ''' + Specify the hash in use for the backend. + + This specifies the hash type in use. If set to "auto" it will use the + default hash which is QuickXorHash. + + Before rclone 1.62 an SHA1 hash was used by default for Onedrive + Personal. For 1.62 and later the default is to use a QuickXorHash for + all onedrive types. If an SHA1 hash is desired then set this option + accordingly. + + From July 2023 QuickXorHash will be the only available hash for + both OneDrive for Business and OneDrive Personal. + + This can be set to "none" to not use any hashes. + + If the hash requested does not exist on the object, it will be + returned as an empty string which is treated as a missing hash by + rclone. + + ''', + 'ispassword': False, + 'name': 'hash_type', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Allows download of files the server thinks has a virus. + + The onedrive/sharepoint server may check files uploaded with an Anti + Virus checker. If it detects any potential viruses or malware it will + block download of the file. + + In this case you will see a message like this + + server reports this file is infected with a virus - use --onedrive-av-override to download anyway: Infected (name of virus): 403 Forbidden: + + If you are 100% sure you want to download this file anyway then use + the --onedrive-av-override flag, or av_override = true in the config + file. + + ''', + 'ispassword': False, + 'name': 'av_override', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If set rclone will use delta listing to implement recursive listings. + + If this flag is set the onedrive backend will advertise `ListR` + support for recursive listings. + + Setting this flag speeds up these things greatly: + + rclone lsf -R onedrive: + rclone size onedrive: + rclone rc vfs/refresh recursive=true + + **However** the delta listing API **only** works at the root of the + drive. If you use it not at the root then it recurses from the root + and discards all the data that is not under the directory you asked + for. So it will be correct but may not be very efficient. + + This is why this flag is not set as the default. + + As a rule of thumb if nearly all of your data is under rclone's root + directory (the `root/directory` in `onedrive:root/directory`) then + using this flag will be be a big performance win. If your data is + mostly not under the root then using this flag will be a big + performance loss. + + It is recommended if you are mounting your onedrive at the root + (or near the root when using crypt) and using rclone `rc vfs/refresh`. 
+ + ''', + 'ispassword': False, + 'name': 'delta', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': 'off', + 'examples': list([ + dict({ + 'help': 'Do not read or write the value', + 'value': 'off', + }), + dict({ + 'help': 'Read the value only', + 'value': 'read', + }), + dict({ + 'help': 'Write the value only', + 'value': 'write', + }), + dict({ + 'help': 'Read and Write the value.', + 'value': 'read,write', + }), + dict({ + 'help': "If writing fails log errors only, don't fail the transfer", + 'value': 'failok', + }), + ]), + 'exclusive': False, + 'help': ''' + Control whether permissions should be read or written in metadata. + + Reading permissions metadata from files can be done quickly, but it + isn't always desirable to set the permissions from the metadata. + + ''', + 'ispassword': False, + 'name': 'metadata_permissions', + 'required': False, + 'sensitive': False, + 'type': 'Bits', + }), + dict({ + 'advanced': True, + 'default': 57386894.0, + 'default_str': 'Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Del,Ctl,LeftSpace,LeftTilde,RightSpace,RightPeriod,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'onedrive', + }), + dict({ + 'description': 'OpenDrive', + 'name': 'opendrive', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Username.', + 'ispassword': False, + 'name': 'username', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Password.', + 'ispassword': True, + 'name': 'password', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 62007182.0, + 'default_str': 'Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,LeftSpace,LeftCrLfHtVt,RightSpace,RightCrLfHtVt,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': 10485760.0, + 'default_str': '10Mi', + 'exclusive': False, + 'help': ''' + Files will be uploaded in chunks this size. + + Note that these chunks are buffered in memory so increasing them will + increase memory use. + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 'private', + 'default_str': 'private', + 'examples': list([ + dict({ + 'help': 'The file or folder access can be granted in a way that will allow select users to view, read or write what is absolutely essential for them.', + 'value': 'private', + }), + dict({ + 'help': 'The file or folder can be downloaded by anyone from a web browser. 
The link can be shared in any way,', + 'value': 'public', + }), + dict({ + 'help': 'The file or folder can be accessed has the same restrictions as Public if the user knows the URL of the file or folder link in order to access the contents', + 'value': 'hidden', + }), + ]), + 'exclusive': False, + 'help': 'Files and folders will be uploaded with this access permission (default private)', + 'ispassword': False, + 'name': 'access', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'opendrive', + }), + dict({ + 'description': 'Oracle Cloud Infrastructure Object Storage', + 'name': 'oracleobjectstorage', + 'options': list([ + dict({ + 'advanced': False, + 'default': 'env_auth', + 'default_str': 'env_auth', + 'examples': list([ + dict({ + 'help': 'automatically pickup the credentials from runtime(env), first one to provide auth wins', + 'value': 'env_auth', + }), + dict({ + 'help': ''' + use an OCI user and an API key for authentication. + you’ll need to put in a config file your tenancy OCID, user OCID, region, the path, fingerprint to an API key. + https://docs.oracle.com/en-us/iaas/Content/API/Concepts/sdkconfig.htm + ''', + 'value': 'user_principal_auth', + }), + dict({ + 'help': ''' + use instance principals to authorize an instance to make API calls. + each instance has its own identity, and authenticates using the certificates that are read from instance metadata. + https://docs.oracle.com/en-us/iaas/Content/Identity/Tasks/callingservicesfrominstances.htm + ''', + 'value': 'instance_principal_auth', + }), + dict({ + 'help': ''' + use workload identity to grant OCI Container Engine for Kubernetes workloads policy-driven access to OCI resources using OCI Identity and Access Management (IAM). + https://docs.oracle.com/en-us/iaas/Content/ContEng/Tasks/contenggrantingworkloadaccesstoresources.htm + ''', + 'value': 'workload_identity_auth', + }), + dict({ + 'help': 'use resource principals to make API calls', + 'value': 'resource_principal_auth', + }), + dict({ + 'help': 'no credentials needed, this is typically for reading public buckets', + 'value': 'no_auth', + }), + ]), + 'exclusive': False, + 'help': 'Choose your Auth Provider', + 'ispassword': False, + 'name': 'provider', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Object storage namespace', + 'ispassword': False, + 'name': 'namespace', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Specify compartment OCID, if you need to list buckets. + + List objects works without compartment OCID. + ''', + 'ispassword': False, + 'name': 'compartment', + 'provider': '!no_auth', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Object storage Region', + 'ispassword': False, + 'name': 'region', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Endpoint for Object storage API. 
+ + Leave blank to use the default endpoint for the region. + ''', + 'ispassword': False, + 'name': 'endpoint', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '~/.oci/config', + 'default_str': '~/.oci/config', + 'examples': list([ + dict({ + 'help': 'oci configuration file location', + 'value': '~/.oci/config', + }), + ]), + 'exclusive': False, + 'help': 'Path to OCI config file', + 'ispassword': False, + 'name': 'config_file', + 'provider': 'user_principal_auth', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 'Default', + 'default_str': 'Default', + 'examples': list([ + dict({ + 'help': 'Use the default profile', + 'value': 'Default', + }), + ]), + 'exclusive': False, + 'help': 'Profile name inside the oci config file', + 'ispassword': False, + 'name': 'config_profile', + 'provider': 'user_principal_auth', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'Standard', + 'default_str': 'Standard', + 'examples': list([ + dict({ + 'help': 'Standard storage tier, this is the default tier', + 'value': 'Standard', + }), + dict({ + 'help': 'InfrequentAccess storage tier', + 'value': 'InfrequentAccess', + }), + dict({ + 'help': 'Archive storage tier', + 'value': 'Archive', + }), + ]), + 'exclusive': False, + 'help': 'The storage class to use when storing new objects in storage. https://docs.oracle.com/en-us/iaas/Content/Object/Concepts/understandingstoragetiers.htm', + 'ispassword': False, + 'name': 'storage_tier', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 209715200.0, + 'default_str': '200Mi', + 'exclusive': False, + 'help': ''' + Cutoff for switching to chunked upload. + + Any files larger than this will be uploaded in chunks of chunk_size. + The minimum is 0 and the maximum is 5 GiB. + ''', + 'ispassword': False, + 'name': 'upload_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 5242880.0, + 'default_str': '5Mi', + 'exclusive': False, + 'help': ''' + Chunk size to use for uploading. + + When uploading files larger than upload_cutoff or files with unknown + size (e.g. from "rclone rcat" or uploaded with "rclone mount" they will be uploaded + as multipart uploads using this chunk size. + + Note that "upload_concurrency" chunks of this size are buffered + in memory per transfer. + + If you are transferring large files over high-speed links and you have + enough memory, then increasing this will speed up the transfers. + + Rclone will automatically increase the chunk size when uploading a + large file of known size to stay below the 10,000 chunks limit. + + Files of unknown size are uploaded with the configured + chunk_size. Since the default chunk size is 5 MiB and there can be at + most 10,000 chunks, this means that by default the maximum size of + a file you can stream upload is 48 GiB. If you wish to stream upload + larger files then you will need to increase chunk_size. + + Increasing the chunk size decreases the accuracy of the progress + statistics displayed with "-P" flag. + + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 10000.0, + 'default_str': '10000', + 'exclusive': False, + 'help': ''' + Maximum number of parts in a multipart upload. 
+ + This option defines the maximum number of multipart chunks to use + when doing a multipart upload. + + OCI has max parts limit of 10,000 chunks. + + Rclone will automatically increase the chunk size when uploading a + large file of a known size to stay below this number of chunks limit. + + ''', + 'ispassword': False, + 'name': 'max_upload_parts', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 10.0, + 'default_str': '10', + 'exclusive': False, + 'help': ''' + Concurrency for multipart uploads. + + This is the number of chunks of the same file that are uploaded + concurrently. + + If you are uploading small numbers of large files over high-speed links + and these uploads do not fully utilize your bandwidth, then increasing + this may help to speed up the transfers. + ''', + 'ispassword': False, + 'name': 'upload_concurrency', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 4999610368.0, + 'default_str': '4.656Gi', + 'exclusive': False, + 'help': ''' + Cutoff for switching to multipart copy. + + Any files larger than this that need to be server-side copied will be + copied in chunks of this size. + + The minimum is 0 and the maximum is 5 GiB. + ''', + 'ispassword': False, + 'name': 'copy_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 60000000000.0, + 'default_str': '1m0s', + 'exclusive': False, + 'help': ''' + Timeout for copy. + + Copy is an asynchronous operation, specify timeout to wait for copy to succeed + + ''', + 'ispassword': False, + 'name': 'copy_timeout', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Don't store MD5 checksum with object metadata. + + Normally rclone will calculate the MD5 checksum of the input before + uploading it so it can add it to metadata on the object. This is great + for data integrity checking but can cause long delays for large files + to start uploading. + ''', + 'ispassword': False, + 'name': 'disable_checksum', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 50331650.0, + 'default_str': 'Slash,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If true avoid calling abort upload on a failure, leaving all successfully uploaded parts for manual recovery. + + It should be set to true for resuming uploads across different sessions. + + WARNING: Storing parts of an incomplete multipart upload counts towards space usage on object storage and will add + additional costs if not cleaned up. + + ''', + 'ispassword': False, + 'name': 'leave_parts_on_error', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If true attempt to resume previously started multipart upload for the object. + This will be helpful to speed up multipart transfers by resuming uploads from past session. 
+ + WARNING: If chunk size differs in resumed session from past incomplete session, then the resumed multipart upload is + aborted and a new multipart upload is started with the new chunk size. + + The flag leave_parts_on_error must be true to resume and optimize to skip parts that were already uploaded successfully. + + ''', + 'ispassword': False, + 'name': 'attempt_resume_upload', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If set, don't attempt to check the bucket exists or create it. + + This can be useful when trying to minimise the number of transactions + rclone does if you know the bucket exists already. + + It can also be needed if the user you are using does not have bucket + creation permissions. + + ''', + 'ispassword': False, + 'name': 'no_check_bucket', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'None', + 'value': '', + }), + ]), + 'exclusive': False, + 'help': ''' + To use SSE-C, a file containing the base64-encoded string of the AES-256 encryption key associated + with the object. Please note only one of sse_customer_key_file|sse_customer_key|sse_kms_key_id is needed.' + ''', + 'ispassword': False, + 'name': 'sse_customer_key_file', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'None', + 'value': '', + }), + ]), + 'exclusive': False, + 'help': ''' + To use SSE-C, the optional header that specifies the base64-encoded 256-bit encryption key to use to + encrypt or decrypt the data. Please note only one of sse_customer_key_file|sse_customer_key|sse_kms_key_id is + needed. For more information, see Using Your Own Keys for Server-Side Encryption + (https://docs.cloud.oracle.com/Content/Object/Tasks/usingyourencryptionkeys.htm) + ''', + 'ispassword': False, + 'name': 'sse_customer_key', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'None', + 'value': '', + }), + ]), + 'exclusive': False, + 'help': ''' + If using SSE-C, The optional header that specifies the base64-encoded SHA256 hash of the encryption + key. This value is used to check the integrity of the encryption key. see Using Your Own Keys for + Server-Side Encryption (https://docs.cloud.oracle.com/Content/Object/Tasks/usingyourencryptionkeys.htm). + ''', + 'ispassword': False, + 'name': 'sse_customer_key_sha256', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'None', + 'value': '', + }), + ]), + 'exclusive': False, + 'help': ''' + if using your own master key in vault, this header specifies the + OCID (https://docs.cloud.oracle.com/Content/General/Concepts/identifiers.htm) of a master encryption key used to call + the Key Management service to generate a data encryption key or to encrypt or decrypt a data encryption key. + Please note only one of sse_customer_key_file|sse_customer_key|sse_kms_key_id is needed. 
+ ''', + 'ispassword': False, + 'name': 'sse_kms_key_id', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'None', + 'value': '', + }), + dict({ + 'help': 'AES256', + 'value': 'AES256', + }), + ]), + 'exclusive': False, + 'help': ''' + If using SSE-C, the optional header that specifies "AES256" as the encryption algorithm. + Object Storage supports "AES256" as the encryption algorithm. For more information, see + Using Your Own Keys for Server-Side Encryption (https://docs.cloud.oracle.com/Content/Object/Tasks/usingyourencryptionkeys.htm). + ''', + 'ispassword': False, + 'name': 'sse_customer_algorithm', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'oos', + }), + dict({ + 'description': 'Pcloud', + 'name': 'pcloud', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 50438146.0, + 'default_str': 'Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': 'd0', + 'default_str': 'd0', + 'exclusive': False, + 'help': 'Fill in for rclone to use a non root folder as its starting point.', + 'ispassword': False, + 'name': 'root_folder_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'api.pcloud.com', + 'default_str': 'api.pcloud.com', + 'examples': list([ + dict({ + 'help': 'Original/US region', + 'value': 'api.pcloud.com', + }), + dict({ + 'help': 'EU region', + 'value': 'eapi.pcloud.com', + }), + ]), + 'exclusive': False, + 'help': ''' + Hostname to connect to. + + This is normally set when rclone initially does the oauth connection, + however you will need to set it by hand if you are using remote config + with rclone authorize. 
+ + ''', + 'ispassword': False, + 'name': 'hostname', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Your pcloud username. + + This is only required when you want to use the cleanup command. Due to a bug + in the pcloud API the required API does not support OAuth authentication so + we have to rely on user password authentication for it. + ''', + 'ispassword': False, + 'name': 'username', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Your pcloud password.', + 'ispassword': True, + 'name': 'password', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'pcloud', + }), + dict({ + 'description': 'PikPak', + 'name': 'pikpak', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Pikpak username.', + 'ispassword': False, + 'name': 'user', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Pikpak password.', + 'ispassword': True, + 'name': 'pass', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Device ID used for authorization.', + 'ispassword': False, + 'name': 'device_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:129.0) Gecko/20100101 Firefox/129.0', + 'default_str': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:129.0) Gecko/20100101 Firefox/129.0', + 'exclusive': False, + 'help': ''' + HTTP user agent for pikpak. + + Defaults to "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:129.0) Gecko/20100101 Firefox/129.0" or "--pikpak-user-agent" provided on command line. + ''', + 'ispassword': False, + 'name': 'user_agent', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + ID of the root folder. + Leave blank normally. + + Fill in for rclone to use a non root folder as its starting point. + + ''', + 'ispassword': False, + 'name': 'root_folder_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': ''' + Send files to the trash instead of deleting permanently. + + Defaults to true, namely sending files to the trash. + Use `--pikpak-use-trash=false` to delete files permanently instead. + ''', + 'ispassword': False, + 'name': 'use_trash', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Only show files that are in the trash. + + This will show trashed files in their original directory structure. 
+ ''', + 'ispassword': False, + 'name': 'trashed_only', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use original file links instead of media links. + + This avoids issues caused by invalid media links, but may reduce download speeds. + ''', + 'ispassword': False, + 'name': 'no_media_link', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 10485760.0, + 'default_str': '10Mi', + 'exclusive': False, + 'help': 'Files bigger than this will be cached on disk to calculate hash if required.', + 'ispassword': False, + 'name': 'hash_memory_limit', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 5242880.0, + 'default_str': '5Mi', + 'exclusive': False, + 'help': ''' + Chunk size for multipart uploads. + + Large files will be uploaded in chunks of this size. + + Note that this is stored in memory and there may be up to + "--transfers" * "--pikpak-upload-concurrency" chunks stored at once + in memory. + + If you are transferring large files over high-speed links and you have + enough memory, then increasing this will speed up the transfers. + + Rclone will automatically increase the chunk size when uploading a + large file of known size to stay below the 10,000 chunks limit. + + Increasing the chunk size decreases the accuracy of the progress + statistics displayed with "-P" flag. + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 5.0, + 'default_str': '5', + 'exclusive': False, + 'help': ''' + Concurrency for multipart uploads. + + This is the number of chunks of the same file that are uploaded + concurrently for multipart uploads. + + Note that chunks are stored in memory and there may be up to + "--transfers" * "--pikpak-upload-concurrency" chunks stored at once + in memory. + + If you are uploading small numbers of large files over high-speed links + and these uploads do not fully utilize your bandwidth, then increasing + this may help to speed up the transfers. + ''', + 'ispassword': False, + 'name': 'upload_concurrency', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 56829838.0, + 'default_str': 'Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,LeftSpace,RightSpace,RightPeriod,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'pikpak', + }), + dict({ + 'description': 'Pixeldrain Filesystem', + 'name': 'pixeldrain', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + API key for your pixeldrain account. + Found on https://pixeldrain.com/user/api_keys. 
+ ''', + 'ispassword': False, + 'name': 'api_key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 'me', + 'default_str': 'me', + 'exclusive': False, + 'help': ''' + Root of the filesystem to use. + + Set to 'me' to use your personal filesystem. Set to a shared directory ID to use a shared directory. + ''', + 'ispassword': False, + 'name': 'root_folder_id', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'https://pixeldrain.com/api', + 'default_str': 'https://pixeldrain.com/api', + 'exclusive': False, + 'help': ''' + The API endpoint to connect to. In the vast majority of cases it's fine to leave + this at default. It is only intended to be changed for testing purposes. + ''', + 'ispassword': False, + 'name': 'api_url', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'pixeldrain', + }), + dict({ + 'description': 'premiumize.me', + 'name': 'premiumizeme', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + OAuth Client Id. + + Leave blank normally. + ''', + 'ispassword': False, + 'name': 'client_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + OAuth Client Secret. + + Leave blank normally. + ''', + 'ispassword': False, + 'name': 'client_secret', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + API Key. + + This is not normally used - use oauth instead. + + ''', + 'ispassword': False, + 'name': 'api_key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 50438154.0, + 'default_str': 'Slash,DoubleQuote,BackSlash,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. 
+ + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'premiumizeme', + }), + dict({ + 'description': 'Proton Drive', + 'name': 'protondrive', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'The username of your proton account', + 'ispassword': False, + 'name': 'username', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'The password of your proton account.', + 'ispassword': True, + 'name': 'password', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The mailbox password of your two-password proton account. + + For more information regarding the mailbox password, please check the + following official knowledge base article: + https://proton.me/support/the-difference-between-the-mailbox-password-and-login-password + + ''', + 'ispassword': True, + 'name': 'mailbox_password', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The 2FA code + + The value can also be provided with --protondrive-2fa=000000 + + The 2FA code of your proton drive account if the account is set up with + two-factor authentication + ''', + 'ispassword': False, + 'name': '2fa', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Client uid key (internal use only)', + 'ispassword': False, + 'name': 'client_uid', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Client access token key (internal use only)', + 'ispassword': False, + 'name': 'client_access_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Client refresh token key (internal use only)', + 'ispassword': False, + 'name': 'client_refresh_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Client salted key pass key (internal use only)', + 'ispassword': False, + 'name': 'client_salted_key_pass', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 52559874.0, + 'default_str': 'Slash,LeftSpace,RightSpace,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': ''' + Return the file size before encryption + + The size of the encrypted file will be different from (bigger than) the + original file size. Unless there is a reason to return the file size + after encryption is performed, otherwise, set this option to true, as + features like Open() which will need to be supplied with original content + size, will fail to operate properly + ''', + 'ispassword': False, + 'name': 'original_file_size', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 'macos-drive@1.0.0-alpha.1+rclone', + 'default_str': 'macos-drive@1.0.0-alpha.1+rclone', + 'exclusive': False, + 'help': ''' + The app version string + + The app version string indicates the client that is currently performing + the API request. This information is required and will be sent with every + API request. + ''', + 'ispassword': False, + 'name': 'app_version', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Create a new revision when filename conflict is detected + + When a file upload is cancelled or failed before completion, a draft will be + created and the subsequent upload of the same file to the same location will be + reported as a conflict. + + The value can also be set by --protondrive-replace-existing-draft=true + + If the option is set to true, the draft will be replaced and then the upload + operation will restart. If there are other clients also uploading at the same + file location at the same time, the behavior is currently unknown. Need to set + to true for integration tests. + If the option is set to false, an error "a draft exist - usually this means a + file is being uploaded at another client, or, there was a failed upload attempt" + will be returned, and no upload will happen. + ''', + 'ispassword': False, + 'name': 'replace_existing_draft', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': ''' + Caches the files and folders metadata to reduce API calls + + Notice: If you are mounting ProtonDrive as a VFS, please disable this feature, + as the current implementation doesn't update or clear the cache when there are + external changes. + + The files and folders on ProtonDrive are represented as links with keyrings, + which can be cached to improve performance and be friendly to the API server. + + The cache is currently built for the case when the rclone is the only instance + performing operations to the mount point. The event system, which is the proton + API system that provides visibility of what has changed on the drive, is yet + to be implemented, so updates from other clients won’t be reflected in the + cache. Thus, if there are concurrent clients accessing the same mount point, + then we might have a problem with caching the stale data. 
+ ''', + 'ispassword': False, + 'name': 'enable_caching', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'protondrive', + }), + dict({ + 'description': 'Put.io', + 'name': 'putio', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 50438146.0, + 'default_str': 'Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'putio', + }), + dict({ + 'description': 'QingCloud Object Storage', + 'name': 'qingstor', + 'options': list([ + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'examples': list([ + dict({ + 'help': 'Enter QingStor credentials in the next step.', + 'value': 'false', + }), + dict({ + 'help': 'Get QingStor credentials from the environment (env vars or IAM).', + 'value': 'true', + }), + ]), + 'exclusive': False, + 'help': ''' + Get QingStor credentials from runtime. + + Only applies if access_key_id and secret_access_key is blank. + ''', + 'ispassword': False, + 'name': 'env_auth', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + QingStor Access Key ID. + + Leave blank for anonymous access or runtime credentials. + ''', + 'ispassword': False, + 'name': 'access_key_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + QingStor Secret Access Key (password). + + Leave blank for anonymous access or runtime credentials. 
+ ''', + 'ispassword': False, + 'name': 'secret_access_key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Enter an endpoint URL to connection QingStor API. + + Leave blank will use the default value "https://qingstor.com:443". + ''', + 'ispassword': False, + 'name': 'endpoint', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + The Beijing (China) Three Zone. + Needs location constraint pek3a. + ''', + 'value': 'pek3a', + }), + dict({ + 'help': ''' + The Shanghai (China) First Zone. + Needs location constraint sh1a. + ''', + 'value': 'sh1a', + }), + dict({ + 'help': ''' + The Guangdong (China) Second Zone. + Needs location constraint gd2a. + ''', + 'value': 'gd2a', + }), + ]), + 'exclusive': False, + 'help': ''' + Zone to connect to. + + Default is "pek3a". + ''', + 'ispassword': False, + 'name': 'zone', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 3.0, + 'default_str': '3', + 'exclusive': False, + 'help': 'Number of connection retries.', + 'ispassword': False, + 'name': 'connection_retries', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 209715200.0, + 'default_str': '200Mi', + 'exclusive': False, + 'help': ''' + Cutoff for switching to chunked upload. + + Any files larger than this will be uploaded in chunks of chunk_size. + The minimum is 0 and the maximum is 5 GiB. + ''', + 'ispassword': False, + 'name': 'upload_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 4194304.0, + 'default_str': '4Mi', + 'exclusive': False, + 'help': ''' + Chunk size to use for uploading. + + When uploading files larger than upload_cutoff they will be uploaded + as multipart uploads using this chunk size. + + Note that "--qingstor-upload-concurrency" chunks of this size are buffered + in memory per transfer. + + If you are transferring large files over high-speed links and you have + enough memory, then increasing this will speed up the transfers. + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 1.0, + 'default_str': '1', + 'exclusive': False, + 'help': ''' + Concurrency for multipart uploads. + + This is the number of chunks of the same file that are uploaded + concurrently. + + NB if you set this to > 1 then the checksums of multipart uploads + become corrupted (the uploads themselves are not corrupted though). + + If you are uploading small numbers of large files over high-speed links + and these uploads do not fully utilize your bandwidth, then increasing + this may help to speed up the transfers. + ''', + 'ispassword': False, + 'name': 'upload_concurrency', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 16842754.0, + 'default_str': 'Slash,Ctl,InvalidUtf8', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'qingstor', + }), + dict({ + 'description': 'Quatrix by Maytech', + 'name': 'quatrix', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'API key for accessing Quatrix account', + 'ispassword': False, + 'name': 'api_key', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Host name of Quatrix account', + 'ispassword': False, + 'name': 'host', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 50438146.0, + 'default_str': 'Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '4s', + 'default_str': '4s', + 'exclusive': False, + 'help': 'Wanted upload time for one chunk', + 'ispassword': False, + 'name': 'effective_upload_time', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 10000000.0, + 'default_str': '9.537Mi', + 'exclusive': False, + 'help': 'The minimal size for one chunk', + 'ispassword': False, + 'name': 'minimal_chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 100000000.0, + 'default_str': '95.367Mi', + 'exclusive': False, + 'help': "The maximal summary for all chunks. 
It should not be less than 'transfers'*'minimal_chunk_size'", + 'ispassword': False, + 'name': 'maximal_summary_chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Delete files permanently rather than putting them into the trash', + 'ispassword': False, + 'name': 'hard_delete', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Skip project folders in operations', + 'ispassword': False, + 'name': 'skip_project_folders', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'quatrix', + }), + dict({ + 'description': 'Amazon S3 Compliant Storage Providers including AWS, Alibaba, ArvanCloud, Ceph, ChinaMobile, Cloudflare, DigitalOcean, Dreamhost, Exaba, FlashBlade, GCS, HuaweiOBS, IBMCOS, IDrive, IONOS, LyveCloud, Leviia, Liara, Linode, Magalu, Mega, Minio, Netease, Outscale, Petabox, RackCorp, Rclone, Scaleway, SeaweedFS, Selectel, StackPath, Storj, Synology, TencentCOS, Wasabi, Qiniu and others', + 'name': 's3', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Amazon Web Services (AWS) S3', + 'value': 'AWS', + }), + dict({ + 'help': 'Alibaba Cloud Object Storage System (OSS) formerly Aliyun', + 'value': 'Alibaba', + }), + dict({ + 'help': 'Arvan Cloud Object Storage (AOS)', + 'value': 'ArvanCloud', + }), + dict({ + 'help': 'Ceph Object Storage', + 'value': 'Ceph', + }), + dict({ + 'help': 'China Mobile Ecloud Elastic Object Storage (EOS)', + 'value': 'ChinaMobile', + }), + dict({ + 'help': 'Cloudflare R2 Storage', + 'value': 'Cloudflare', + }), + dict({ + 'help': 'DigitalOcean Spaces', + 'value': 'DigitalOcean', + }), + dict({ + 'help': 'Dreamhost DreamObjects', + 'value': 'Dreamhost', + }), + dict({ + 'help': 'Exaba Object Storage', + 'value': 'Exaba', + }), + dict({ + 'help': 'Pure Storage FlashBlade Object Storage', + 'value': 'FlashBlade', + }), + dict({ + 'help': 'Google Cloud Storage', + 'value': 'GCS', + }), + dict({ + 'help': 'Huawei Object Storage Service', + 'value': 'HuaweiOBS', + }), + dict({ + 'help': 'IBM COS S3', + 'value': 'IBMCOS', + }), + dict({ + 'help': 'IDrive e2', + 'value': 'IDrive', + }), + dict({ + 'help': 'IONOS Cloud', + 'value': 'IONOS', + }), + dict({ + 'help': 'Seagate Lyve Cloud', + 'value': 'LyveCloud', + }), + dict({ + 'help': 'Leviia Object Storage', + 'value': 'Leviia', + }), + dict({ + 'help': 'Liara Object Storage', + 'value': 'Liara', + }), + dict({ + 'help': 'Linode Object Storage', + 'value': 'Linode', + }), + dict({ + 'help': 'Magalu Object Storage', + 'value': 'Magalu', + }), + dict({ + 'help': 'MEGA S4 Object Storage', + 'value': 'Mega', + }), + dict({ + 'help': 'Minio Object Storage', + 'value': 'Minio', + }), + dict({ + 'help': 'Netease Object Storage (NOS)', + 'value': 'Netease', + }), + dict({ + 'help': 'OUTSCALE Object Storage (OOS)', + 'value': 'Outscale', + }), + dict({ + 'help': 'Petabox Object Storage', + 'value': 'Petabox', + }), + dict({ + 'help': 'RackCorp Object Storage', + 'value': 'RackCorp', + }), + dict({ + 'help': 
'Rclone S3 Server', + 'value': 'Rclone', + }), + dict({ + 'help': 'Scaleway Object Storage', + 'value': 'Scaleway', + }), + dict({ + 'help': 'SeaweedFS S3', + 'value': 'SeaweedFS', + }), + dict({ + 'help': 'Selectel Object Storage', + 'value': 'Selectel', + }), + dict({ + 'help': 'StackPath Object Storage', + 'value': 'StackPath', + }), + dict({ + 'help': 'Storj (S3 Compatible Gateway)', + 'value': 'Storj', + }), + dict({ + 'help': 'Synology C2 Object Storage', + 'value': 'Synology', + }), + dict({ + 'help': 'Tencent Cloud Object Storage (COS)', + 'value': 'TencentCOS', + }), + dict({ + 'help': 'Wasabi Object Storage', + 'value': 'Wasabi', + }), + dict({ + 'help': 'Qiniu Object Storage (Kodo)', + 'value': 'Qiniu', + }), + dict({ + 'help': 'Any other S3 compatible provider', + 'value': 'Other', + }), + dict({ + 'help': 'Switch Object Storage', + 'provider': '', + 'value': 'Switch', + }), + ]), + 'exclusive': False, + 'help': 'Choose your S3 provider.', + 'ispassword': False, + 'name': 'provider', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'examples': list([ + dict({ + 'help': 'Enter AWS credentials in the next step.', + 'value': 'false', + }), + dict({ + 'help': 'Get AWS credentials from the environment (env vars or IAM).', + 'value': 'true', + }), + ]), + 'exclusive': False, + 'help': ''' + Get AWS credentials from runtime (environment variables or EC2/ECS meta data if no env vars). + + Only applies if access_key_id and secret_access_key is blank. + ''', + 'ispassword': False, + 'name': 'env_auth', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + AWS Access Key ID. + + Leave blank for anonymous access or runtime credentials. + ''', + 'ispassword': False, + 'name': 'access_key_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + AWS Secret Access Key (password). + + Leave blank for anonymous access or runtime credentials. + ''', + 'ispassword': False, + 'name': 'secret_access_key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + The default endpoint - a good choice if you are unsure. + US Region, Northern Virginia, or Pacific Northwest. + Leave location constraint empty. + ''', + 'value': 'us-east-1', + }), + dict({ + 'help': ''' + US East (Ohio) Region. + Needs location constraint us-east-2. + ''', + 'value': 'us-east-2', + }), + dict({ + 'help': ''' + US West (Northern California) Region. + Needs location constraint us-west-1. + ''', + 'value': 'us-west-1', + }), + dict({ + 'help': ''' + US West (Oregon) Region. + Needs location constraint us-west-2. + ''', + 'value': 'us-west-2', + }), + dict({ + 'help': ''' + Canada (Central) Region. + Needs location constraint ca-central-1. + ''', + 'value': 'ca-central-1', + }), + dict({ + 'help': ''' + EU (Ireland) Region. + Needs location constraint EU or eu-west-1. + ''', + 'value': 'eu-west-1', + }), + dict({ + 'help': ''' + EU (London) Region. + Needs location constraint eu-west-2. + ''', + 'value': 'eu-west-2', + }), + dict({ + 'help': ''' + EU (Paris) Region. + Needs location constraint eu-west-3. 
+ ''', + 'value': 'eu-west-3', + }), + dict({ + 'help': ''' + EU (Stockholm) Region. + Needs location constraint eu-north-1. + ''', + 'value': 'eu-north-1', + }), + dict({ + 'help': ''' + EU (Milan) Region. + Needs location constraint eu-south-1. + ''', + 'value': 'eu-south-1', + }), + dict({ + 'help': ''' + EU (Frankfurt) Region. + Needs location constraint eu-central-1. + ''', + 'value': 'eu-central-1', + }), + dict({ + 'help': ''' + Asia Pacific (Singapore) Region. + Needs location constraint ap-southeast-1. + ''', + 'value': 'ap-southeast-1', + }), + dict({ + 'help': ''' + Asia Pacific (Sydney) Region. + Needs location constraint ap-southeast-2. + ''', + 'value': 'ap-southeast-2', + }), + dict({ + 'help': ''' + Asia Pacific (Tokyo) Region. + Needs location constraint ap-northeast-1. + ''', + 'value': 'ap-northeast-1', + }), + dict({ + 'help': ''' + Asia Pacific (Seoul). + Needs location constraint ap-northeast-2. + ''', + 'value': 'ap-northeast-2', + }), + dict({ + 'help': ''' + Asia Pacific (Osaka-Local). + Needs location constraint ap-northeast-3. + ''', + 'value': 'ap-northeast-3', + }), + dict({ + 'help': ''' + Asia Pacific (Mumbai). + Needs location constraint ap-south-1. + ''', + 'value': 'ap-south-1', + }), + dict({ + 'help': ''' + Asia Pacific (Hong Kong) Region. + Needs location constraint ap-east-1. + ''', + 'value': 'ap-east-1', + }), + dict({ + 'help': ''' + South America (Sao Paulo) Region. + Needs location constraint sa-east-1. + ''', + 'value': 'sa-east-1', + }), + dict({ + 'help': ''' + Israel (Tel Aviv) Region. + Needs location constraint il-central-1. + ''', + 'value': 'il-central-1', + }), + dict({ + 'help': ''' + Middle East (Bahrain) Region. + Needs location constraint me-south-1. + ''', + 'value': 'me-south-1', + }), + dict({ + 'help': ''' + Africa (Cape Town) Region. + Needs location constraint af-south-1. + ''', + 'value': 'af-south-1', + }), + dict({ + 'help': ''' + China (Beijing) Region. + Needs location constraint cn-north-1. + ''', + 'value': 'cn-north-1', + }), + dict({ + 'help': ''' + China (Ningxia) Region. + Needs location constraint cn-northwest-1. + ''', + 'value': 'cn-northwest-1', + }), + dict({ + 'help': ''' + AWS GovCloud (US-East) Region. + Needs location constraint us-gov-east-1. + ''', + 'value': 'us-gov-east-1', + }), + dict({ + 'help': ''' + AWS GovCloud (US) Region. + Needs location constraint us-gov-west-1. 
+ ''', + 'value': 'us-gov-west-1', + }), + ]), + 'exclusive': False, + 'help': 'Region to connect to.', + 'ispassword': False, + 'name': 'region', + 'provider': 'AWS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Global CDN (All locations) Region', + 'value': 'global', + }), + dict({ + 'help': 'Australia (All states)', + 'value': 'au', + }), + dict({ + 'help': 'NSW (Australia) Region', + 'value': 'au-nsw', + }), + dict({ + 'help': 'QLD (Australia) Region', + 'value': 'au-qld', + }), + dict({ + 'help': 'VIC (Australia) Region', + 'value': 'au-vic', + }), + dict({ + 'help': 'Perth (Australia) Region', + 'value': 'au-wa', + }), + dict({ + 'help': 'Manila (Philippines) Region', + 'value': 'ph', + }), + dict({ + 'help': 'Bangkok (Thailand) Region', + 'value': 'th', + }), + dict({ + 'help': 'HK (Hong Kong) Region', + 'value': 'hk', + }), + dict({ + 'help': 'Ulaanbaatar (Mongolia) Region', + 'value': 'mn', + }), + dict({ + 'help': 'Bishkek (Kyrgyzstan) Region', + 'value': 'kg', + }), + dict({ + 'help': 'Jakarta (Indonesia) Region', + 'value': 'id', + }), + dict({ + 'help': 'Tokyo (Japan) Region', + 'value': 'jp', + }), + dict({ + 'help': 'SG (Singapore) Region', + 'value': 'sg', + }), + dict({ + 'help': 'Frankfurt (Germany) Region', + 'value': 'de', + }), + dict({ + 'help': 'USA (AnyCast) Region', + 'value': 'us', + }), + dict({ + 'help': 'New York (USA) Region', + 'value': 'us-east-1', + }), + dict({ + 'help': 'Freemont (USA) Region', + 'value': 'us-west-1', + }), + dict({ + 'help': 'Auckland (New Zealand) Region', + 'value': 'nz', + }), + ]), + 'exclusive': False, + 'help': ''' + region - the location where your bucket will be created and your data stored. 
+ + ''', + 'ispassword': False, + 'name': 'region', + 'provider': 'RackCorp', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Amsterdam, The Netherlands', + 'value': 'nl-ams', + }), + dict({ + 'help': 'Paris, France', + 'value': 'fr-par', + }), + dict({ + 'help': 'Warsaw, Poland', + 'value': 'pl-waw', + }), + ]), + 'exclusive': False, + 'help': 'Region to connect to.', + 'ispassword': False, + 'name': 'region', + 'provider': 'Scaleway', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'AF-Johannesburg', + 'value': 'af-south-1', + }), + dict({ + 'help': 'AP-Bangkok', + 'value': 'ap-southeast-2', + }), + dict({ + 'help': 'AP-Singapore', + 'value': 'ap-southeast-3', + }), + dict({ + 'help': 'CN East-Shanghai1', + 'value': 'cn-east-3', + }), + dict({ + 'help': 'CN East-Shanghai2', + 'value': 'cn-east-2', + }), + dict({ + 'help': 'CN North-Beijing1', + 'value': 'cn-north-1', + }), + dict({ + 'help': 'CN North-Beijing4', + 'value': 'cn-north-4', + }), + dict({ + 'help': 'CN South-Guangzhou', + 'value': 'cn-south-1', + }), + dict({ + 'help': 'CN-Hong Kong', + 'value': 'ap-southeast-1', + }), + dict({ + 'help': 'LA-Buenos Aires1', + 'value': 'sa-argentina-1', + }), + dict({ + 'help': 'LA-Lima1', + 'value': 'sa-peru-1', + }), + dict({ + 'help': 'LA-Mexico City1', + 'value': 'na-mexico-1', + }), + dict({ + 'help': 'LA-Santiago2', + 'value': 'sa-chile-1', + }), + dict({ + 'help': 'LA-Sao Paulo1', + 'value': 'sa-brazil-1', + }), + dict({ + 'help': 'RU-Moscow2', + 'value': 'ru-northwest-2', + }), + ]), + 'exclusive': False, + 'help': ''' + Region to connect to. - the location where your bucket will be created and your data stored. Need bo be same with your endpoint. + + ''', + 'ispassword': False, + 'name': 'region', + 'provider': 'HuaweiOBS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': "R2 buckets are automatically distributed across Cloudflare's data centers for low latency.", + 'value': 'auto', + }), + ]), + 'exclusive': False, + 'help': 'Region to connect to.', + 'ispassword': False, + 'name': 'region', + 'provider': 'Cloudflare', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + The default endpoint - a good choice if you are unsure. + East China Region 1. + Needs location constraint cn-east-1. + ''', + 'value': 'cn-east-1', + }), + dict({ + 'help': ''' + East China Region 2. + Needs location constraint cn-east-2. + ''', + 'value': 'cn-east-2', + }), + dict({ + 'help': ''' + North China Region 1. + Needs location constraint cn-north-1. + ''', + 'value': 'cn-north-1', + }), + dict({ + 'help': ''' + South China Region 1. + Needs location constraint cn-south-1. + ''', + 'value': 'cn-south-1', + }), + dict({ + 'help': ''' + North America Region. + Needs location constraint us-north-1. + ''', + 'value': 'us-north-1', + }), + dict({ + 'help': ''' + Southeast Asia Region 1. + Needs location constraint ap-southeast-1. + ''', + 'value': 'ap-southeast-1', + }), + dict({ + 'help': ''' + Northeast Asia Region 1. + Needs location constraint ap-northeast-1. 
+ ''', + 'value': 'ap-northeast-1', + }), + ]), + 'exclusive': False, + 'help': 'Region to connect to.', + 'ispassword': False, + 'name': 'region', + 'provider': 'Qiniu', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Frankfurt, Germany', + 'value': 'de', + }), + dict({ + 'help': 'Berlin, Germany', + 'value': 'eu-central-2', + }), + dict({ + 'help': 'Logrono, Spain', + 'value': 'eu-south-2', + }), + ]), + 'exclusive': False, + 'help': ''' + Region where your bucket will be created and your data stored. + + ''', + 'ispassword': False, + 'name': 'region', + 'provider': 'IONOS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Paris, France', + 'value': 'eu-west-2', + }), + dict({ + 'help': 'New Jersey, USA', + 'value': 'us-east-2', + }), + dict({ + 'help': 'California, USA', + 'value': 'us-west-1', + }), + dict({ + 'help': 'SecNumCloud, Paris, France', + 'value': 'cloudgouv-eu-west-1', + }), + dict({ + 'help': 'Tokyo, Japan', + 'value': 'ap-northeast-1', + }), + ]), + 'exclusive': False, + 'help': ''' + Region where your bucket will be created and your data stored. + + ''', + 'ispassword': False, + 'name': 'region', + 'provider': 'Outscale', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'US East (N. Virginia)', + 'value': 'us-east-1', + }), + dict({ + 'help': 'Europe (Frankfurt)', + 'value': 'eu-central-1', + }), + dict({ + 'help': 'Asia Pacific (Singapore)', + 'value': 'ap-southeast-1', + }), + dict({ + 'help': 'Middle East (Bahrain)', + 'value': 'me-south-1', + }), + dict({ + 'help': 'South America (São Paulo)', + 'value': 'sa-east-1', + }), + ]), + 'exclusive': False, + 'help': ''' + Region where your bucket will be created and your data stored. + + ''', + 'ispassword': False, + 'name': 'region', + 'provider': 'Petabox', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Europe Region 1', + 'value': 'eu-001', + }), + dict({ + 'help': 'Europe Region 2', + 'value': 'eu-002', + }), + dict({ + 'help': 'US Region 1', + 'value': 'us-001', + }), + dict({ + 'help': 'US Region 2', + 'value': 'us-002', + }), + dict({ + 'help': 'Asia (Taiwan)', + 'value': 'tw-001', + }), + ]), + 'exclusive': False, + 'help': ''' + Region where your data stored. + + ''', + 'ispassword': False, + 'name': 'region', + 'provider': 'Synology', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'St. Petersburg', + 'value': 'ru-1', + }), + ]), + 'exclusive': False, + 'help': ''' + Region where your data stored. + + ''', + 'ispassword': False, + 'name': 'region', + 'provider': 'Selectel', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + Use this if unsure. + Will use v4 signatures and an empty region. + ''', + 'value': '', + }), + dict({ + 'help': ''' + Use this only if v4 signatures don't work. + E.g. pre Jewel/v10 CEPH. 
+ ''', + 'value': 'other-v2-signature', + }), + ]), + 'exclusive': False, + 'help': ''' + Region to connect to. + + Leave blank if you are using an S3 clone and you don't have a region. + ''', + 'ispassword': False, + 'name': 'region', + 'provider': '!AWS,Alibaba,ArvanCloud,ChinaMobile,Cloudflare,FlashBlade,IONOS,Petabox,Liara,Linode,Magalu,Qiniu,RackCorp,Scaleway,Selectel,Storj,Synology,TencentCOS,HuaweiOBS,IDrive,Mega', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Endpoint for S3 API. + + Leave blank if using AWS to use the default endpoint for the region. + ''', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'AWS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + The default endpoint - a good choice if you are unsure. + East China (Suzhou) + ''', + 'value': 'eos-wuxi-1.cmecloud.cn', + }), + dict({ + 'help': 'East China (Jinan)', + 'value': 'eos-jinan-1.cmecloud.cn', + }), + dict({ + 'help': 'East China (Hangzhou)', + 'value': 'eos-ningbo-1.cmecloud.cn', + }), + dict({ + 'help': 'East China (Shanghai-1)', + 'value': 'eos-shanghai-1.cmecloud.cn', + }), + dict({ + 'help': 'Central China (Zhengzhou)', + 'value': 'eos-zhengzhou-1.cmecloud.cn', + }), + dict({ + 'help': 'Central China (Changsha-1)', + 'value': 'eos-hunan-1.cmecloud.cn', + }), + dict({ + 'help': 'Central China (Changsha-2)', + 'value': 'eos-zhuzhou-1.cmecloud.cn', + }), + dict({ + 'help': 'South China (Guangzhou-2)', + 'value': 'eos-guangzhou-1.cmecloud.cn', + }), + dict({ + 'help': 'South China (Guangzhou-3)', + 'value': 'eos-dongguan-1.cmecloud.cn', + }), + dict({ + 'help': 'North China (Beijing-1)', + 'value': 'eos-beijing-1.cmecloud.cn', + }), + dict({ + 'help': 'North China (Beijing-2)', + 'value': 'eos-beijing-2.cmecloud.cn', + }), + dict({ + 'help': 'North China (Beijing-3)', + 'value': 'eos-beijing-4.cmecloud.cn', + }), + dict({ + 'help': 'North China (Huhehaote)', + 'value': 'eos-huhehaote-1.cmecloud.cn', + }), + dict({ + 'help': 'Southwest China (Chengdu)', + 'value': 'eos-chengdu-1.cmecloud.cn', + }), + dict({ + 'help': 'Southwest China (Chongqing)', + 'value': 'eos-chongqing-1.cmecloud.cn', + }), + dict({ + 'help': 'Southwest China (Guiyang)', + 'value': 'eos-guiyang-1.cmecloud.cn', + }), + dict({ + 'help': 'Nouthwest China (Xian)', + 'value': 'eos-xian-1.cmecloud.cn', + }), + dict({ + 'help': 'Yunnan China (Kunming)', + 'value': 'eos-yunnan.cmecloud.cn', + }), + dict({ + 'help': 'Yunnan China (Kunming-2)', + 'value': 'eos-yunnan-2.cmecloud.cn', + }), + dict({ + 'help': 'Tianjin China (Tianjin)', + 'value': 'eos-tianjin-1.cmecloud.cn', + }), + dict({ + 'help': 'Jilin China (Changchun)', + 'value': 'eos-jilin-1.cmecloud.cn', + }), + dict({ + 'help': 'Hubei China (Xiangyan)', + 'value': 'eos-hubei-1.cmecloud.cn', + }), + dict({ + 'help': 'Jiangxi China (Nanchang)', + 'value': 'eos-jiangxi-1.cmecloud.cn', + }), + dict({ + 'help': 'Gansu China (Lanzhou)', + 'value': 'eos-gansu-1.cmecloud.cn', + }), + dict({ + 'help': 'Shanxi China (Taiyuan)', + 'value': 'eos-shanxi-1.cmecloud.cn', + }), + dict({ + 'help': 'Liaoning China (Shenyang)', + 'value': 'eos-liaoning-1.cmecloud.cn', + }), + dict({ + 'help': 'Hebei China (Shijiazhuang)', + 'value': 'eos-hebei-1.cmecloud.cn', + }), + dict({ + 'help': 'Fujian China (Xiamen)', + 'value': 
'eos-fujian-1.cmecloud.cn', + }), + dict({ + 'help': 'Guangxi China (Nanning)', + 'value': 'eos-guangxi-1.cmecloud.cn', + }), + dict({ + 'help': 'Anhui China (Huainan)', + 'value': 'eos-anhui-1.cmecloud.cn', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for China Mobile Ecloud Elastic Object Storage (EOS) API.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'ChinaMobile', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + The default endpoint - a good choice if you are unsure. + Tehran Iran (Simin) + ''', + 'value': 's3.ir-thr-at1.arvanstorage.ir', + }), + dict({ + 'help': 'Tabriz Iran (Shahriar)', + 'value': 's3.ir-tbz-sh1.arvanstorage.ir', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Arvan Cloud Object Storage (AOS) API.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'ArvanCloud', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'US Cross Region Endpoint', + 'value': 's3.us.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'US Cross Region Dallas Endpoint', + 'value': 's3.dal.us.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'US Cross Region Washington DC Endpoint', + 'value': 's3.wdc.us.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'US Cross Region San Jose Endpoint', + 'value': 's3.sjc.us.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'US Cross Region Private Endpoint', + 'value': 's3.private.us.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'US Cross Region Dallas Private Endpoint', + 'value': 's3.private.dal.us.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'US Cross Region Washington DC Private Endpoint', + 'value': 's3.private.wdc.us.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'US Cross Region San Jose Private Endpoint', + 'value': 's3.private.sjc.us.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'US Region East Endpoint', + 'value': 's3.us-east.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'US Region East Private Endpoint', + 'value': 's3.private.us-east.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'US Region South Endpoint', + 'value': 's3.us-south.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'US Region South Private Endpoint', + 'value': 's3.private.us-south.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'EU Cross Region Endpoint', + 'value': 's3.eu.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'EU Cross Region Frankfurt Endpoint', + 'value': 's3.fra.eu.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'EU Cross Region Milan Endpoint', + 'value': 's3.mil.eu.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'EU Cross Region Amsterdam Endpoint', + 'value': 's3.ams.eu.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'EU Cross Region Private Endpoint', + 'value': 's3.private.eu.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'EU Cross Region Frankfurt Private Endpoint', + 'value': 's3.private.fra.eu.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'EU Cross Region Milan Private Endpoint', + 'value': 's3.private.mil.eu.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'EU 
Cross Region Amsterdam Private Endpoint', + 'value': 's3.private.ams.eu.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Great Britain Endpoint', + 'value': 's3.eu-gb.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Great Britain Private Endpoint', + 'value': 's3.private.eu-gb.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'EU Region DE Endpoint', + 'value': 's3.eu-de.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'EU Region DE Private Endpoint', + 'value': 's3.private.eu-de.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Cross Regional Endpoint', + 'value': 's3.ap.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Cross Regional Tokyo Endpoint', + 'value': 's3.tok.ap.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Cross Regional HongKong Endpoint', + 'value': 's3.hkg.ap.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Cross Regional Seoul Endpoint', + 'value': 's3.seo.ap.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Cross Regional Private Endpoint', + 'value': 's3.private.ap.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Cross Regional Tokyo Private Endpoint', + 'value': 's3.private.tok.ap.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Cross Regional HongKong Private Endpoint', + 'value': 's3.private.hkg.ap.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Cross Regional Seoul Private Endpoint', + 'value': 's3.private.seo.ap.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Region Japan Endpoint', + 'value': 's3.jp-tok.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Region Japan Private Endpoint', + 'value': 's3.private.jp-tok.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Region Australia Endpoint', + 'value': 's3.au-syd.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'APAC Region Australia Private Endpoint', + 'value': 's3.private.au-syd.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Amsterdam Single Site Endpoint', + 'value': 's3.ams03.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Amsterdam Single Site Private Endpoint', + 'value': 's3.private.ams03.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Chennai Single Site Endpoint', + 'value': 's3.che01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Chennai Single Site Private Endpoint', + 'value': 's3.private.che01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Melbourne Single Site Endpoint', + 'value': 's3.mel01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Melbourne Single Site Private Endpoint', + 'value': 's3.private.mel01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Oslo Single Site Endpoint', + 'value': 's3.osl01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Oslo Single Site Private Endpoint', + 'value': 's3.private.osl01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Toronto Single Site Endpoint', + 'value': 's3.tor01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Toronto Single Site Private Endpoint', + 'value': 's3.private.tor01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Seoul Single Site Endpoint', + 'value': 's3.seo01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Seoul Single Site Private 
Endpoint', + 'value': 's3.private.seo01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Montreal Single Site Endpoint', + 'value': 's3.mon01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Montreal Single Site Private Endpoint', + 'value': 's3.private.mon01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Mexico Single Site Endpoint', + 'value': 's3.mex01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Mexico Single Site Private Endpoint', + 'value': 's3.private.mex01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'San Jose Single Site Endpoint', + 'value': 's3.sjc04.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'San Jose Single Site Private Endpoint', + 'value': 's3.private.sjc04.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Milan Single Site Endpoint', + 'value': 's3.mil01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Milan Single Site Private Endpoint', + 'value': 's3.private.mil01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Hong Kong Single Site Endpoint', + 'value': 's3.hkg02.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Hong Kong Single Site Private Endpoint', + 'value': 's3.private.hkg02.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Paris Single Site Endpoint', + 'value': 's3.par01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Paris Single Site Private Endpoint', + 'value': 's3.private.par01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Singapore Single Site Endpoint', + 'value': 's3.sng01.cloud-object-storage.appdomain.cloud', + }), + dict({ + 'help': 'Singapore Single Site Private Endpoint', + 'value': 's3.private.sng01.cloud-object-storage.appdomain.cloud', + }), + ]), + 'exclusive': False, + 'help': ''' + Endpoint for IBM COS S3 API. + + Specify if using an IBM COS On Premise. + ''', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'IBMCOS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Frankfurt, Germany', + 'value': 's3-eu-central-1.ionoscloud.com', + }), + dict({ + 'help': 'Berlin, Germany', + 'value': 's3-eu-central-2.ionoscloud.com', + }), + dict({ + 'help': 'Logrono, Spain', + 'value': 's3-eu-south-2.ionoscloud.com', + }), + ]), + 'exclusive': False, + 'help': ''' + Endpoint for IONOS S3 Object Storage. + + Specify the endpoint from the same region. + ''', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'IONOS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'US East (N. Virginia)', + 'value': 's3.petabox.io', + }), + dict({ + 'help': 'US East (N. Virginia)', + 'value': 's3.us-east-1.petabox.io', + }), + dict({ + 'help': 'Europe (Frankfurt)', + 'value': 's3.eu-central-1.petabox.io', + }), + dict({ + 'help': 'Asia Pacific (Singapore)', + 'value': 's3.ap-southeast-1.petabox.io', + }), + dict({ + 'help': 'Middle East (Bahrain)', + 'value': 's3.me-south-1.petabox.io', + }), + dict({ + 'help': 'South America (São Paulo)', + 'value': 's3.sa-east-1.petabox.io', + }), + ]), + 'exclusive': False, + 'help': ''' + Endpoint for Petabox S3 Object Storage. + + Specify the endpoint from the same region. 
+ ''', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Petabox', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + The default endpoint + Leviia + ''', + 'value': 's3.leviia.com', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Leviia Object Storage API.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Leviia', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + The default endpoint + Iran + ''', + 'value': 'storage.iran.liara.space', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Liara Object Storage API.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Liara', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Amsterdam (Netherlands), nl-ams-1', + 'value': 'nl-ams-1.linodeobjects.com', + }), + dict({ + 'help': 'Atlanta, GA (USA), us-southeast-1', + 'value': 'us-southeast-1.linodeobjects.com', + }), + dict({ + 'help': 'Chennai (India), in-maa-1', + 'value': 'in-maa-1.linodeobjects.com', + }), + dict({ + 'help': 'Chicago, IL (USA), us-ord-1', + 'value': 'us-ord-1.linodeobjects.com', + }), + dict({ + 'help': 'Frankfurt (Germany), eu-central-1', + 'value': 'eu-central-1.linodeobjects.com', + }), + dict({ + 'help': 'Jakarta (Indonesia), id-cgk-1', + 'value': 'id-cgk-1.linodeobjects.com', + }), + dict({ + 'help': 'London 2 (Great Britain), gb-lon-1', + 'value': 'gb-lon-1.linodeobjects.com', + }), + dict({ + 'help': 'Los Angeles, CA (USA), us-lax-1', + 'value': 'us-lax-1.linodeobjects.com', + }), + dict({ + 'help': 'Madrid (Spain), es-mad-1', + 'value': 'es-mad-1.linodeobjects.com', + }), + dict({ + 'help': 'Melbourne (Australia), au-mel-1', + 'value': 'au-mel-1.linodeobjects.com', + }), + dict({ + 'help': 'Miami, FL (USA), us-mia-1', + 'value': 'us-mia-1.linodeobjects.com', + }), + dict({ + 'help': 'Milan (Italy), it-mil-1', + 'value': 'it-mil-1.linodeobjects.com', + }), + dict({ + 'help': 'Newark, NJ (USA), us-east-1', + 'value': 'us-east-1.linodeobjects.com', + }), + dict({ + 'help': 'Osaka (Japan), jp-osa-1', + 'value': 'jp-osa-1.linodeobjects.com', + }), + dict({ + 'help': 'Paris (France), fr-par-1', + 'value': 'fr-par-1.linodeobjects.com', + }), + dict({ + 'help': 'São Paulo (Brazil), br-gru-1', + 'value': 'br-gru-1.linodeobjects.com', + }), + dict({ + 'help': 'Seattle, WA (USA), us-sea-1', + 'value': 'us-sea-1.linodeobjects.com', + }), + dict({ + 'help': 'Singapore, ap-south-1', + 'value': 'ap-south-1.linodeobjects.com', + }), + dict({ + 'help': 'Singapore 2, sg-sin-1', + 'value': 'sg-sin-1.linodeobjects.com', + }), + dict({ + 'help': 'Stockholm (Sweden), se-sto-1', + 'value': 'se-sto-1.linodeobjects.com', + }), + dict({ + 'help': 'Washington, DC, (USA), us-iad-1', + 'value': 'us-iad-1.linodeobjects.com', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Linode Object Storage API.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Linode', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Endpoint for Lyve Cloud S3 API. + Required when using an S3 clone. Please type in your LyveCloud endpoint. 
+ Examples: + - s3.us-west-1.{account_name}.lyve.seagate.com (US West 1 - California) + - s3.eu-west-1.{account_name}.lyve.seagate.com (EU West 1 - Ireland) + ''', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'LyveCloud', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'São Paulo, SP (BR), br-se1', + 'value': 'br-se1.magaluobjects.com', + }), + dict({ + 'help': 'Fortaleza, CE (BR), br-ne1', + 'value': 'br-ne1.magaluobjects.com', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Magalu Object Storage API.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Magalu', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Global Accelerate', + 'value': 'oss-accelerate.aliyuncs.com', + }), + dict({ + 'help': 'Global Accelerate (outside mainland China)', + 'value': 'oss-accelerate-overseas.aliyuncs.com', + }), + dict({ + 'help': 'East China 1 (Hangzhou)', + 'value': 'oss-cn-hangzhou.aliyuncs.com', + }), + dict({ + 'help': 'East China 2 (Shanghai)', + 'value': 'oss-cn-shanghai.aliyuncs.com', + }), + dict({ + 'help': 'North China 1 (Qingdao)', + 'value': 'oss-cn-qingdao.aliyuncs.com', + }), + dict({ + 'help': 'North China 2 (Beijing)', + 'value': 'oss-cn-beijing.aliyuncs.com', + }), + dict({ + 'help': 'North China 3 (Zhangjiakou)', + 'value': 'oss-cn-zhangjiakou.aliyuncs.com', + }), + dict({ + 'help': 'North China 5 (Hohhot)', + 'value': 'oss-cn-huhehaote.aliyuncs.com', + }), + dict({ + 'help': 'North China 6 (Ulanqab)', + 'value': 'oss-cn-wulanchabu.aliyuncs.com', + }), + dict({ + 'help': 'South China 1 (Shenzhen)', + 'value': 'oss-cn-shenzhen.aliyuncs.com', + }), + dict({ + 'help': 'South China 2 (Heyuan)', + 'value': 'oss-cn-heyuan.aliyuncs.com', + }), + dict({ + 'help': 'South China 3 (Guangzhou)', + 'value': 'oss-cn-guangzhou.aliyuncs.com', + }), + dict({ + 'help': 'West China 1 (Chengdu)', + 'value': 'oss-cn-chengdu.aliyuncs.com', + }), + dict({ + 'help': 'Hong Kong (Hong Kong)', + 'value': 'oss-cn-hongkong.aliyuncs.com', + }), + dict({ + 'help': 'US West 1 (Silicon Valley)', + 'value': 'oss-us-west-1.aliyuncs.com', + }), + dict({ + 'help': 'US East 1 (Virginia)', + 'value': 'oss-us-east-1.aliyuncs.com', + }), + dict({ + 'help': 'Southeast Asia Southeast 1 (Singapore)', + 'value': 'oss-ap-southeast-1.aliyuncs.com', + }), + dict({ + 'help': 'Asia Pacific Southeast 2 (Sydney)', + 'value': 'oss-ap-southeast-2.aliyuncs.com', + }), + dict({ + 'help': 'Southeast Asia Southeast 3 (Kuala Lumpur)', + 'value': 'oss-ap-southeast-3.aliyuncs.com', + }), + dict({ + 'help': 'Asia Pacific Southeast 5 (Jakarta)', + 'value': 'oss-ap-southeast-5.aliyuncs.com', + }), + dict({ + 'help': 'Asia Pacific Northeast 1 (Japan)', + 'value': 'oss-ap-northeast-1.aliyuncs.com', + }), + dict({ + 'help': 'Asia Pacific South 1 (Mumbai)', + 'value': 'oss-ap-south-1.aliyuncs.com', + }), + dict({ + 'help': 'Central Europe 1 (Frankfurt)', + 'value': 'oss-eu-central-1.aliyuncs.com', + }), + dict({ + 'help': 'West Europe (London)', + 'value': 'oss-eu-west-1.aliyuncs.com', + }), + dict({ + 'help': 'Middle East 1 (Dubai)', + 'value': 'oss-me-east-1.aliyuncs.com', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for OSS API.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Alibaba', + 'required': False, + 'sensitive': False, + 'type': 
'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'AF-Johannesburg', + 'value': 'obs.af-south-1.myhuaweicloud.com', + }), + dict({ + 'help': 'AP-Bangkok', + 'value': 'obs.ap-southeast-2.myhuaweicloud.com', + }), + dict({ + 'help': 'AP-Singapore', + 'value': 'obs.ap-southeast-3.myhuaweicloud.com', + }), + dict({ + 'help': 'CN East-Shanghai1', + 'value': 'obs.cn-east-3.myhuaweicloud.com', + }), + dict({ + 'help': 'CN East-Shanghai2', + 'value': 'obs.cn-east-2.myhuaweicloud.com', + }), + dict({ + 'help': 'CN North-Beijing1', + 'value': 'obs.cn-north-1.myhuaweicloud.com', + }), + dict({ + 'help': 'CN North-Beijing4', + 'value': 'obs.cn-north-4.myhuaweicloud.com', + }), + dict({ + 'help': 'CN South-Guangzhou', + 'value': 'obs.cn-south-1.myhuaweicloud.com', + }), + dict({ + 'help': 'CN-Hong Kong', + 'value': 'obs.ap-southeast-1.myhuaweicloud.com', + }), + dict({ + 'help': 'LA-Buenos Aires1', + 'value': 'obs.sa-argentina-1.myhuaweicloud.com', + }), + dict({ + 'help': 'LA-Lima1', + 'value': 'obs.sa-peru-1.myhuaweicloud.com', + }), + dict({ + 'help': 'LA-Mexico City1', + 'value': 'obs.na-mexico-1.myhuaweicloud.com', + }), + dict({ + 'help': 'LA-Santiago2', + 'value': 'obs.sa-chile-1.myhuaweicloud.com', + }), + dict({ + 'help': 'LA-Sao Paulo1', + 'value': 'obs.sa-brazil-1.myhuaweicloud.com', + }), + dict({ + 'help': 'RU-Moscow2', + 'value': 'obs.ru-northwest-2.myhuaweicloud.com', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for OBS API.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'HuaweiOBS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Amsterdam Endpoint', + 'value': 's3.nl-ams.scw.cloud', + }), + dict({ + 'help': 'Paris Endpoint', + 'value': 's3.fr-par.scw.cloud', + }), + dict({ + 'help': 'Warsaw Endpoint', + 'value': 's3.pl-waw.scw.cloud', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Scaleway Object Storage.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Scaleway', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'US East Endpoint', + 'value': 's3.us-east-2.stackpathstorage.com', + }), + dict({ + 'help': 'US West Endpoint', + 'value': 's3.us-west-1.stackpathstorage.com', + }), + dict({ + 'help': 'EU Endpoint', + 'value': 's3.eu-central-1.stackpathstorage.com', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for StackPath Object Storage.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'StackPath', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Google Cloud Storage endpoint', + 'value': 'https://storage.googleapis.com', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Google Cloud Storage.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'GCS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Global Hosted Gateway', + 'value': 'gateway.storjshare.io', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Storj Gateway.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Storj', + 'required': False, + 
'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'EU Endpoint 1', + 'value': 'eu-001.s3.synologyc2.net', + }), + dict({ + 'help': 'EU Endpoint 2', + 'value': 'eu-002.s3.synologyc2.net', + }), + dict({ + 'help': 'US Endpoint 1', + 'value': 'us-001.s3.synologyc2.net', + }), + dict({ + 'help': 'US Endpoint 2', + 'value': 'us-002.s3.synologyc2.net', + }), + dict({ + 'help': 'TW Endpoint 1', + 'value': 'tw-001.s3.synologyc2.net', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Synology C2 Object Storage API.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Synology', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Beijing Region', + 'value': 'cos.ap-beijing.myqcloud.com', + }), + dict({ + 'help': 'Nanjing Region', + 'value': 'cos.ap-nanjing.myqcloud.com', + }), + dict({ + 'help': 'Shanghai Region', + 'value': 'cos.ap-shanghai.myqcloud.com', + }), + dict({ + 'help': 'Guangzhou Region', + 'value': 'cos.ap-guangzhou.myqcloud.com', + }), + dict({ + 'help': 'Nanjing Region', + 'value': 'cos.ap-nanjing.myqcloud.com', + }), + dict({ + 'help': 'Chengdu Region', + 'value': 'cos.ap-chengdu.myqcloud.com', + }), + dict({ + 'help': 'Chongqing Region', + 'value': 'cos.ap-chongqing.myqcloud.com', + }), + dict({ + 'help': 'Hong Kong (China) Region', + 'value': 'cos.ap-hongkong.myqcloud.com', + }), + dict({ + 'help': 'Singapore Region', + 'value': 'cos.ap-singapore.myqcloud.com', + }), + dict({ + 'help': 'Mumbai Region', + 'value': 'cos.ap-mumbai.myqcloud.com', + }), + dict({ + 'help': 'Seoul Region', + 'value': 'cos.ap-seoul.myqcloud.com', + }), + dict({ + 'help': 'Bangkok Region', + 'value': 'cos.ap-bangkok.myqcloud.com', + }), + dict({ + 'help': 'Tokyo Region', + 'value': 'cos.ap-tokyo.myqcloud.com', + }), + dict({ + 'help': 'Silicon Valley Region', + 'value': 'cos.na-siliconvalley.myqcloud.com', + }), + dict({ + 'help': 'Virginia Region', + 'value': 'cos.na-ashburn.myqcloud.com', + }), + dict({ + 'help': 'Toronto Region', + 'value': 'cos.na-toronto.myqcloud.com', + }), + dict({ + 'help': 'Frankfurt Region', + 'value': 'cos.eu-frankfurt.myqcloud.com', + }), + dict({ + 'help': 'Moscow Region', + 'value': 'cos.eu-moscow.myqcloud.com', + }), + dict({ + 'help': 'Use Tencent COS Accelerate Endpoint', + 'value': 'cos.accelerate.myqcloud.com', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Tencent COS API.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'TencentCOS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Global (AnyCast) Endpoint', + 'value': 's3.rackcorp.com', + }), + dict({ + 'help': 'Australia (Anycast) Endpoint', + 'value': 'au.s3.rackcorp.com', + }), + dict({ + 'help': 'Sydney (Australia) Endpoint', + 'value': 'au-nsw.s3.rackcorp.com', + }), + dict({ + 'help': 'Brisbane (Australia) Endpoint', + 'value': 'au-qld.s3.rackcorp.com', + }), + dict({ + 'help': 'Melbourne (Australia) Endpoint', + 'value': 'au-vic.s3.rackcorp.com', + }), + dict({ + 'help': 'Perth (Australia) Endpoint', + 'value': 'au-wa.s3.rackcorp.com', + }), + dict({ + 'help': 'Manila (Philippines) Endpoint', + 'value': 'ph.s3.rackcorp.com', + }), + dict({ + 'help': 'Bangkok (Thailand) Endpoint', + 'value': 'th.s3.rackcorp.com', + }), + 
dict({ + 'help': 'HK (Hong Kong) Endpoint', + 'value': 'hk.s3.rackcorp.com', + }), + dict({ + 'help': 'Ulaanbaatar (Mongolia) Endpoint', + 'value': 'mn.s3.rackcorp.com', + }), + dict({ + 'help': 'Bishkek (Kyrgyzstan) Endpoint', + 'value': 'kg.s3.rackcorp.com', + }), + dict({ + 'help': 'Jakarta (Indonesia) Endpoint', + 'value': 'id.s3.rackcorp.com', + }), + dict({ + 'help': 'Tokyo (Japan) Endpoint', + 'value': 'jp.s3.rackcorp.com', + }), + dict({ + 'help': 'SG (Singapore) Endpoint', + 'value': 'sg.s3.rackcorp.com', + }), + dict({ + 'help': 'Frankfurt (Germany) Endpoint', + 'value': 'de.s3.rackcorp.com', + }), + dict({ + 'help': 'USA (AnyCast) Endpoint', + 'value': 'us.s3.rackcorp.com', + }), + dict({ + 'help': 'New York (USA) Endpoint', + 'value': 'us-east-1.s3.rackcorp.com', + }), + dict({ + 'help': 'Freemont (USA) Endpoint', + 'value': 'us-west-1.s3.rackcorp.com', + }), + dict({ + 'help': 'Auckland (New Zealand) Endpoint', + 'value': 'nz.s3.rackcorp.com', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for RackCorp Object Storage.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'RackCorp', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'East China Endpoint 1', + 'value': 's3-cn-east-1.qiniucs.com', + }), + dict({ + 'help': 'East China Endpoint 2', + 'value': 's3-cn-east-2.qiniucs.com', + }), + dict({ + 'help': 'North China Endpoint 1', + 'value': 's3-cn-north-1.qiniucs.com', + }), + dict({ + 'help': 'South China Endpoint 1', + 'value': 's3-cn-south-1.qiniucs.com', + }), + dict({ + 'help': 'North America Endpoint 1', + 'value': 's3-us-north-1.qiniucs.com', + }), + dict({ + 'help': 'Southeast Asia Endpoint 1', + 'value': 's3-ap-southeast-1.qiniucs.com', + }), + dict({ + 'help': 'Northeast Asia Endpoint 1', + 'value': 's3-ap-northeast-1.qiniucs.com', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Qiniu Object Storage.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Qiniu', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Saint Petersburg', + 'value': 's3.ru-1.storage.selcloud.ru', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint for Selectel Object Storage.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Selectel', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Dream Objects endpoint', + 'provider': 'Dreamhost', + 'value': 'objects-us-east-1.dream.io', + }), + dict({ + 'help': 'DigitalOcean Spaces Sydney 1', + 'provider': 'DigitalOcean', + 'value': 'syd1.digitaloceanspaces.com', + }), + dict({ + 'help': 'DigitalOcean Spaces San Francisco 3', + 'provider': 'DigitalOcean', + 'value': 'sfo3.digitaloceanspaces.com', + }), + dict({ + 'help': 'DigitalOcean Spaces San Francisco 2', + 'provider': 'DigitalOcean', + 'value': 'sfo2.digitaloceanspaces.com', + }), + dict({ + 'help': 'DigitalOcean Spaces Frankfurt 1', + 'provider': 'DigitalOcean', + 'value': 'fra1.digitaloceanspaces.com', + }), + dict({ + 'help': 'DigitalOcean Spaces New York 3', + 'provider': 'DigitalOcean', + 'value': 'nyc3.digitaloceanspaces.com', + }), + dict({ + 'help': 'DigitalOcean Spaces Amsterdam 3', + 'provider': 'DigitalOcean', + 'value': 'ams3.digitaloceanspaces.com', + }), + dict({ 
+ 'help': 'DigitalOcean Spaces Singapore 1', + 'provider': 'DigitalOcean', + 'value': 'sgp1.digitaloceanspaces.com', + }), + dict({ + 'help': 'DigitalOcean Spaces London 1', + 'provider': 'DigitalOcean', + 'value': 'lon1.digitaloceanspaces.com', + }), + dict({ + 'help': 'DigitalOcean Spaces Toronto 1', + 'provider': 'DigitalOcean', + 'value': 'tor1.digitaloceanspaces.com', + }), + dict({ + 'help': 'DigitalOcean Spaces Bangalore 1', + 'provider': 'DigitalOcean', + 'value': 'blr1.digitaloceanspaces.com', + }), + dict({ + 'help': 'SeaweedFS S3 localhost', + 'provider': 'SeaweedFS', + 'value': 'localhost:8333', + }), + dict({ + 'help': 'Outscale EU West 2 (Paris)', + 'provider': 'Outscale', + 'value': 'oos.eu-west-2.outscale.com', + }), + dict({ + 'help': 'Outscale US east 2 (New Jersey)', + 'provider': 'Outscale', + 'value': 'oos.us-east-2.outscale.com', + }), + dict({ + 'help': 'Outscale EU West 1 (California)', + 'provider': 'Outscale', + 'value': 'oos.us-west-1.outscale.com', + }), + dict({ + 'help': 'Outscale SecNumCloud (Paris)', + 'provider': 'Outscale', + 'value': 'oos.cloudgouv-eu-west-1.outscale.com', + }), + dict({ + 'help': 'Outscale AP Northeast 1 (Japan)', + 'provider': 'Outscale', + 'value': 'oos.ap-northeast-1.outscale.com', + }), + dict({ + 'help': 'Wasabi US East 1 (N. Virginia)', + 'provider': 'Wasabi', + 'value': 's3.wasabisys.com', + }), + dict({ + 'help': 'Wasabi US East 2 (N. Virginia)', + 'provider': 'Wasabi', + 'value': 's3.us-east-2.wasabisys.com', + }), + dict({ + 'help': 'Wasabi US Central 1 (Texas)', + 'provider': 'Wasabi', + 'value': 's3.us-central-1.wasabisys.com', + }), + dict({ + 'help': 'Wasabi US West 1 (Oregon)', + 'provider': 'Wasabi', + 'value': 's3.us-west-1.wasabisys.com', + }), + dict({ + 'help': 'Wasabi CA Central 1 (Toronto)', + 'provider': 'Wasabi', + 'value': 's3.ca-central-1.wasabisys.com', + }), + dict({ + 'help': 'Wasabi EU Central 1 (Amsterdam)', + 'provider': 'Wasabi', + 'value': 's3.eu-central-1.wasabisys.com', + }), + dict({ + 'help': 'Wasabi EU Central 2 (Frankfurt)', + 'provider': 'Wasabi', + 'value': 's3.eu-central-2.wasabisys.com', + }), + dict({ + 'help': 'Wasabi EU West 1 (London)', + 'provider': 'Wasabi', + 'value': 's3.eu-west-1.wasabisys.com', + }), + dict({ + 'help': 'Wasabi EU West 2 (Paris)', + 'provider': 'Wasabi', + 'value': 's3.eu-west-2.wasabisys.com', + }), + dict({ + 'help': 'Wasabi EU South 1 (Milan)', + 'provider': 'Wasabi', + 'value': 's3.eu-south-1.wasabisys.com', + }), + dict({ + 'help': 'Wasabi AP Northeast 1 (Tokyo) endpoint', + 'provider': 'Wasabi', + 'value': 's3.ap-northeast-1.wasabisys.com', + }), + dict({ + 'help': 'Wasabi AP Northeast 2 (Osaka) endpoint', + 'provider': 'Wasabi', + 'value': 's3.ap-northeast-2.wasabisys.com', + }), + dict({ + 'help': 'Wasabi AP Southeast 1 (Singapore)', + 'provider': 'Wasabi', + 'value': 's3.ap-southeast-1.wasabisys.com', + }), + dict({ + 'help': 'Wasabi AP Southeast 2 (Sydney)', + 'provider': 'Wasabi', + 'value': 's3.ap-southeast-2.wasabisys.com', + }), + dict({ + 'help': 'Liara Iran endpoint', + 'provider': 'Liara', + 'value': 'storage.iran.liara.space', + }), + dict({ + 'help': 'ArvanCloud Tehran Iran (Simin) endpoint', + 'provider': 'ArvanCloud', + 'value': 's3.ir-thr-at1.arvanstorage.ir', + }), + dict({ + 'help': 'ArvanCloud Tabriz Iran (Shahriar) endpoint', + 'provider': 'ArvanCloud', + 'value': 's3.ir-tbz-sh1.arvanstorage.ir', + }), + dict({ + 'help': 'Mega S4 eu-central-1 (Amsterdam)', + 'provider': 'Mega', + 'value': 's3.eu-central-1.s4.mega.io', + }), + dict({ + 'help': 
'Mega S4 eu-central-2 (Bettembourg)', + 'provider': 'Mega', + 'value': 's3.eu-central-2.s4.mega.io', + }), + dict({ + 'help': 'Mega S4 ca-central-1 (Montreal)', + 'provider': 'Mega', + 'value': 's3.ca-central-1.s4.mega.io', + }), + dict({ + 'help': 'Mega S4 ca-west-1 (Vancouver)', + 'provider': 'Mega', + 'value': 's3.ca-west-1.s4.mega.io', + }), + ]), + 'exclusive': False, + 'help': ''' + Endpoint for S3 API. + + Required when using an S3 clone. + ''', + 'ispassword': False, + 'name': 'endpoint', + 'provider': '!AWS,ArvanCloud,IBMCOS,IDrive,IONOS,TencentCOS,HuaweiOBS,Alibaba,ChinaMobile,GCS,Liara,Linode,LyveCloud,Magalu,Scaleway,Selectel,StackPath,Storj,Synology,RackCorp,Qiniu,Petabox,Switch', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Empty for US Region, Northern Virginia, or Pacific Northwest', + 'value': '', + }), + dict({ + 'help': 'US East (Ohio) Region', + 'value': 'us-east-2', + }), + dict({ + 'help': 'US West (Northern California) Region', + 'value': 'us-west-1', + }), + dict({ + 'help': 'US West (Oregon) Region', + 'value': 'us-west-2', + }), + dict({ + 'help': 'Canada (Central) Region', + 'value': 'ca-central-1', + }), + dict({ + 'help': 'EU (Ireland) Region', + 'value': 'eu-west-1', + }), + dict({ + 'help': 'EU (London) Region', + 'value': 'eu-west-2', + }), + dict({ + 'help': 'EU (Paris) Region', + 'value': 'eu-west-3', + }), + dict({ + 'help': 'EU (Stockholm) Region', + 'value': 'eu-north-1', + }), + dict({ + 'help': 'EU (Milan) Region', + 'value': 'eu-south-1', + }), + dict({ + 'help': 'EU Region', + 'value': 'EU', + }), + dict({ + 'help': 'Asia Pacific (Singapore) Region', + 'value': 'ap-southeast-1', + }), + dict({ + 'help': 'Asia Pacific (Sydney) Region', + 'value': 'ap-southeast-2', + }), + dict({ + 'help': 'Asia Pacific (Tokyo) Region', + 'value': 'ap-northeast-1', + }), + dict({ + 'help': 'Asia Pacific (Seoul) Region', + 'value': 'ap-northeast-2', + }), + dict({ + 'help': 'Asia Pacific (Osaka-Local) Region', + 'value': 'ap-northeast-3', + }), + dict({ + 'help': 'Asia Pacific (Mumbai) Region', + 'value': 'ap-south-1', + }), + dict({ + 'help': 'Asia Pacific (Hong Kong) Region', + 'value': 'ap-east-1', + }), + dict({ + 'help': 'South America (Sao Paulo) Region', + 'value': 'sa-east-1', + }), + dict({ + 'help': 'Israel (Tel Aviv) Region', + 'value': 'il-central-1', + }), + dict({ + 'help': 'Middle East (Bahrain) Region', + 'value': 'me-south-1', + }), + dict({ + 'help': 'Africa (Cape Town) Region', + 'value': 'af-south-1', + }), + dict({ + 'help': 'China (Beijing) Region', + 'value': 'cn-north-1', + }), + dict({ + 'help': 'China (Ningxia) Region', + 'value': 'cn-northwest-1', + }), + dict({ + 'help': 'AWS GovCloud (US-East) Region', + 'value': 'us-gov-east-1', + }), + dict({ + 'help': 'AWS GovCloud (US) Region', + 'value': 'us-gov-west-1', + }), + ]), + 'exclusive': False, + 'help': ''' + Location constraint - must be set to match the Region. + + Used when creating buckets only. 
+ ''', + 'ispassword': False, + 'name': 'location_constraint', + 'provider': 'AWS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'East China (Suzhou)', + 'value': 'wuxi1', + }), + dict({ + 'help': 'East China (Jinan)', + 'value': 'jinan1', + }), + dict({ + 'help': 'East China (Hangzhou)', + 'value': 'ningbo1', + }), + dict({ + 'help': 'East China (Shanghai-1)', + 'value': 'shanghai1', + }), + dict({ + 'help': 'Central China (Zhengzhou)', + 'value': 'zhengzhou1', + }), + dict({ + 'help': 'Central China (Changsha-1)', + 'value': 'hunan1', + }), + dict({ + 'help': 'Central China (Changsha-2)', + 'value': 'zhuzhou1', + }), + dict({ + 'help': 'South China (Guangzhou-2)', + 'value': 'guangzhou1', + }), + dict({ + 'help': 'South China (Guangzhou-3)', + 'value': 'dongguan1', + }), + dict({ + 'help': 'North China (Beijing-1)', + 'value': 'beijing1', + }), + dict({ + 'help': 'North China (Beijing-2)', + 'value': 'beijing2', + }), + dict({ + 'help': 'North China (Beijing-3)', + 'value': 'beijing4', + }), + dict({ + 'help': 'North China (Huhehaote)', + 'value': 'huhehaote1', + }), + dict({ + 'help': 'Southwest China (Chengdu)', + 'value': 'chengdu1', + }), + dict({ + 'help': 'Southwest China (Chongqing)', + 'value': 'chongqing1', + }), + dict({ + 'help': 'Southwest China (Guiyang)', + 'value': 'guiyang1', + }), + dict({ + 'help': 'Nouthwest China (Xian)', + 'value': 'xian1', + }), + dict({ + 'help': 'Yunnan China (Kunming)', + 'value': 'yunnan', + }), + dict({ + 'help': 'Yunnan China (Kunming-2)', + 'value': 'yunnan2', + }), + dict({ + 'help': 'Tianjin China (Tianjin)', + 'value': 'tianjin1', + }), + dict({ + 'help': 'Jilin China (Changchun)', + 'value': 'jilin1', + }), + dict({ + 'help': 'Hubei China (Xiangyan)', + 'value': 'hubei1', + }), + dict({ + 'help': 'Jiangxi China (Nanchang)', + 'value': 'jiangxi1', + }), + dict({ + 'help': 'Gansu China (Lanzhou)', + 'value': 'gansu1', + }), + dict({ + 'help': 'Shanxi China (Taiyuan)', + 'value': 'shanxi1', + }), + dict({ + 'help': 'Liaoning China (Shenyang)', + 'value': 'liaoning1', + }), + dict({ + 'help': 'Hebei China (Shijiazhuang)', + 'value': 'hebei1', + }), + dict({ + 'help': 'Fujian China (Xiamen)', + 'value': 'fujian1', + }), + dict({ + 'help': 'Guangxi China (Nanning)', + 'value': 'guangxi1', + }), + dict({ + 'help': 'Anhui China (Huainan)', + 'value': 'anhui1', + }), + ]), + 'exclusive': False, + 'help': ''' + Location constraint - must match endpoint. + + Used when creating buckets only. + ''', + 'ispassword': False, + 'name': 'location_constraint', + 'provider': 'ChinaMobile', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Tehran Iran (Simin)', + 'value': 'ir-thr-at1', + }), + dict({ + 'help': 'Tabriz Iran (Shahriar)', + 'value': 'ir-tbz-sh1', + }), + ]), + 'exclusive': False, + 'help': ''' + Location constraint - must match endpoint. + + Used when creating buckets only. 
+ ''', + 'ispassword': False, + 'name': 'location_constraint', + 'provider': 'ArvanCloud', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'US Cross Region Standard', + 'value': 'us-standard', + }), + dict({ + 'help': 'US Cross Region Vault', + 'value': 'us-vault', + }), + dict({ + 'help': 'US Cross Region Cold', + 'value': 'us-cold', + }), + dict({ + 'help': 'US Cross Region Flex', + 'value': 'us-flex', + }), + dict({ + 'help': 'US East Region Standard', + 'value': 'us-east-standard', + }), + dict({ + 'help': 'US East Region Vault', + 'value': 'us-east-vault', + }), + dict({ + 'help': 'US East Region Cold', + 'value': 'us-east-cold', + }), + dict({ + 'help': 'US East Region Flex', + 'value': 'us-east-flex', + }), + dict({ + 'help': 'US South Region Standard', + 'value': 'us-south-standard', + }), + dict({ + 'help': 'US South Region Vault', + 'value': 'us-south-vault', + }), + dict({ + 'help': 'US South Region Cold', + 'value': 'us-south-cold', + }), + dict({ + 'help': 'US South Region Flex', + 'value': 'us-south-flex', + }), + dict({ + 'help': 'EU Cross Region Standard', + 'value': 'eu-standard', + }), + dict({ + 'help': 'EU Cross Region Vault', + 'value': 'eu-vault', + }), + dict({ + 'help': 'EU Cross Region Cold', + 'value': 'eu-cold', + }), + dict({ + 'help': 'EU Cross Region Flex', + 'value': 'eu-flex', + }), + dict({ + 'help': 'Great Britain Standard', + 'value': 'eu-gb-standard', + }), + dict({ + 'help': 'Great Britain Vault', + 'value': 'eu-gb-vault', + }), + dict({ + 'help': 'Great Britain Cold', + 'value': 'eu-gb-cold', + }), + dict({ + 'help': 'Great Britain Flex', + 'value': 'eu-gb-flex', + }), + dict({ + 'help': 'APAC Standard', + 'value': 'ap-standard', + }), + dict({ + 'help': 'APAC Vault', + 'value': 'ap-vault', + }), + dict({ + 'help': 'APAC Cold', + 'value': 'ap-cold', + }), + dict({ + 'help': 'APAC Flex', + 'value': 'ap-flex', + }), + dict({ + 'help': 'Melbourne Standard', + 'value': 'mel01-standard', + }), + dict({ + 'help': 'Melbourne Vault', + 'value': 'mel01-vault', + }), + dict({ + 'help': 'Melbourne Cold', + 'value': 'mel01-cold', + }), + dict({ + 'help': 'Melbourne Flex', + 'value': 'mel01-flex', + }), + dict({ + 'help': 'Toronto Standard', + 'value': 'tor01-standard', + }), + dict({ + 'help': 'Toronto Vault', + 'value': 'tor01-vault', + }), + dict({ + 'help': 'Toronto Cold', + 'value': 'tor01-cold', + }), + dict({ + 'help': 'Toronto Flex', + 'value': 'tor01-flex', + }), + ]), + 'exclusive': False, + 'help': ''' + Location constraint - must match endpoint when using IBM Cloud Public. + + For on-prem COS, do not make a selection from this list, hit enter. 
+ ''', + 'ispassword': False, + 'name': 'location_constraint', + 'provider': 'IBMCOS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Global CDN Region', + 'value': 'global', + }), + dict({ + 'help': 'Australia (All locations)', + 'value': 'au', + }), + dict({ + 'help': 'NSW (Australia) Region', + 'value': 'au-nsw', + }), + dict({ + 'help': 'QLD (Australia) Region', + 'value': 'au-qld', + }), + dict({ + 'help': 'VIC (Australia) Region', + 'value': 'au-vic', + }), + dict({ + 'help': 'Perth (Australia) Region', + 'value': 'au-wa', + }), + dict({ + 'help': 'Manila (Philippines) Region', + 'value': 'ph', + }), + dict({ + 'help': 'Bangkok (Thailand) Region', + 'value': 'th', + }), + dict({ + 'help': 'HK (Hong Kong) Region', + 'value': 'hk', + }), + dict({ + 'help': 'Ulaanbaatar (Mongolia) Region', + 'value': 'mn', + }), + dict({ + 'help': 'Bishkek (Kyrgyzstan) Region', + 'value': 'kg', + }), + dict({ + 'help': 'Jakarta (Indonesia) Region', + 'value': 'id', + }), + dict({ + 'help': 'Tokyo (Japan) Region', + 'value': 'jp', + }), + dict({ + 'help': 'SG (Singapore) Region', + 'value': 'sg', + }), + dict({ + 'help': 'Frankfurt (Germany) Region', + 'value': 'de', + }), + dict({ + 'help': 'USA (AnyCast) Region', + 'value': 'us', + }), + dict({ + 'help': 'New York (USA) Region', + 'value': 'us-east-1', + }), + dict({ + 'help': 'Freemont (USA) Region', + 'value': 'us-west-1', + }), + dict({ + 'help': 'Auckland (New Zealand) Region', + 'value': 'nz', + }), + ]), + 'exclusive': False, + 'help': ''' + Location constraint - the location where your bucket will be located and your data stored. + + ''', + 'ispassword': False, + 'name': 'location_constraint', + 'provider': 'RackCorp', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'East China Region 1', + 'value': 'cn-east-1', + }), + dict({ + 'help': 'East China Region 2', + 'value': 'cn-east-2', + }), + dict({ + 'help': 'North China Region 1', + 'value': 'cn-north-1', + }), + dict({ + 'help': 'South China Region 1', + 'value': 'cn-south-1', + }), + dict({ + 'help': 'North America Region 1', + 'value': 'us-north-1', + }), + dict({ + 'help': 'Southeast Asia Region 1', + 'value': 'ap-southeast-1', + }), + dict({ + 'help': 'Northeast Asia Region 1', + 'value': 'ap-northeast-1', + }), + ]), + 'exclusive': False, + 'help': ''' + Location constraint - must be set to match the Region. + + Used when creating buckets only. + ''', + 'ispassword': False, + 'name': 'location_constraint', + 'provider': 'Qiniu', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Location constraint - must be set to match the Region. + + Leave blank if not sure. Used when creating buckets only. + ''', + 'ispassword': False, + 'name': 'location_constraint', + 'provider': '!AWS,Alibaba,ArvanCloud,HuaweiOBS,ChinaMobile,Cloudflare,FlashBlade,IBMCOS,IDrive,IONOS,Leviia,Liara,Linode,Magalu,Outscale,Qiniu,RackCorp,Scaleway,Selectel,StackPath,Storj,TencentCOS,Petabox,Mega', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + Owner gets Full_CONTROL. + No one else has access rights (default). 
+ ''', + 'provider': 'TencentCOS', + 'value': 'default', + }), + dict({ + 'help': ''' + Owner gets FULL_CONTROL. + No one else has access rights (default). + ''', + 'provider': '!IBMCOS,TencentCOS', + 'value': 'private', + }), + dict({ + 'help': ''' + Owner gets FULL_CONTROL. + The AllUsers group gets READ access. + ''', + 'provider': '!IBMCOS', + 'value': 'public-read', + }), + dict({ + 'help': ''' + Owner gets FULL_CONTROL. + The AllUsers group gets READ and WRITE access. + Granting this on a bucket is generally not recommended. + ''', + 'provider': '!IBMCOS', + 'value': 'public-read-write', + }), + dict({ + 'help': ''' + Owner gets FULL_CONTROL. + The AuthenticatedUsers group gets READ access. + ''', + 'provider': '!IBMCOS', + 'value': 'authenticated-read', + }), + dict({ + 'help': ''' + Object owner gets FULL_CONTROL. + Bucket owner gets READ access. + If you specify this canned ACL when creating a bucket, Amazon S3 ignores it. + ''', + 'provider': '!IBMCOS,ChinaMobile', + 'value': 'bucket-owner-read', + }), + dict({ + 'help': ''' + Both the object owner and the bucket owner get FULL_CONTROL over the object. + If you specify this canned ACL when creating a bucket, Amazon S3 ignores it. + ''', + 'provider': '!IBMCOS,ChinaMobile', + 'value': 'bucket-owner-full-control', + }), + dict({ + 'help': ''' + Owner gets FULL_CONTROL. + No one else has access rights (default). + This acl is available on IBM Cloud (Infra), IBM Cloud (Storage), On-Premise COS. + ''', + 'provider': 'IBMCOS', + 'value': 'private', + }), + dict({ + 'help': ''' + Owner gets FULL_CONTROL. + The AllUsers group gets READ access. + This acl is available on IBM Cloud (Infra), IBM Cloud (Storage), On-Premise IBM COS. + ''', + 'provider': 'IBMCOS', + 'value': 'public-read', + }), + dict({ + 'help': ''' + Owner gets FULL_CONTROL. + The AllUsers group gets READ and WRITE access. + This acl is available on IBM Cloud (Infra), On-Premise IBM COS. + ''', + 'provider': 'IBMCOS', + 'value': 'public-read-write', + }), + dict({ + 'help': ''' + Owner gets FULL_CONTROL. + The AuthenticatedUsers group gets READ access. + Not supported on Buckets. + This acl is available on IBM Cloud (Infra) and On-Premise IBM COS. + ''', + 'provider': 'IBMCOS', + 'value': 'authenticated-read', + }), + ]), + 'exclusive': False, + 'help': ''' + Canned ACL used when creating buckets and storing or copying objects. + + This ACL is used for creating objects and if bucket_acl isn't set, for creating buckets too. + + For more info visit https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl + + Note that this ACL is applied when server-side copying objects as S3 + doesn't copy the ACL from the source but rather writes a fresh one. + + If the acl is an empty string then no X-Amz-Acl: header is added and + the default (private) will be used. + + ''', + 'ispassword': False, + 'name': 'acl', + 'provider': '!Storj,Selectel,Synology,Cloudflare,FlashBlade,Mega', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': ''' + Owner gets FULL_CONTROL. + No one else has access rights (default). + ''', + 'value': 'private', + }), + dict({ + 'help': ''' + Owner gets FULL_CONTROL. + The AllUsers group gets READ access. + ''', + 'value': 'public-read', + }), + dict({ + 'help': ''' + Owner gets FULL_CONTROL. + The AllUsers group gets READ and WRITE access. + Granting this on a bucket is generally not recommended. 
+ ''',
+ 'value': 'public-read-write',
+ }),
+ dict({
+ 'help': '''
+ Owner gets FULL_CONTROL.
+ The AuthenticatedUsers group gets READ access.
+ ''',
+ 'value': 'authenticated-read',
+ }),
+ ]),
+ 'exclusive': False,
+ 'help': '''
+ Canned ACL used when creating buckets.
+
+ For more info visit https://docs.aws.amazon.com/AmazonS3/latest/dev/acl-overview.html#canned-acl
+
+ Note that this ACL is applied only when creating buckets. If it
+ isn't set then "acl" is used instead.
+
+ If the "acl" and "bucket_acl" are empty strings then no X-Amz-Acl:
+ header is added and the default (private) will be used.
+
+ ''',
+ 'ispassword': False,
+ 'name': 'bucket_acl',
+ 'provider': '!Storj,Selectel,Synology,Cloudflare,FlashBlade',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'string',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': False,
+ 'default_str': 'false',
+ 'exclusive': False,
+ 'help': 'Enables requester pays option when interacting with S3 bucket.',
+ 'ispassword': False,
+ 'name': 'requester_pays',
+ 'provider': 'AWS',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'bool',
+ }),
+ dict({
+ 'advanced': False,
+ 'default': '',
+ 'default_str': '',
+ 'examples': list([
+ dict({
+ 'help': 'None',
+ 'value': '',
+ }),
+ dict({
+ 'help': 'AES256',
+ 'value': 'AES256',
+ }),
+ dict({
+ 'help': 'aws:kms',
+ 'provider': '!ChinaMobile',
+ 'value': 'aws:kms',
+ }),
+ ]),
+ 'exclusive': False,
+ 'help': 'The server-side encryption algorithm used when storing this object in S3.',
+ 'ispassword': False,
+ 'name': 'server_side_encryption',
+ 'provider': 'AWS,Ceph,ChinaMobile,Minio',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'string',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': '',
+ 'default_str': '',
+ 'examples': list([
+ dict({
+ 'help': 'None',
+ 'value': '',
+ }),
+ dict({
+ 'help': 'AES256',
+ 'value': 'AES256',
+ }),
+ ]),
+ 'exclusive': False,
+ 'help': 'If using SSE-C, the server-side encryption algorithm used when storing this object in S3.',
+ 'ispassword': False,
+ 'name': 'sse_customer_algorithm',
+ 'provider': 'AWS,Ceph,ChinaMobile,Minio',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'string',
+ }),
+ dict({
+ 'advanced': False,
+ 'default': '',
+ 'default_str': '',
+ 'examples': list([
+ dict({
+ 'help': 'None',
+ 'value': '',
+ }),
+ dict({
+ 'help': 'arn:aws:kms:*',
+ 'value': 'arn:aws:kms:us-east-1:*',
+ }),
+ ]),
+ 'exclusive': False,
+ 'help': 'If using KMS ID you must provide the ARN of Key.',
+ 'ispassword': False,
+ 'name': 'sse_kms_key_id',
+ 'provider': 'AWS,Ceph,Minio',
+ 'required': False,
+ 'sensitive': True,
+ 'type': 'string',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': '',
+ 'default_str': '',
+ 'examples': list([
+ dict({
+ 'help': 'None',
+ 'value': '',
+ }),
+ ]),
+ 'exclusive': False,
+ 'help': '''
+ To use SSE-C you may provide the secret encryption key used to encrypt/decrypt your data.
+
+ Alternatively you can provide --sse-customer-key-base64.
+ ''',
+ 'ispassword': False,
+ 'name': 'sse_customer_key',
+ 'provider': 'AWS,Ceph,ChinaMobile,Minio',
+ 'required': False,
+ 'sensitive': True,
+ 'type': 'string',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': '',
+ 'default_str': '',
+ 'examples': list([
+ dict({
+ 'help': 'None',
+ 'value': '',
+ }),
+ ]),
+ 'exclusive': False,
+ 'help': '''
+ If using SSE-C you must provide the secret encryption key encoded in base64 format to encrypt/decrypt your data.
+
+ Alternatively you can provide --sse-customer-key.
+ ''', + 'ispassword': False, + 'name': 'sse_customer_key_base64', + 'provider': 'AWS,Ceph,ChinaMobile,Minio', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'None', + 'value': '', + }), + ]), + 'exclusive': False, + 'help': ''' + If using SSE-C you may provide the secret encryption key MD5 checksum (optional). + + If you leave it blank, this is calculated automatically from the sse_customer_key provided. + + ''', + 'ispassword': False, + 'name': 'sse_customer_key_md5', + 'provider': 'AWS,Ceph,ChinaMobile,Minio', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Default', + 'value': '', + }), + dict({ + 'help': 'Standard storage class', + 'value': 'STANDARD', + }), + dict({ + 'help': 'Reduced redundancy storage class', + 'value': 'REDUCED_REDUNDANCY', + }), + dict({ + 'help': 'Standard Infrequent Access storage class', + 'value': 'STANDARD_IA', + }), + dict({ + 'help': 'One Zone Infrequent Access storage class', + 'value': 'ONEZONE_IA', + }), + dict({ + 'help': 'Glacier Flexible Retrieval storage class', + 'value': 'GLACIER', + }), + dict({ + 'help': 'Glacier Deep Archive storage class', + 'value': 'DEEP_ARCHIVE', + }), + dict({ + 'help': 'Intelligent-Tiering storage class', + 'value': 'INTELLIGENT_TIERING', + }), + dict({ + 'help': 'Glacier Instant Retrieval storage class', + 'value': 'GLACIER_IR', + }), + ]), + 'exclusive': False, + 'help': 'The storage class to use when storing new objects in S3.', + 'ispassword': False, + 'name': 'storage_class', + 'provider': 'AWS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Default', + 'value': '', + }), + dict({ + 'help': 'Standard storage class', + 'value': 'STANDARD', + }), + dict({ + 'help': 'Archive storage mode', + 'value': 'GLACIER', + }), + dict({ + 'help': 'Infrequent access storage mode', + 'value': 'STANDARD_IA', + }), + ]), + 'exclusive': False, + 'help': 'The storage class to use when storing new objects in OSS.', + 'ispassword': False, + 'name': 'storage_class', + 'provider': 'Alibaba', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Default', + 'value': '', + }), + dict({ + 'help': 'Standard storage class', + 'value': 'STANDARD', + }), + dict({ + 'help': 'Archive storage mode', + 'value': 'GLACIER', + }), + dict({ + 'help': 'Infrequent access storage mode', + 'value': 'STANDARD_IA', + }), + ]), + 'exclusive': False, + 'help': 'The storage class to use when storing new objects in ChinaMobile.', + 'ispassword': False, + 'name': 'storage_class', + 'provider': 'ChinaMobile', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Standard storage class', + 'value': 'STANDARD', + }), + ]), + 'exclusive': False, + 'help': 'The storage class to use when storing new objects in Liara', + 'ispassword': False, + 'name': 'storage_class', + 'provider': 'Liara', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + 
dict({ + 'help': 'Standard storage class', + 'value': 'STANDARD', + }), + ]), + 'exclusive': False, + 'help': 'The storage class to use when storing new objects in ArvanCloud.', + 'ispassword': False, + 'name': 'storage_class', + 'provider': 'ArvanCloud', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Standard storage class', + 'value': 'STANDARD', + }), + dict({ + 'help': 'Glacier Instant Retrieval storage class', + 'value': 'GLACIER_IR', + }), + ]), + 'exclusive': False, + 'help': 'The storage class to use when storing new objects in Magalu.', + 'ispassword': False, + 'name': 'storage_class', + 'provider': 'Magalu', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Default', + 'value': '', + }), + dict({ + 'help': 'Standard storage class', + 'value': 'STANDARD', + }), + dict({ + 'help': 'Archive storage mode', + 'value': 'ARCHIVE', + }), + dict({ + 'help': 'Infrequent access storage mode', + 'value': 'STANDARD_IA', + }), + ]), + 'exclusive': False, + 'help': 'The storage class to use when storing new objects in Tencent COS.', + 'ispassword': False, + 'name': 'storage_class', + 'provider': 'TencentCOS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Default.', + 'value': '', + }), + dict({ + 'help': ''' + The Standard class for any upload. + Suitable for on-demand content like streaming or CDN. + Available in all regions. + ''', + 'value': 'STANDARD', + }), + dict({ + 'help': ''' + Archived storage. + Prices are lower, but it needs to be restored first to be accessed. + Available in FR-PAR and NL-AMS regions. + ''', + 'value': 'GLACIER', + }), + dict({ + 'help': ''' + One Zone - Infrequent Access. + A good choice for storing secondary backup copies or easily re-creatable data. + Available in the FR-PAR region only. + ''', + 'value': 'ONEZONE_IA', + }), + ]), + 'exclusive': False, + 'help': 'The storage class to use when storing new objects in S3.', + 'ispassword': False, + 'name': 'storage_class', + 'provider': 'Scaleway', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Standard storage class', + 'value': 'STANDARD', + }), + dict({ + 'help': 'Infrequent access storage mode', + 'value': 'LINE', + }), + dict({ + 'help': 'Archive storage mode', + 'value': 'GLACIER', + }), + dict({ + 'help': 'Deep archive storage mode', + 'value': 'DEEP_ARCHIVE', + }), + ]), + 'exclusive': False, + 'help': 'The storage class to use when storing new objects in Qiniu.', + 'ispassword': False, + 'name': 'storage_class', + 'provider': 'Qiniu', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 209715200.0, + 'default_str': '200Mi', + 'exclusive': False, + 'help': ''' + Cutoff for switching to chunked upload. + + Any files larger than this will be uploaded in chunks of chunk_size. + The minimum is 0 and the maximum is 5 GiB. 
+ ''', + 'ispassword': False, + 'name': 'upload_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 5242880.0, + 'default_str': '5Mi', + 'exclusive': False, + 'help': ''' + Chunk size to use for uploading. + + When uploading files larger than upload_cutoff or files with unknown + size (e.g. from "rclone rcat" or uploaded with "rclone mount" or google + photos or google docs) they will be uploaded as multipart uploads + using this chunk size. + + Note that "--s3-upload-concurrency" chunks of this size are buffered + in memory per transfer. + + If you are transferring large files over high-speed links and you have + enough memory, then increasing this will speed up the transfers. + + Rclone will automatically increase the chunk size when uploading a + large file of known size to stay below the 10,000 chunks limit. + + Files of unknown size are uploaded with the configured + chunk_size. Since the default chunk size is 5 MiB and there can be at + most 10,000 chunks, this means that by default the maximum size of + a file you can stream upload is 48 GiB. If you wish to stream upload + larger files then you will need to increase chunk_size. + + Increasing the chunk size decreases the accuracy of the progress + statistics displayed with "-P" flag. Rclone treats chunk as sent when + it's buffered by the AWS SDK, when in fact it may still be uploading. + A bigger chunk size means a bigger AWS SDK buffer and progress + reporting more deviating from the truth. + + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 10000.0, + 'default_str': '10000', + 'exclusive': False, + 'help': ''' + Maximum number of parts in a multipart upload. + + This option defines the maximum number of multipart chunks to use + when doing a multipart upload. + + This can be useful if a service does not support the AWS S3 + specification of 10,000 chunks. + + Rclone will automatically increase the chunk size when uploading a + large file of a known size to stay below this number of chunks limit. + + ''', + 'ispassword': False, + 'name': 'max_upload_parts', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 4999610368.0, + 'default_str': '4.656Gi', + 'exclusive': False, + 'help': ''' + Cutoff for switching to multipart copy. + + Any files larger than this that need to be server-side copied will be + copied in chunks of this size. + + The minimum is 0 and the maximum is 5 GiB. + ''', + 'ispassword': False, + 'name': 'copy_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Don't store MD5 checksum with object metadata. + + Normally rclone will calculate the MD5 checksum of the input before + uploading it so it can add it to metadata on the object. This is great + for data integrity checking but can cause long delays for large files + to start uploading. + ''', + 'ispassword': False, + 'name': 'disable_checksum', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Path to the shared credentials file. + + If env_auth = true then rclone can use a shared credentials file. 
+ + If this variable is empty rclone will look for the + "AWS_SHARED_CREDENTIALS_FILE" env variable. If the env value is empty + it will default to the current user's home directory. + + Linux/OSX: "$HOME/.aws/credentials" + Windows: "%USERPROFILE%\.aws\credentials" + + ''', + 'ispassword': False, + 'name': 'shared_credentials_file', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Profile to use in the shared credentials file. + + If env_auth = true then rclone can use a shared credentials file. This + variable controls which profile is used in that file. + + If empty it will default to the environment variable "AWS_PROFILE" or + "default" if that environment variable is also not set. + + ''', + 'ispassword': False, + 'name': 'profile', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'An AWS session token.', + 'ispassword': False, + 'name': 'session_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 4.0, + 'default_str': '4', + 'exclusive': False, + 'help': ''' + Concurrency for multipart uploads and copies. + + This is the number of chunks of the same file that are uploaded + concurrently for multipart uploads and copies. + + If you are uploading small numbers of large files over high-speed links + and these uploads do not fully utilize your bandwidth, then increasing + this may help to speed up the transfers. + ''', + 'ispassword': False, + 'name': 'upload_concurrency', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': ''' + If true use path style access if false use virtual hosted style. + + If this is true (the default) then rclone will use path style access, + if false then rclone will use virtual path style. See [the AWS S3 + docs](https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingBucket.html#access-bucket-intro) + for more info. + + Some providers (e.g. AWS, Aliyun OSS, Netease COS, or Tencent COS) require this set to + false - rclone will do this automatically based on the provider + setting. + + Note that if your bucket isn't a valid DNS name, i.e. has '.' or '_' in, + you'll need to set this to true. + + ''', + 'ispassword': False, + 'name': 'force_path_style', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If true use v2 authentication. + + If this is false (the default) then rclone will use v4 authentication. + If it is set then rclone will use v2 authentication. + + Use this only if v4 signatures don't work, e.g. pre Jewel/v10 CEPH. + ''', + 'ispassword': False, + 'name': 'v2_auth', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If true use AWS S3 dual-stack endpoint (IPv6 support). 
+ + See [AWS Docs on Dualstack Endpoints](https://docs.aws.amazon.com/AmazonS3/latest/userguide/dual-stack-endpoints.html) + ''', + 'ispassword': False, + 'name': 'use_dual_stack', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If true use the AWS S3 accelerated endpoint. + + See: [AWS S3 Transfer acceleration](https://docs.aws.amazon.com/AmazonS3/latest/dev/transfer-acceleration-examples.html) + ''', + 'ispassword': False, + 'name': 'use_accelerate_endpoint', + 'provider': 'AWS', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If true avoid calling abort upload on a failure, leaving all successfully uploaded parts on S3 for manual recovery. + + It should be set to true for resuming uploads across different sessions. + + WARNING: Storing parts of an incomplete multipart upload counts towards space usage on S3 and will add additional costs if not cleaned up. + + ''', + 'ispassword': False, + 'name': 'leave_parts_on_error', + 'provider': 'AWS', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 1000.0, + 'default_str': '1000', + 'exclusive': False, + 'help': ''' + Size of listing chunk (response list for each ListObject S3 request). + + This option is also known as "MaxKeys", "max-items", or "page-size" from the AWS S3 specification. + Most services truncate the response list to 1000 objects even if requested more than that. + In AWS S3 this is a global maximum and cannot be changed, see [AWS S3](https://docs.aws.amazon.com/cli/latest/reference/s3/ls.html). + In Ceph, this can be increased with the "rgw list buckets max chunk" option. + + ''', + 'ispassword': False, + 'name': 'list_chunk', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': '0', + 'exclusive': False, + 'help': ''' + Version of ListObjects to use: 1,2 or 0 for auto. + + When S3 originally launched it only provided the ListObjects call to + enumerate objects in a bucket. + + However in May 2016 the ListObjectsV2 call was introduced. This is + much higher performance and should be used if at all possible. + + If set to the default, 0, rclone will guess according to the provider + set which list objects method to call. If it guesses wrong, then it + may be set manually here. + + ''', + 'ispassword': False, + 'name': 'list_version', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': dict({ + 'valid': False, + 'value': False, + }), + 'default_str': 'unset', + 'exclusive': False, + 'help': ''' + Whether to url encode listings: true/false/unset + + Some providers support URL encoding listings and where this is + available this is more reliable when using control characters in file + names. If this is set to unset (the default) then rclone will choose + according to the provider setting what to apply, but you can override + rclone's choice here. + + ''', + 'ispassword': False, + 'name': 'list_url_encode', + 'required': False, + 'sensitive': False, + 'type': 'Tristate', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If set, don't attempt to check the bucket exists or create it. 
+
+ This can be useful when trying to minimise the number of transactions
+ rclone does if you know the bucket exists already.
+
+ It can also be needed if the user you are using does not have bucket
+ creation permissions. Before v1.52.0 this would have passed silently
+ due to a bug.
+
+ ''',
+ 'ispassword': False,
+ 'name': 'no_check_bucket',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'bool',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': False,
+ 'default_str': 'false',
+ 'exclusive': False,
+ 'help': '''
+ If set, don't HEAD uploaded objects to check integrity.
+
+ This can be useful when trying to minimise the number of transactions
+ rclone does.
+
+ Setting it means that if rclone receives a 200 OK message after
+ uploading an object with PUT then it will assume that it got uploaded
+ properly.
+
+ In particular it will assume:
+
+ - the metadata, including modtime, storage class and content type was as uploaded
+ - the size was as uploaded
+
+ It reads the following items from the response for a single part PUT:
+
+ - the MD5SUM
+ - The uploaded date
+
+ For multipart uploads these items aren't read.
+
+ If a source object of unknown length is uploaded then rclone **will** do a
+ HEAD request.
+
+ Setting this flag increases the chance for undetected upload failures,
+ in particular an incorrect size, so it isn't recommended for normal
+ operation. In practice the chance of an undetected upload failure is
+ very small even with this flag.
+
+ ''',
+ 'ispassword': False,
+ 'name': 'no_head',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'bool',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': False,
+ 'default_str': 'false',
+ 'exclusive': False,
+ 'help': 'If set, do not do HEAD before GET when getting objects.',
+ 'ispassword': False,
+ 'name': 'no_head_object',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'bool',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': 50331650.0,
+ 'default_str': 'Slash,InvalidUtf8,Dot',
+ 'exclusive': False,
+ 'help': '''
+ The encoding for the backend.
+
+ See the [encoding section in the overview](/overview/#encoding) for more info.
+ ''',
+ 'ispassword': False,
+ 'name': 'encoding',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'Encoding',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': 60000000000.0,
+ 'default_str': '1m0s',
+ 'exclusive': False,
+ 'help': 'How often internal memory buffer pools will be flushed. (no longer used)',
+ 'ispassword': False,
+ 'name': 'memory_pool_flush_time',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'Duration',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': False,
+ 'default_str': 'false',
+ 'exclusive': False,
+ 'help': 'Whether to use mmap buffers in internal memory pool. (no longer used)',
+ 'ispassword': False,
+ 'name': 'memory_pool_use_mmap',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'bool',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': False,
+ 'default_str': 'false',
+ 'exclusive': False,
+ 'help': '''
+ Disable usage of http2 for S3 backends.
+
+ There is currently an unsolved issue with the s3 (specifically minio) backend
+ and HTTP/2. HTTP/2 is enabled by default for the s3 backend but can be
+ disabled here. When the issue is solved this flag will be removed.
+ + See: https://github.com/rclone/rclone/issues/4673, https://github.com/rclone/rclone/issues/3631 + + + ''', + 'ispassword': False, + 'name': 'disable_http2', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Custom endpoint for downloads. + This is usually set to a CloudFront CDN URL as AWS S3 offers + cheaper egress for data downloaded through the CloudFront network. + ''', + 'ispassword': False, + 'name': 'download_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Upload an empty object with a trailing slash when a new directory is created + + Empty folders are unsupported for bucket based remotes, this option creates an empty + object ending with "/", to persist the folder. + + ''', + 'ispassword': False, + 'name': 'directory_markers', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': dict({ + 'valid': False, + 'value': False, + }), + 'default_str': 'unset', + 'exclusive': False, + 'help': ''' + Whether to use ETag in multipart uploads for verification + + This should be true, false or left unset to use the default for the provider. + + ''', + 'ispassword': False, + 'name': 'use_multipart_etag', + 'required': False, + 'sensitive': False, + 'type': 'Tristate', + }), + dict({ + 'advanced': True, + 'default': dict({ + 'valid': False, + 'value': False, + }), + 'default_str': 'unset', + 'exclusive': False, + 'help': ''' + Whether to use an unsigned payload in PutObject + + Rclone has to avoid the AWS SDK seeking the body when calling + PutObject. The AWS provider can add checksums in the trailer to avoid + seeking but other providers can't. + + This should be true, false or left unset to use the default for the provider. + + ''', + 'ispassword': False, + 'name': 'use_unsigned_payload', + 'required': False, + 'sensitive': False, + 'type': 'Tristate', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Whether to use a presigned request or PutObject for single part uploads + + If this is false rclone will use PutObject from the AWS SDK to upload + an object. + + Versions of rclone < 1.59 use presigned requests to upload a single + part object and setting this flag to true will re-enable that + functionality. This shouldn't be necessary except in exceptional + circumstances or for testing. + + ''', + 'ispassword': False, + 'name': 'use_presigned_request', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Include old versions in directory listings.', + 'ispassword': False, + 'name': 'versions', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '0001-01-01T00:00:00Z', + 'default_str': 'off', + 'exclusive': False, + 'help': ''' + Show file versions as they were at the specified time. + + The parameter should be a date, "2006-01-02", datetime "2006-01-02 + 15:04:05" or a duration for that long ago, eg "100d" or "1h". + + Note that when using this no file write operations are permitted, + so you can't upload files or delete them. + + See [the time option docs](/docs/#time-option) for valid formats. 
+ + ''', + 'ispassword': False, + 'name': 'version_at', + 'required': False, + 'sensitive': False, + 'type': 'Time', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Show deleted file markers when using versions. + + This shows deleted file markers in the listing when using versions. These will appear + as 0 size files. The only operation which can be performed on them is deletion. + + Deleting a delete marker will reveal the previous version. + + Deleted files will always show with a timestamp. + + ''', + 'ispassword': False, + 'name': 'version_deleted', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If set this will decompress gzip encoded objects. + + It is possible to upload objects to S3 with "Content-Encoding: gzip" + set. Normally rclone will download these files as compressed objects. + + If this flag is set then rclone will decompress these files with + "Content-Encoding: gzip" as they are received. This means that rclone + can't check the size and hash but the file contents will be decompressed. + + ''', + 'ispassword': False, + 'name': 'decompress', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': dict({ + 'valid': False, + 'value': False, + }), + 'default_str': 'unset', + 'exclusive': False, + 'help': ''' + Set this if the backend might gzip objects. + + Normally providers will not alter objects when they are downloaded. If + an object was not uploaded with `Content-Encoding: gzip` then it won't + be set on download. + + However some providers may gzip objects even if they weren't uploaded + with `Content-Encoding: gzip` (eg Cloudflare). + + A symptom of this would be receiving errors like + + ERROR corrupted on transfer: sizes differ NNN vs MMM + + If you set this flag and rclone downloads an object with + Content-Encoding: gzip set and chunked transfer encoding, then rclone + will decompress the object on the fly. + + If this is set to unset (the default) then rclone will choose + according to the provider setting what to apply, but you can override + rclone's choice here. + + ''', + 'ispassword': False, + 'name': 'might_gzip', + 'required': False, + 'sensitive': False, + 'type': 'Tristate', + }), + dict({ + 'advanced': True, + 'default': dict({ + 'valid': False, + 'value': False, + }), + 'default_str': 'unset', + 'exclusive': False, + 'help': ''' + Whether to send `Accept-Encoding: gzip` header. + + By default, rclone will append `Accept-Encoding: gzip` to the request to download + compressed objects whenever possible. + + However some providers such as Google Cloud Storage may alter the HTTP headers, breaking + the signature of the request. + + A symptom of this would be receiving errors like + + SignatureDoesNotMatch: The request signature we calculated does not match the signature you provided. + + In this case, you might want to try disabling this option. 
+ + ''', + 'ispassword': False, + 'name': 'use_accept_encoding_gzip', + 'required': False, + 'sensitive': False, + 'type': 'Tristate', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Suppress setting and reading of system metadata', + 'ispassword': False, + 'name': 'no_system_metadata', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Endpoint for STS (deprecated). + + Leave blank if using AWS to use the default endpoint for the region. + ''', + 'ispassword': False, + 'name': 'sts_endpoint', + 'provider': 'AWS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': dict({ + 'valid': False, + 'value': False, + }), + 'default_str': 'unset', + 'exclusive': False, + 'help': ''' + Set if rclone should report BucketAlreadyExists errors on bucket creation. + + At some point during the evolution of the s3 protocol, AWS started + returning an `AlreadyOwnedByYou` error when attempting to create a + bucket that the user already owned, rather than a + `BucketAlreadyExists` error. + + Unfortunately exactly what has been implemented by s3 clones is a + little inconsistent, some return `AlreadyOwnedByYou`, some return + `BucketAlreadyExists` and some return no error at all. + + This is important to rclone because it ensures the bucket exists by + creating it on quite a lot of operations (unless + `--s3-no-check-bucket` is used). + + If rclone knows the provider can return `AlreadyOwnedByYou` or returns + no error then it can report `BucketAlreadyExists` errors when the user + attempts to create a bucket not owned by them. Otherwise rclone + ignores the `BucketAlreadyExists` error which can lead to confusion. + + This should be automatically set correctly for all providers rclone + knows about - please make a bug report if not. + + ''', + 'ispassword': False, + 'name': 'use_already_exists', + 'required': False, + 'sensitive': False, + 'type': 'Tristate', + }), + dict({ + 'advanced': True, + 'default': dict({ + 'valid': False, + 'value': False, + }), + 'default_str': 'unset', + 'exclusive': False, + 'help': ''' + Set if rclone should use multipart uploads. + + You can change this if you want to disable the use of multipart uploads. + This shouldn't be necessary in normal operation. + + This should be automatically set correctly for all providers rclone + knows about - please make a bug report if not. + + ''', + 'ispassword': False, + 'name': 'use_multipart_uploads', + 'required': False, + 'sensitive': False, + 'type': 'Tristate', + }), + dict({ + 'advanced': True, + 'default': dict({ + 'valid': False, + 'value': False, + }), + 'default_str': 'unset', + 'exclusive': False, + 'help': ''' + Set if rclone should add x-id URL parameters. + + You can change this if you want to disable the AWS SDK from + adding x-id URL parameters. + + This shouldn't be necessary in normal operation. + + This should be automatically set correctly for all providers rclone + knows about - please make a bug report if not. + + ''', + 'ispassword': False, + 'name': 'use_x_id', + 'required': False, + 'sensitive': False, + 'type': 'Tristate', + }), + dict({ + 'advanced': True, + 'default': dict({ + 'valid': False, + 'value': False, + }), + 'default_str': 'unset', + 'exclusive': False, + 'help': ''' + Set if rclone should include Accept-Encoding as part of the signature. 
+ + You can change this if you want to stop rclone including + Accept-Encoding as part of the signature. + + This shouldn't be necessary in normal operation. + + This should be automatically set correctly for all providers rclone + knows about - please make a bug report if not. + + ''', + 'ispassword': False, + 'name': 'sign_accept_encoding', + 'required': False, + 'sensitive': False, + 'type': 'Tristate', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Set to use AWS Directory Buckets + + If you are using an AWS Directory Bucket then set this flag. + + This will ensure no `Content-Md5` headers are sent and ensure `ETag` + headers are not interpreted as MD5 sums. `X-Amz-Meta-Md5chksum` will + be set on all objects whether single or multipart uploaded. + + This also sets `no_check_bucket = true`. + + Note that Directory Buckets do not support: + + - Versioning + - `Content-Encoding: gzip` + + Rclone limitations with Directory Buckets: + + - rclone does not support creating Directory Buckets with `rclone mkdir` + - ... or removing them with `rclone rmdir` yet + - Directory Buckets do not appear when doing `rclone lsf` at the top level. + - Rclone can't remove auto created directories yet. In theory this should + work with `directory_markers = true` but it doesn't. + - Directories don't seem to appear in recursive (ListR) listings. + + ''', + 'ispassword': False, + 'name': 'directory_bucket', + 'provider': 'AWS', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': 'Off', + 'exclusive': False, + 'help': ''' + Set to debug the SDK + + This can be set to a comma separated list of the following functions: + + - `Signing` + - `Retries` + - `Request` + - `RequestWithBody` + - `Response` + - `ResponseWithBody` + - `DeprecatedUsage` + - `RequestEventMessage` + - `ResponseEventMessage` + + Use `Off` to disable and `All` to set all log levels. You will need to + use `-vv` to see the debug level logs. 
+ + ''', + 'ispassword': False, + 'name': 'sdk_log_mode', + 'required': False, + 'sensitive': False, + 'type': 'Bits', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'IBM API Key to be used to obtain IAM token', + 'ispassword': False, + 'name': 'ibm_api_key', + 'provider': 'IBMCOS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'IBM service instance id', + 'ispassword': False, + 'name': 'ibm_resource_instance_id', + 'provider': 'IBMCOS', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 'https://s3-zh.os.switch.ch', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Cloudian Hyperstore (ZH)', + 'provider': '', + 'value': 'https://s3-zh.os.switch.ch', + }), + dict({ + 'help': 'Ceph Object Gateway (ZH)', + 'provider': '', + 'value': 'https://os.zhdk.cloud.switch.ch', + }), + dict({ + 'help': 'Ceph Object Gateway (LS)', + 'provider': '', + 'value': 'https://os.unil.cloud.switch.ch', + }), + ]), + 'exclusive': True, + 'help': 'Endpoint for Switch S3 API.', + 'ispassword': False, + 'name': 'endpoint', + 'provider': 'Switch', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 's3', + }), + dict({ + 'description': 'seafile', + 'name': 'seafile', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Connect to cloud.seafile.com.', + 'value': 'https://cloud.seafile.com/', + }), + ]), + 'exclusive': False, + 'help': 'URL of seafile host to connect to.', + 'ispassword': False, + 'name': 'url', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'User name (usually email address).', + 'ispassword': False, + 'name': 'user', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Password.', + 'ispassword': True, + 'name': 'pass', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': "Two-factor authentication ('true' if the account has 2FA enabled).", + 'ispassword': False, + 'name': '2fa', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Name of the library. + + Leave blank to access all non-encrypted libraries. + ''', + 'ispassword': False, + 'name': 'library', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Library password (for encrypted libraries only). + + Leave blank if you pass it through the command line. 
+ ''', + 'ispassword': True, + 'name': 'library_key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': "Should rclone create a library if it doesn't exist.", + 'ispassword': False, + 'name': 'create_library', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Authentication token.', + 'ispassword': False, + 'name': 'auth_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 16850954.0, + 'default_str': 'Slash,DoubleQuote,BackSlash,Ctl,InvalidUtf8', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'seafile', + }), + dict({ + 'description': 'SSH/SFTP', + 'name': 'sftp', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + SSH host to connect to. + + E.g. "example.com". + ''', + 'ispassword': False, + 'name': 'host', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 'vscode', + 'default_str': 'vscode', + 'exclusive': False, + 'help': 'SSH username.', + 'ispassword': False, + 'name': 'user', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 22.0, + 'default_str': '22', + 'exclusive': False, + 'help': 'SSH port number.', + 'ispassword': False, + 'name': 'port', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'SSH password, leave blank to use ssh-agent.', + 'ispassword': True, + 'name': 'pass', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Raw PEM-encoded private key. + + Note that this should be on a single line with line endings replaced with '\n', eg + + key_pem = -----BEGIN RSA PRIVATE KEY-----\nMaMbaIXtE\n0gAMbMbaSsd\nMbaass\n-----END RSA PRIVATE KEY----- + + This will generate the single line correctly: + + awk '{printf "%s\\n", $0}' < ~/.ssh/id_rsa + + If specified, it will override the key_file parameter. + ''', + 'ispassword': False, + 'name': 'key_pem', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Path to PEM-encoded private key file. + + Leave blank or set key-use-agent to use ssh-agent. + + Leading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`. 
+ ''', + 'ispassword': False, + 'name': 'key_file', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The passphrase to decrypt the PEM-encoded private key file. + + Only PEM encrypted key files (old OpenSSH format) are supported. Encrypted keys + in the new OpenSSH format can't be used. + ''', + 'ispassword': True, + 'name': 'key_file_pass', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + SSH public certificate for public certificate based authentication. + Set this if you have a signed certificate you want to use for authentication. + If specified will override pubkey_file. + ''', + 'ispassword': False, + 'name': 'pubkey', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Optional path to public key file. + + Set this if you have a signed certificate you want to use for authentication. + + Leading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`. + ''', + 'ispassword': False, + 'name': 'pubkey_file', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': "Use OpenSSH's known_hosts file.", + 'value': '~/.ssh/known_hosts', + }), + ]), + 'exclusive': False, + 'help': ''' + Optional path to known_hosts file. + + Set this value to enable server host key validation. + + Leading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`. + ''', + 'ispassword': False, + 'name': 'known_hosts_file', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + When set forces the usage of the ssh-agent. + + When key-file is also set, the ".pub" file of the specified key-file is read and only the associated key is + requested from the ssh-agent. This allows to avoid `Too many authentication failures for *username*` errors + when the ssh-agent contains many keys. + ''', + 'ispassword': False, + 'name': 'key_use_agent', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'examples': list([ + dict({ + 'help': 'Use default Cipher list.', + 'value': 'false', + }), + dict({ + 'help': 'Enables the use of the aes128-cbc cipher and diffie-hellman-group-exchange-sha256, diffie-hellman-group-exchange-sha1 key exchange.', + 'value': 'true', + }), + ]), + 'exclusive': False, + 'help': ''' + Enable the use of insecure ciphers and key exchange methods. + + This enables the use of the following insecure ciphers and key exchange methods: + + - aes128-cbc + - aes192-cbc + - aes256-cbc + - 3des-cbc + - diffie-hellman-group-exchange-sha256 + - diffie-hellman-group-exchange-sha1 + + Those algorithms are insecure and may allow plaintext data to be recovered by an attacker. + + This must be false if you use either ciphers or key_exchange advanced options. 
+ + ''', + 'ispassword': False, + 'name': 'use_insecure_cipher', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Disable the execution of SSH commands to determine if remote file hashing is available. + + Leave blank or set to false to enable hashing (recommended), set to true to disable hashing. + ''', + 'ispassword': False, + 'name': 'disable_hashcheck', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Allow asking for SFTP password when needed. + + If this is set and no password is supplied then rclone will: + - ask for a password + - not contact the ssh agent + + ''', + 'ispassword': False, + 'name': 'ask_password', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Override path used by SSH shell commands. + + This allows checksum calculation when SFTP and SSH paths are + different. This issue affects among others Synology NAS boxes. + + E.g. if shared folders can be found in directories representing volumes: + + rclone sync /home/local/directory remote:/directory --sftp-path-override /volume2/directory + + E.g. if home directory can be found in a shared folder called "home": + + rclone sync /home/local/directory remote:/home/directory --sftp-path-override /volume1/homes/USER/directory + + To specify only the path to the SFTP remote's root, and allow rclone to add any relative subpaths automatically (including unwrapping/decrypting remotes as necessary), add the '@' character to the beginning of the path. + + E.g. the first example above could be rewritten as: + + rclone sync /home/local/directory remote:/directory --sftp-path-override @/volume2 + + Note that when using this method with Synology "home" folders, the full "/homes/USER" path should be specified instead of "/home". + + E.g. the second example above should be rewritten as: + + rclone sync /home/local/directory remote:/homes/USER/directory --sftp-path-override @/volume1 + ''', + 'ispassword': False, + 'name': 'path_override', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': 'Set the modified time on the remote if set.', + 'ispassword': False, + 'name': 'set_modtime', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'No shell access', + 'value': 'none', + }), + dict({ + 'help': 'Unix shell', + 'value': 'unix', + }), + dict({ + 'help': 'PowerShell', + 'value': 'powershell', + }), + dict({ + 'help': 'Windows Command Prompt', + 'value': 'cmd', + }), + ]), + 'exclusive': False, + 'help': ''' + The type of SSH shell on remote server, if any. + + Leave blank for autodetect. + ''', + 'ispassword': False, + 'name': 'shell_type', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The command used to read md5 hashes. + + Leave blank for autodetect. 
+ ''',
+ 'ispassword': False,
+ 'name': 'md5sum_command',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'string',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': '',
+ 'default_str': '',
+ 'exclusive': False,
+ 'help': '''
+ The command used to read sha1 hashes.
+
+ Leave blank for autodetect.
+ ''',
+ 'ispassword': False,
+ 'name': 'sha1sum_command',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'string',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': False,
+ 'default_str': 'false',
+ 'exclusive': False,
+ 'help': 'Set to skip any symlinks and any other non regular files.',
+ 'ispassword': False,
+ 'name': 'skip_links',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'bool',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': 'sftp',
+ 'default_str': 'sftp',
+ 'exclusive': False,
+ 'help': 'Specifies the SSH2 subsystem on the remote host.',
+ 'ispassword': False,
+ 'name': 'subsystem',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'string',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': '',
+ 'default_str': '',
+ 'exclusive': False,
+ 'help': '''
+ Specifies the path or command to run a sftp server on the remote host.
+
+ The subsystem option is ignored when server_command is defined.
+
+ If adding server_command to the configuration file please note that
+ it should not be enclosed in quotes, since that will make rclone fail.
+
+ A working example is:
+
+ [remote_name]
+ type = sftp
+ server_command = sudo /usr/libexec/openssh/sftp-server
+ ''',
+ 'ispassword': False,
+ 'name': 'server_command',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'string',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': False,
+ 'default_str': 'false',
+ 'exclusive': False,
+ 'help': '''
+ If set use fstat instead of stat.
+
+ Some servers limit the number of open files and calling Stat after opening
+ the file will throw an error from the server. Setting this flag will call
+ Fstat instead of Stat which is called on an already open file handle.
+
+ It has been found that this helps with IBM Sterling SFTP servers which have
+ "extractability" level set to 1 which means only 1 file can be opened at
+ any given time.
+
+ ''',
+ 'ispassword': False,
+ 'name': 'use_fstat',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'bool',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': False,
+ 'default_str': 'false',
+ 'exclusive': False,
+ 'help': '''
+ If set don't use concurrent reads.
+
+ Normally concurrent reads are safe to use and not using them will
+ degrade performance, so this option is disabled by default.
+
+ Some servers limit the number of times a file can be
+ downloaded. Using concurrent reads can trigger this limit, so if you
+ have a server which returns
+
+ Failed to copy: file does not exist
+
+ Then you may need to enable this flag.
+
+ If concurrent reads are disabled, the use_fstat option is ignored.
+
+ ''',
+ 'ispassword': False,
+ 'name': 'disable_concurrent_reads',
+ 'required': False,
+ 'sensitive': False,
+ 'type': 'bool',
+ }),
+ dict({
+ 'advanced': True,
+ 'default': False,
+ 'default_str': 'false',
+ 'exclusive': False,
+ 'help': '''
+ If set don't use concurrent writes.
+
+ Normally rclone uses concurrent writes to upload files. This improves
+ the performance greatly, especially for distant servers.
+
+ This option disables concurrent writes should that be necessary.
+ + ''', + 'ispassword': False, + 'name': 'disable_concurrent_writes', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 60000000000.0, + 'default_str': '1m0s', + 'exclusive': False, + 'help': ''' + Max time before closing idle connections. + + If no connections have been returned to the connection pool in the time + given, rclone will empty the connection pool. + + Set to 0 to keep connections indefinitely. + + ''', + 'ispassword': False, + 'name': 'idle_timeout', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': 32768.0, + 'default_str': '32Ki', + 'exclusive': False, + 'help': ''' + Upload and download chunk size. + + This controls the maximum size of payload in SFTP protocol packets. + The RFC limits this to 32768 bytes (32k), which is the default. However, + a lot of servers support larger sizes, typically limited to a maximum + total package size of 256k, and setting it larger will increase transfer + speed dramatically on high latency links. This includes OpenSSH, and, + for example, using the value of 255k works well, leaving plenty of room + for overhead while still being within a total packet size of 256k. + + Make sure to test thoroughly before using a value higher than 32k, + and only use it if you always connect to the same server or after + sufficiently broad testing. If you get errors such as + "failed to send packet payload: EOF", lots of "connection lost", + or "corrupted on transfer", when copying a larger file, try lowering + the value. The server run by [rclone serve sftp](/commands/rclone_serve_sftp) + sends packets with standard 32k maximum payload so you must not + set a different chunk_size when downloading files, but it accepts + packets up to the 256k total size, so for uploads the chunk_size + can be set as for the OpenSSH example above. + + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 64.0, + 'default_str': '64', + 'exclusive': False, + 'help': ''' + The maximum number of outstanding requests for one file + + This controls the maximum number of outstanding requests for one file. + Increasing it will increase throughput on high latency links at the + cost of using more memory. + + ''', + 'ispassword': False, + 'name': 'concurrency', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': '0', + 'exclusive': False, + 'help': ''' + Maximum number of SFTP simultaneous connections, 0 for unlimited. + + Note that setting this is very likely to cause deadlocks so it should + be used with care. + + If you are doing a sync or copy then make sure connections is one more + than the sum of `--transfers` and `--checkers`. + + If you use `--check-first` then it just needs to be one more than the + maximum of `--checkers` and `--transfers`. + + So for `connections 3` you'd use `--checkers 2 --transfers 2 + --check-first` or `--checkers 1 --transfers 1`. + + + ''', + 'ispassword': False, + 'name': 'connections', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': list([ + ]), + 'default_str': '', + 'exclusive': False, + 'help': ''' + Environment variables to pass to sftp and commands + + Set environment variables in the form: + + VAR=value + + to be passed to the sftp client and to any commands run (eg md5sum). 
+ + Pass multiple variables space separated, eg + + VAR1=value VAR2=value + + and pass variables with spaces in quotes, eg + + "VAR3=value with space" "VAR4=value with space" VAR5=nospacehere + + + ''', + 'ispassword': False, + 'name': 'set_env', + 'required': False, + 'sensitive': False, + 'type': 'SpaceSepList', + }), + dict({ + 'advanced': True, + 'default': list([ + ]), + 'default_str': '', + 'exclusive': False, + 'help': ''' + Space separated list of ciphers to be used for session encryption, ordered by preference. + + At least one must match with server configuration. This can be checked for example using ssh -Q cipher. + + This must not be set if use_insecure_cipher is true. + + Example: + + aes128-ctr aes192-ctr aes256-ctr aes128-gcm@openssh.com aes256-gcm@openssh.com + + ''', + 'ispassword': False, + 'name': 'ciphers', + 'required': False, + 'sensitive': False, + 'type': 'SpaceSepList', + }), + dict({ + 'advanced': True, + 'default': list([ + ]), + 'default_str': '', + 'exclusive': False, + 'help': ''' + Space separated list of key exchange algorithms, ordered by preference. + + At least one must match with server configuration. This can be checked for example using ssh -Q kex. + + This must not be set if use_insecure_cipher is true. + + Example: + + sntrup761x25519-sha512@openssh.com curve25519-sha256 curve25519-sha256@libssh.org ecdh-sha2-nistp256 + + ''', + 'ispassword': False, + 'name': 'key_exchange', + 'required': False, + 'sensitive': False, + 'type': 'SpaceSepList', + }), + dict({ + 'advanced': True, + 'default': list([ + ]), + 'default_str': '', + 'exclusive': False, + 'help': ''' + Space separated list of MACs (message authentication code) algorithms, ordered by preference. + + At least one must match with server configuration. This can be checked for example using ssh -Q mac. + + Example: + + umac-64-etm@openssh.com umac-128-etm@openssh.com hmac-sha2-256-etm@openssh.com + + ''', + 'ispassword': False, + 'name': 'macs', + 'required': False, + 'sensitive': False, + 'type': 'SpaceSepList', + }), + dict({ + 'advanced': True, + 'default': list([ + ]), + 'default_str': '', + 'exclusive': False, + 'help': ''' + Space separated list of host key algorithms, ordered by preference. + + At least one must match with server configuration. This can be checked for example using ssh -Q HostKeyAlgorithms. + + Note: This can affect the outcome of key negotiation with the server even if server host key validation is not enabled. + + Example: + + ssh-ed25519 ssh-rsa ssh-dss + + ''', + 'ispassword': False, + 'name': 'host_key_algorithms', + 'required': False, + 'sensitive': False, + 'type': 'SpaceSepList', + }), + dict({ + 'advanced': False, + 'default': list([ + ]), + 'default_str': '', + 'exclusive': False, + 'help': ''' + Path and arguments to external ssh binary. + + Normally rclone will use its internal ssh library to connect to the + SFTP server. However it does not implement all possible ssh options so + it may be desirable to use an external ssh binary. + + Rclone ignores all the internal config if you use this option and + expects you to configure the ssh binary with the user/host/port and + any other options you need. + + **Important** The ssh command must log in without asking for a + password so needs to be configured with keys or certificates. + + Rclone will run the command supplied either with the additional + arguments "-s sftp" to access the SFTP subsystem or with commands such + as "md5sum /path/to/file" appended to read checksums. 
+ + Any arguments with spaces in should be surrounded by "double quotes". + + An example setting might be: + + ssh -o ServerAliveInterval=20 user@example.com + + Note that when using an external ssh binary rclone makes a new ssh + connection for every hash it calculates. + + ''', + 'ispassword': False, + 'name': 'ssh', + 'required': False, + 'sensitive': False, + 'type': 'SpaceSepList', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Socks 5 proxy host. + + Supports the format user:pass@host:port, user@host:port, host:port. + + Example: + + myUser:myPass@localhost:9005 + + ''', + 'ispassword': False, + 'name': 'socks_proxy', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + URL for HTTP CONNECT proxy + + Set this to a URL for an HTTP proxy which supports the HTTP CONNECT verb. + + ''', + 'ispassword': False, + 'name': 'http_proxy', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Set to enable server side copies using hardlinks. + + The SFTP protocol does not define a copy command so normally server + side copies are not allowed with the sftp backend. + + However the SFTP protocol does support hardlinking, and if you enable + this flag then the sftp backend will support server side copies. These + will be implemented by doing a hardlink from the source to the + destination. + + Not all sftp servers support this. + + Note that hardlinking two files together will use no additional space + as the source and the destination will be the same file. + + This feature may be useful backups made with --copy-dest. + ''', + 'ispassword': False, + 'name': 'copy_is_hardlink', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'sftp', + }), + dict({ + 'description': 'Citrix Sharefile', + 'name': 'sharefile', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. 
+ ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 134217728.0, + 'default_str': '128Mi', + 'exclusive': False, + 'help': 'Cutoff for switching to multipart upload.', + 'ispassword': False, + 'name': 'upload_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Access the Personal Folders (default).', + 'value': '', + }), + dict({ + 'help': 'Access the Favorites folder.', + 'value': 'favorites', + }), + dict({ + 'help': 'Access all the shared folders.', + 'value': 'allshared', + }), + dict({ + 'help': 'Access all the individual connectors.', + 'value': 'connectors', + }), + dict({ + 'help': 'Access the home, favorites, and shared folders as well as the connectors.', + 'value': 'top', + }), + ]), + 'exclusive': False, + 'help': ''' + ID of the root folder. + + Leave blank to access "Personal Folders". You can use one of the + standard values here or any folder ID (long hex number ID). + ''', + 'ispassword': False, + 'name': 'root_folder_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 67108864.0, + 'default_str': '64Mi', + 'exclusive': False, + 'help': ''' + Upload chunk size. + + Must a power of 2 >= 256k. + + Making this larger will improve performance, but note that each chunk + is buffered in memory one per transfer. + + Reducing this will reduce memory usage but decrease performance. + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Endpoint for API calls. + + This is usually auto discovered as part of the oauth process, but can + be set manually to something like: https://XXX.sharefile.com + + ''', + 'ispassword': False, + 'name': 'endpoint', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 57091982.0, + 'default_str': 'Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,LeftSpace,LeftPeriod,RightSpace,RightPeriod,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'sharefile', + }), + dict({ + 'description': 'Sia Decentralized Cloud', + 'name': 'sia', + 'options': list([ + dict({ + 'advanced': False, + 'default': 'http://127.0.0.1:9980', + 'default_str': 'http://127.0.0.1:9980', + 'exclusive': False, + 'help': ''' + Sia daemon API URL, like http://sia.daemon.host:9980. + + Note that siad must run with --disable-api-security to open API port for other hosts (not recommended). + Keep default if Sia daemon runs on localhost. 
+ ''', + 'ispassword': False, + 'name': 'api_url', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Sia Daemon API Password. + + Can be found in the apipassword file located in HOME/.sia/ or in the daemon directory. + ''', + 'ispassword': True, + 'name': 'api_password', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 'Sia-Agent', + 'default_str': 'Sia-Agent', + 'exclusive': False, + 'help': ''' + Siad User Agent + + Sia daemon requires the 'Sia-Agent' user agent by default for security + ''', + 'ispassword': False, + 'name': 'user_agent', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 50436354.0, + 'default_str': 'Slash,Question,Hash,Percent,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'sia', + }), + dict({ + 'description': 'SMB / CIFS', + 'name': 'smb', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + SMB server hostname to connect to. + + E.g. "example.com". + ''', + 'ispassword': False, + 'name': 'host', + 'required': True, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 'vscode', + 'default_str': 'vscode', + 'exclusive': False, + 'help': 'SMB username.', + 'ispassword': False, + 'name': 'user', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 445.0, + 'default_str': '445', + 'exclusive': False, + 'help': 'SMB port number.', + 'ispassword': False, + 'name': 'port', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'SMB password.', + 'ispassword': True, + 'name': 'pass', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 'WORKGROUP', + 'default_str': 'WORKGROUP', + 'exclusive': False, + 'help': 'Domain name for NTLM authentication.', + 'ispassword': False, + 'name': 'domain', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Service principal name. + + Rclone presents this name to the server. Some servers use this as further + authentication, and it often needs to be set for clusters. For example: + + cifs/remotehost:1020 + + Leave blank if not sure. + + ''', + 'ispassword': False, + 'name': 'spn', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use Kerberos authentication. + + If set, rclone will use Kerberos authentication instead of NTLM. 
This
+            requires a valid Kerberos configuration and credentials cache to be
+            available, either in the default locations or as specified by the
+            KRB5_CONFIG and KRB5CCNAME environment variables.
+
+          ''',
+          'ispassword': False,
+          'name': 'use_kerberos',
+          'required': False,
+          'sensitive': False,
+          'type': 'bool',
+        }),
+        dict({
+          'advanced': True,
+          'default': 60000000000.0,
+          'default_str': '1m0s',
+          'exclusive': False,
+          'help': '''
+            Max time before closing idle connections.
+
+            If no connections have been returned to the connection pool in the time
+            given, rclone will empty the connection pool.
+
+            Set to 0 to keep connections indefinitely.
+
+          ''',
+          'ispassword': False,
+          'name': 'idle_timeout',
+          'required': False,
+          'sensitive': False,
+          'type': 'Duration',
+        }),
+        dict({
+          'advanced': True,
+          'default': True,
+          'default_str': 'true',
+          'exclusive': False,
+          'help': "Hide special shares (e.g. print$) which users aren't supposed to access.",
+          'ispassword': False,
+          'name': 'hide_special_share',
+          'required': False,
+          'sensitive': False,
+          'type': 'bool',
+        }),
+        dict({
+          'advanced': True,
+          'default': True,
+          'default_str': 'true',
+          'exclusive': False,
+          'help': '''
+            Whether the server is configured to be case-insensitive.
+
+            Always true on Windows shares.
+          ''',
+          'ispassword': False,
+          'name': 'case_insensitive',
+          'required': False,
+          'sensitive': False,
+          'type': 'bool',
+        }),
+        dict({
+          'advanced': True,
+          'default': 56698766.0,
+          'default_str': 'Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,BackSlash,Ctl,RightSpace,RightPeriod,InvalidUtf8,Dot',
+          'exclusive': False,
+          'help': '''
+            The encoding for the backend.
+
+            See the [encoding section in the overview](/overview/#encoding) for more info.
+          ''',
+          'ispassword': False,
+          'name': 'encoding',
+          'required': False,
+          'sensitive': False,
+          'type': 'Encoding',
+        }),
+        dict({
+          'advanced': True,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': 'Description of the remote.',
+          'ispassword': False,
+          'name': 'description',
+          'required': False,
+          'sensitive': False,
+          'type': 'string',
+        }),
+      ]),
+      'prefix': 'smb',
+    }),
+    dict({
+      'description': 'Storj Decentralized Cloud Storage',
+      'name': 'storj',
+      'options': list([
+        dict({
+          'advanced': False,
+          'default': 'existing',
+          'default_str': 'existing',
+          'examples': list([
+            dict({
+              'help': 'Use an existing access grant.',
+              'value': 'existing',
+            }),
+            dict({
+              'help': 'Create a new access grant from satellite address, API key, and passphrase.',
+              'value': 'new',
+            }),
+          ]),
+          'exclusive': False,
+          'help': 'Choose an authentication method.',
+          'ispassword': False,
+          'name': 'provider',
+          'required': False,
+          'sensitive': False,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': 'Access grant.',
+          'ispassword': False,
+          'name': 'access_grant',
+          'provider': 'existing',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': 'us1.storj.io',
+          'default_str': 'us1.storj.io',
+          'examples': list([
+            dict({
+              'help': 'US1',
+              'value': 'us1.storj.io',
+            }),
+            dict({
+              'help': 'EU1',
+              'value': 'eu1.storj.io',
+            }),
+            dict({
+              'help': 'AP1',
+              'value': 'ap1.storj.io',
+            }),
+          ]),
+          'exclusive': False,
+          'help': '''
+            Satellite address.
+
+            Custom satellite address should match the format: `<nodeid>@<address>:<port>`.
+          ''',
+          'ispassword': False,
+          'name': 'satellite_address',
+          'provider': 'new',
+          'required': False,
+          'sensitive': False,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': 'API key.',
+          'ispassword': False,
+          'name': 'api_key',
+          'provider': 'new',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': '''
+            Encryption passphrase.
+
+            To access existing objects enter passphrase used for uploading.
+          ''',
+          'ispassword': False,
+          'name': 'passphrase',
+          'provider': 'new',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': True,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': 'Description of the remote.',
+          'ispassword': False,
+          'name': 'description',
+          'required': False,
+          'sensitive': False,
+          'type': 'string',
+        }),
+      ]),
+      'prefix': 'storj',
+    }),
+    dict({
+      'description': 'Sugarsync',
+      'name': 'sugarsync',
+      'options': list([
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': '''
+            Sugarsync App ID.
+
+            Leave blank to use rclone's.
+          ''',
+          'ispassword': False,
+          'name': 'app_id',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': '''
+            Sugarsync Access Key ID.
+
+            Leave blank to use rclone's.
+          ''',
+          'ispassword': False,
+          'name': 'access_key_id',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': '''
+            Sugarsync Private Access Key.
+
+            Leave blank to use rclone's.
+          ''',
+          'ispassword': False,
+          'name': 'private_access_key',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': False,
+          'default_str': 'false',
+          'exclusive': False,
+          'help': '''
+            Permanently delete files if true
+            otherwise put them in the deleted files.
+          ''',
+          'ispassword': False,
+          'name': 'hard_delete',
+          'required': False,
+          'sensitive': False,
+          'type': 'bool',
+        }),
+        dict({
+          'advanced': True,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': '''
+            Sugarsync refresh token.
+
+            Leave blank normally, will be auto configured by rclone.
+          ''',
+          'ispassword': False,
+          'name': 'refresh_token',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': True,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': '''
+            Sugarsync authorization.
+
+            Leave blank normally, will be auto configured by rclone.
+          ''',
+          'ispassword': False,
+          'name': 'authorization',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': True,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': '''
+            Sugarsync authorization expiry.
+
+            Leave blank normally, will be auto configured by rclone.
+          ''',
+          'ispassword': False,
+          'name': 'authorization_expiry',
+          'required': False,
+          'sensitive': False,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': True,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': '''
+            Sugarsync user.
+
+            Leave blank normally, will be auto configured by rclone.
+ ''', + 'ispassword': False, + 'name': 'user', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Sugarsync root id. + + Leave blank normally, will be auto configured by rclone. + ''', + 'ispassword': False, + 'name': 'root_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Sugarsync deleted folder id. + + Leave blank normally, will be auto configured by rclone. + ''', + 'ispassword': False, + 'name': 'deleted_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 50397186.0, + 'default_str': 'Slash,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'sugarsync', + }), + dict({ + 'description': 'OpenStack Swift (Rackspace Cloud Files, Blomp Cloud Storage, Memset Memstore, OVH)', + 'name': 'swift', + 'options': list([ + dict({ + 'advanced': False, + 'default': False, + 'default_str': 'false', + 'examples': list([ + dict({ + 'help': 'Enter swift credentials in the next step.', + 'value': 'false', + }), + dict({ + 'help': ''' + Get swift credentials from environment vars. + Leave other fields blank if using this. 
+ ''', + 'value': 'true', + }), + ]), + 'exclusive': False, + 'help': 'Get swift credentials from environment variables in standard OpenStack form.', + 'ispassword': False, + 'name': 'env_auth', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'User name to log in (OS_USERNAME).', + 'ispassword': False, + 'name': 'user', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'API key or password (OS_PASSWORD).', + 'ispassword': False, + 'name': 'key', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Rackspace US', + 'value': 'https://auth.api.rackspacecloud.com/v1.0', + }), + dict({ + 'help': 'Rackspace UK', + 'value': 'https://lon.auth.api.rackspacecloud.com/v1.0', + }), + dict({ + 'help': 'Rackspace v2', + 'value': 'https://identity.api.rackspacecloud.com/v2.0', + }), + dict({ + 'help': 'Memset Memstore UK', + 'value': 'https://auth.storage.memset.com/v1.0', + }), + dict({ + 'help': 'Memset Memstore UK v2', + 'value': 'https://auth.storage.memset.com/v2.0', + }), + dict({ + 'help': 'OVH', + 'value': 'https://auth.cloud.ovh.net/v3', + }), + dict({ + 'help': 'Blomp Cloud Storage', + 'value': 'https://authenticate.ain.net', + }), + ]), + 'exclusive': False, + 'help': 'Authentication URL for server (OS_AUTH_URL).', + 'ispassword': False, + 'name': 'auth', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'User ID to log in - optional - most swift systems use user and leave this blank (v3 auth) (OS_USER_ID).', + 'ispassword': False, + 'name': 'user_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'User domain - optional (v3 auth) (OS_USER_DOMAIN_NAME)', + 'ispassword': False, + 'name': 'domain', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Tenant name - optional for v1 auth, this or tenant_id required otherwise (OS_TENANT_NAME or OS_PROJECT_NAME).', + 'ispassword': False, + 'name': 'tenant', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Tenant ID - optional for v1 auth, this or tenant required otherwise (OS_TENANT_ID).', + 'ispassword': False, + 'name': 'tenant_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Tenant domain - optional (v3 auth) (OS_PROJECT_DOMAIN_NAME).', + 'ispassword': False, + 'name': 'tenant_domain', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Region name - optional (OS_REGION_NAME).', + 'ispassword': False, + 'name': 'region', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 
'Storage URL - optional (OS_STORAGE_URL).', + 'ispassword': False, + 'name': 'storage_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Auth Token from alternate authentication - optional (OS_AUTH_TOKEN).', + 'ispassword': False, + 'name': 'auth_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Application Credential ID (OS_APPLICATION_CREDENTIAL_ID).', + 'ispassword': False, + 'name': 'application_credential_id', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Application Credential Name (OS_APPLICATION_CREDENTIAL_NAME).', + 'ispassword': False, + 'name': 'application_credential_name', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Application Credential Secret (OS_APPLICATION_CREDENTIAL_SECRET).', + 'ispassword': False, + 'name': 'application_credential_secret', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': 0.0, + 'default_str': '0', + 'exclusive': False, + 'help': 'AuthVersion - optional - set to (1,2,3) if your auth URL has no version (ST_AUTH_VERSION).', + 'ispassword': False, + 'name': 'auth_version', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': False, + 'default': 'public', + 'default_str': 'public', + 'examples': list([ + dict({ + 'help': 'Public (default, choose this if not sure)', + 'value': 'public', + }), + dict({ + 'help': 'Internal (use internal service net)', + 'value': 'internal', + }), + dict({ + 'help': 'Admin', + 'value': 'admin', + }), + ]), + 'exclusive': False, + 'help': 'Endpoint type to choose from the service catalogue (OS_ENDPOINT_TYPE).', + 'ispassword': False, + 'name': 'endpoint_type', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + If true avoid calling abort upload on a failure. + + It should be set to true for resuming uploads across different sessions. + ''', + 'ispassword': False, + 'name': 'leave_parts_on_error', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Default', + 'value': '', + }), + dict({ + 'help': 'OVH Public Cloud Storage', + 'value': 'pcs', + }), + dict({ + 'help': 'OVH Public Cloud Archive', + 'value': 'pca', + }), + ]), + 'exclusive': False, + 'help': ''' + The storage policy to use when creating a new container. + + This applies the specified storage policy when creating a new + container. The policy cannot be changed afterwards. The allowed + configuration values and their meaning depend on your Swift storage + provider. + ''', + 'ispassword': False, + 'name': 'storage_policy', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + When paginating, always fetch unless we received an empty page. 
+ + Consider using this option if rclone listings show fewer objects + than expected, or if repeated syncs copy unchanged objects. + + It is safe to enable this, but rclone may make more API calls than + necessary. + + This is one of a pair of workarounds to handle implementations + of the Swift API that do not implement pagination as expected. See + also "partial_page_fetch_threshold". + ''', + 'ispassword': False, + 'name': 'fetch_until_empty_page', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 0.0, + 'default_str': '0', + 'exclusive': False, + 'help': ''' + When paginating, fetch if the current page is within this percentage of the limit. + + Consider using this option if rclone listings show fewer objects + than expected, or if repeated syncs copy unchanged objects. + + It is safe to enable this, but rclone may make more API calls than + necessary. + + This is one of a pair of workarounds to handle implementations + of the Swift API that do not implement pagination as expected. See + also "fetch_until_empty_page". + ''', + 'ispassword': False, + 'name': 'partial_page_fetch_threshold', + 'required': False, + 'sensitive': False, + 'type': 'int', + }), + dict({ + 'advanced': True, + 'default': 5368709120.0, + 'default_str': '5Gi', + 'exclusive': False, + 'help': ''' + Above this size files will be chunked. + + Above this size files will be chunked into a a `_segments` container + or a `.file-segments` directory. (See the `use_segments_container` option + for more info). Default for this is 5 GiB which is its maximum value, which + means only files above this size will be chunked. + + Rclone uploads chunked files as dynamic large objects (DLO). + + ''', + 'ispassword': False, + 'name': 'chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Don't chunk files during streaming upload. + + When doing streaming uploads (e.g. using `rcat` or `mount` with + `--vfs-cache-mode off`) setting this flag will cause the swift backend + to not upload chunked files. + + This will limit the maximum streamed upload size to 5 GiB. This is + useful because non chunked files are easier to deal with and have an + MD5SUM. + + Rclone will still chunk files bigger than `chunk_size` when doing + normal copy operations. + ''', + 'ispassword': False, + 'name': 'no_chunk', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Disable support for static and dynamic large objects + + Swift cannot transparently store files bigger than 5 GiB. There are + two schemes for chunking large files, static large objects (SLO) or + dynamic large objects (DLO), and the API does not allow rclone to + determine whether a file is a static or dynamic large object without + doing a HEAD on the object. Since these need to be treated + differently, this means rclone has to issue HEAD requests for objects + for example when reading checksums. + + When `no_large_objects` is set, rclone will assume that there are no + static or dynamic large objects stored. This means it can stop doing + the extra HEAD calls which in turn increases performance greatly + especially when doing a swift to swift transfer with `--checksum` set. 
+ + Setting this option implies `no_chunk` and also that no files will be + uploaded in chunks, so files bigger than 5 GiB will just fail on + upload. + + If you set this option and there **are** static or dynamic large objects, + then this will give incorrect hashes for them. Downloads will succeed, + but other operations such as Remove and Copy will fail. + + ''', + 'ispassword': False, + 'name': 'no_large_objects', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': dict({ + 'valid': False, + 'value': False, + }), + 'default_str': 'unset', + 'exclusive': False, + 'help': ''' + Choose destination for large object segments + + Swift cannot transparently store files bigger than 5 GiB and rclone + will chunk files larger than `chunk_size` (default 5 GiB) in order to + upload them. + + If this value is `true` the chunks will be stored in an additional + container named the same as the destination container but with + `_segments` appended. This means that there won't be any duplicated + data in the original container but having another container may not be + acceptable. + + If this value is `false` the chunks will be stored in a + `.file-segments` directory in the root of the container. This + directory will be omitted when listing the container. Some + providers (eg Blomp) require this mode as creating additional + containers isn't allowed. If it is desired to see the `.file-segments` + directory in the root then this flag must be set to `true`. + + If this value is `unset` (the default), then rclone will choose the value + to use. It will be `false` unless rclone detects any `auth_url`s that + it knows need it to be `true`. In this case you'll see a message in + the DEBUG log. + + ''', + 'ispassword': False, + 'name': 'use_segments_container', + 'required': False, + 'sensitive': False, + 'type': 'Tristate', + }), + dict({ + 'advanced': True, + 'default': 16777218.0, + 'default_str': 'Slash,InvalidUtf8', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. 
+          ''',
+          'ispassword': False,
+          'name': 'encoding',
+          'required': False,
+          'sensitive': False,
+          'type': 'Encoding',
+        }),
+        dict({
+          'advanced': True,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': 'Description of the remote.',
+          'ispassword': False,
+          'name': 'description',
+          'required': False,
+          'sensitive': False,
+          'type': 'string',
+        }),
+      ]),
+      'prefix': 'swift',
+    }),
+    dict({
+      'description': 'Storj Decentralized Cloud Storage',
+      'name': 'tardigrade',
+      'options': list([
+        dict({
+          'advanced': False,
+          'default': 'existing',
+          'default_str': 'existing',
+          'examples': list([
+            dict({
+              'help': 'Use an existing access grant.',
+              'value': 'existing',
+            }),
+            dict({
+              'help': 'Create a new access grant from satellite address, API key, and passphrase.',
+              'value': 'new',
+            }),
+          ]),
+          'exclusive': False,
+          'help': 'Choose an authentication method.',
+          'ispassword': False,
+          'name': 'provider',
+          'required': False,
+          'sensitive': False,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': 'Access grant.',
+          'ispassword': False,
+          'name': 'access_grant',
+          'provider': 'existing',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': 'us1.storj.io',
+          'default_str': 'us1.storj.io',
+          'examples': list([
+            dict({
+              'help': 'US1',
+              'value': 'us1.storj.io',
+            }),
+            dict({
+              'help': 'EU1',
+              'value': 'eu1.storj.io',
+            }),
+            dict({
+              'help': 'AP1',
+              'value': 'ap1.storj.io',
+            }),
+          ]),
+          'exclusive': False,
+          'help': '''
+            Satellite address.
+
+            Custom satellite address should match the format: `<nodeid>@<address>:<port>`.
+          ''',
+          'ispassword': False,
+          'name': 'satellite_address',
+          'provider': 'new',
+          'required': False,
+          'sensitive': False,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': 'API key.',
+          'ispassword': False,
+          'name': 'api_key',
+          'provider': 'new',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': '''
+            Encryption passphrase.
+
+            To access existing objects enter passphrase used for uploading.
+          ''',
+          'ispassword': False,
+          'name': 'passphrase',
+          'provider': 'new',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': True,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': 'Description of the remote.',
+          'ispassword': False,
+          'name': 'description',
+          'required': False,
+          'sensitive': False,
+          'type': 'string',
+        }),
+      ]),
+      'prefix': 'tardigrade',
+    }),
+    dict({
+      'description': 'Uloz.to',
+      'name': 'ulozto',
+      'options': list([
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': '''
+            The application token identifying the app. An app API key can be either found in the API
+            doc https://uloz.to/upload-resumable-api-beta or obtained from customer service.
+          ''',
+          'ispassword': False,
+          'name': 'app_token',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': 'The username of the principal to operate as.',
+          'ispassword': False,
+          'name': 'username',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': False,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': 'The password for the user.',
+          'ispassword': True,
+          'name': 'password',
+          'required': False,
+          'sensitive': False,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': True,
+          'default': '',
+          'default_str': '',
+          'exclusive': False,
+          'help': '''
+            If set, rclone will use this folder as the root folder for all operations. For example,
+            if the slug identifies 'foo/bar/', 'ulozto:baz' is equivalent to 'ulozto:foo/bar/baz' without
+            any root slug set.
+          ''',
+          'ispassword': False,
+          'name': 'root_folder_slug',
+          'required': False,
+          'sensitive': True,
+          'type': 'string',
+        }),
+        dict({
+          'advanced': True,
+          'default': 500.0,
+          'default_str': '500',
+          'exclusive': False,
+          'help': 'The size of a single page for list commands. 1-500',
+          'ispassword': False,
+          'name': 'list_page_size',
+          'required': False,
+          'sensitive': False,
+          'type': 'int',
+        }),
+        dict({
+          'advanced': True,
+          'default': 50438146.0,
+          'default_str': 'Slash,BackSlash,Del,Ctl,InvalidUtf8,Dot',
+          'exclusive': False,
+          'help': '''
+            The encoding for the backend.
+
+            See the [encoding section in the overview](/overview/#encoding) for more info.
+ ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'ulozto', + }), + dict({ + 'description': 'Uptobox', + 'name': 'uptobox', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Your access token. + + Get it from https://uptobox.com/my_account. + ''', + 'ispassword': False, + 'name': 'access_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Set to make uploaded files private', + 'ispassword': False, + 'name': 'private', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 50561070.0, + 'default_str': 'Slash,LtGt,DoubleQuote,BackQuote,Del,Ctl,LeftSpace,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'uptobox', + }), + dict({ + 'description': 'WebDAV', + 'name': 'webdav', + 'options': list([ + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + URL of http host to connect to. + + E.g. https://example.com. + ''', + 'ispassword': False, + 'name': 'url', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Fastmail Files', + 'value': 'fastmail', + }), + dict({ + 'help': 'Nextcloud', + 'value': 'nextcloud', + }), + dict({ + 'help': 'Owncloud 10 PHP based WebDAV server', + 'value': 'owncloud', + }), + dict({ + 'help': 'ownCloud Infinite Scale', + 'value': 'infinitescale', + }), + dict({ + 'help': 'Sharepoint Online, authenticated by Microsoft account', + 'value': 'sharepoint', + }), + dict({ + 'help': 'Sharepoint with NTLM authentication, usually self-hosted or on-premises', + 'value': 'sharepoint-ntlm', + }), + dict({ + 'help': 'rclone WebDAV server to serve a remote over HTTP via the WebDAV protocol', + 'value': 'rclone', + }), + dict({ + 'help': 'Other site/service or software', + 'value': 'other', + }), + ]), + 'exclusive': False, + 'help': 'Name of the WebDAV site/service/software you are using.', + 'ispassword': False, + 'name': 'vendor', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + User name. + + In case NTLM authentication is used, the username should be in the format 'Domain\User'. 
+ ''', + 'ispassword': False, + 'name': 'user', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Password.', + 'ispassword': True, + 'name': 'pass', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Bearer token instead of user/pass (e.g. a Macaroon).', + 'ispassword': False, + 'name': 'bearer_token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Command to run to get a bearer token.', + 'ispassword': False, + 'name': 'bearer_token_command', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + + Default encoding is Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,Hash,Percent,BackSlash,Del,Ctl,LeftSpace,LeftTilde,RightSpace,RightPeriod,InvalidUtf8 for sharepoint-ntlm or identity otherwise. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': list([ + ]), + 'default_str': '', + 'exclusive': False, + 'help': ''' + Set HTTP headers for all transactions. + + Use this to set additional HTTP headers for all transactions + + The input format is comma separated list of key,value pairs. Standard + [CSV encoding](https://godoc.org/encoding/csv) may be used. + + For example, to set a Cookie use 'Cookie,name=value', or '"Cookie","name=value"'. + + You can set multiple headers, e.g. '"Cookie","name=value","Authorization","xxx"'. + + ''', + 'ispassword': False, + 'name': 'headers', + 'required': False, + 'sensitive': False, + 'type': 'CommaSepList', + }), + dict({ + 'advanced': True, + 'default': 10000000.0, + 'default_str': '10ms', + 'exclusive': False, + 'help': 'Minimum time to sleep between API calls.', + 'ispassword': False, + 'name': 'pacer_min_sleep', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': 10485760.0, + 'default_str': '10Mi', + 'exclusive': False, + 'help': ''' + Nextcloud upload chunk size. + + We recommend configuring your NextCloud instance to increase the max chunk size to 1 GB for better upload performances. + See https://docs.nextcloud.com/server/latest/admin_manual/configuration_files/big_file_upload_configuration.html#adjust-chunk-size-on-nextcloud-side + + Set to 0 to disable chunked uploading. 
+ + ''', + 'ispassword': False, + 'name': 'nextcloud_chunk_size', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Exclude ownCloud shares', + 'ispassword': False, + 'name': 'owncloud_exclude_shares', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Exclude ownCloud mounted storages', + 'ispassword': False, + 'name': 'owncloud_exclude_mounts', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Path to a unix domain socket to dial to, instead of opening a TCP connection directly', + 'ispassword': False, + 'name': 'unix_socket', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Preserve authentication on redirect. + + If the server redirects rclone to a new domain when it is trying to + read a file then normally rclone will drop the Authorization: header + from the request. + + This is standard security practice to avoid sending your credentials + to an unknown webserver. + + However this is desirable in some circumstances. If you are getting + an error like "401 Unauthorized" when rclone is attempting to read + files from the webdav server then you can try this option. + + ''', + 'ispassword': False, + 'name': 'auth_redirect', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'webdav', + }), + dict({ + 'description': 'Yandex Disk', + 'name': 'yandex', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. 
+ ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Delete files permanently rather than putting them into the trash.', + 'ispassword': False, + 'name': 'hard_delete', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': 50429954.0, + 'default_str': 'Slash,Del,Ctl,InvalidUtf8,Dot', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': True, + 'default_str': 'true', + 'exclusive': False, + 'help': 'Set the user agent to match an official version of the yandex disk client. May help with upload performance.', + 'ispassword': False, + 'name': 'spoof_ua', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'yandex', + }), + dict({ + 'description': 'Zoho', + 'name': 'zoho', + 'options': list([ + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'OAuth Access Token as a JSON blob.', + 'ispassword': False, + 'name': 'token', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Auth server URL. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'auth_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + Token server url. + + Leave blank to use the provider defaults. + ''', + 'ispassword': False, + 'name': 'token_url', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Use client credentials OAuth flow. + + This will use the OAUTH2 client Credentials Flow as described in RFC 6749. + ''', + 'ispassword': False, + 'name': 'client_credentials', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'United states / Global', + 'value': 'com', + }), + dict({ + 'help': 'Europe', + 'value': 'eu', + }), + dict({ + 'help': 'India', + 'value': 'in', + }), + dict({ + 'help': 'Japan', + 'value': 'jp', + }), + dict({ + 'help': 'China', + 'value': 'com.cn', + }), + dict({ + 'help': 'Australia', + 'value': 'com.au', + }), + ]), + 'exclusive': False, + 'help': ''' + Zoho region to connect to. + + You'll have to use the region your organization is registered in. If + not sure use the same top level domain as you connect to in your + browser. 
+ ''', + 'ispassword': False, + 'name': 'region', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': 10485760.0, + 'default_str': '10Mi', + 'exclusive': False, + 'help': 'Cutoff for switching to large file upload api (>= 10 MiB).', + 'ispassword': False, + 'name': 'upload_cutoff', + 'required': False, + 'sensitive': False, + 'type': 'SizeSuffix', + }), + dict({ + 'advanced': True, + 'default': 16875520.0, + 'default_str': 'Del,Ctl,InvalidUtf8', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'Encoding', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'zoho', + }), + dict({ + 'description': 'Polybox', + 'name': 'PolyBox', + 'options': list([ + dict({ + 'advanced': False, + 'default': 'https://polybox.ethz.ch/remote.php/webdav/', + 'default_str': '', + 'exclusive': False, + 'help': ''' + URL of http host to connect to. + + E.g. https://example.com. + ''', + 'ispassword': False, + 'name': 'url', + 'provider': 'personal', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + User name. + + In case NTLM authentication is used, the username should be in the format 'Domain\User'. + ''', + 'ispassword': False, + 'name': 'user', + 'provider': 'personal', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Password.', + 'ispassword': True, + 'name': 'pass', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Bearer token instead of user/pass (e.g. a Macaroon).', + 'ispassword': False, + 'name': 'bearer_token', + 'provider': 'personal', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Command to run to get a bearer token.', + 'ispassword': False, + 'name': 'bearer_token_command', + 'provider': 'personal', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + + Default encoding is Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,Hash,Percent,BackSlash,Del,Ctl,LeftSpace,LeftTilde,RightSpace,RightPeriod,InvalidUtf8 for sharepoint-ntlm or identity otherwise. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': list([ + ]), + 'default_str': '', + 'exclusive': False, + 'help': ''' + Set HTTP headers for all transactions. + + Use this to set additional HTTP headers for all transactions + + The input format is comma separated list of key,value pairs. Standard + [CSV encoding](https://godoc.org/encoding/csv) may be used. 
+ + For example, to set a Cookie use 'Cookie,name=value', or '"Cookie","name=value"'. + + You can set multiple headers, e.g. '"Cookie","name=value","Authorization","xxx"'. + + ''', + 'ispassword': False, + 'name': 'headers', + 'provider': 'personal', + 'required': False, + 'sensitive': False, + 'type': 'CommaSepList', + }), + dict({ + 'advanced': True, + 'default': 10000000.0, + 'default_str': '10ms', + 'exclusive': False, + 'help': 'Minimum time to sleep between API calls.', + 'ispassword': False, + 'name': 'pacer_min_sleep', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Exclude ownCloud shares', + 'ispassword': False, + 'name': 'owncloud_exclude_shares', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Exclude ownCloud mounted storages', + 'ispassword': False, + 'name': 'owncloud_exclude_mounts', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Path to a unix domain socket to dial to, instead of opening a TCP connection directly', + 'ispassword': False, + 'name': 'unix_socket', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Preserve authentication on redirect. + + If the server redirects rclone to a new domain when it is trying to + read a file then normally rclone will drop the Authorization: header + from the request. + + This is standard security practice to avoid sending your credentials + to an unknown webserver. + + However this is desirable in some circumstances. If you are getting + an error like "401 Unauthorized" when rclone is attempting to read + files from the webdav server then you can try this option. + + ''', + 'ispassword': False, + 'name': 'auth_redirect', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Connect to your personal storage space. This data connector cannot be used to share access to a folder.', + 'provider': '', + 'value': 'personal', + }), + dict({ + 'help': "Connect a 'public' folder shared with others. A 'public' folder may or may not be protected with a password.", + 'provider': '', + 'value': 'shared', + }), + ]), + 'exclusive': True, + 'help': 'Choose the mode to access the data source.', + 'ispassword': False, + 'name': 'provider', + 'provider': '', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Shared folder link. 
E.g., https://polybox.ethz.ch/index.php/s/8NffJ3rFyHaVyyy', + 'ispassword': False, + 'name': 'public_link', + 'provider': 'shared', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'polybox', + }), + dict({ + 'description': 'SwitchDrive', + 'name': 'SwitchDrive', + 'options': list([ + dict({ + 'advanced': False, + 'default': 'https://drive.switch.ch/remote.php/webdav/', + 'default_str': '', + 'exclusive': False, + 'help': ''' + URL of http host to connect to. + + E.g. https://example.com. + ''', + 'ispassword': False, + 'name': 'url', + 'provider': 'personal', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + User name. + + In case NTLM authentication is used, the username should be in the format 'Domain\User'. + ''', + 'ispassword': False, + 'name': 'user', + 'provider': 'personal', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Password.', + 'ispassword': True, + 'name': 'pass', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Bearer token instead of user/pass (e.g. a Macaroon).', + 'ispassword': False, + 'name': 'bearer_token', + 'provider': 'personal', + 'required': False, + 'sensitive': True, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Command to run to get a bearer token.', + 'ispassword': False, + 'name': 'bearer_token_command', + 'provider': 'personal', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': ''' + The encoding for the backend. + + See the [encoding section in the overview](/overview/#encoding) for more info. + + Default encoding is Slash,LtGt,DoubleQuote,Colon,Question,Asterisk,Pipe,Hash,Percent,BackSlash,Del,Ctl,LeftSpace,LeftTilde,RightSpace,RightPeriod,InvalidUtf8 for sharepoint-ntlm or identity otherwise. + ''', + 'ispassword': False, + 'name': 'encoding', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': list([ + ]), + 'default_str': '', + 'exclusive': False, + 'help': ''' + Set HTTP headers for all transactions. + + Use this to set additional HTTP headers for all transactions + + The input format is comma separated list of key,value pairs. Standard + [CSV encoding](https://godoc.org/encoding/csv) may be used. + + For example, to set a Cookie use 'Cookie,name=value', or '"Cookie","name=value"'. + + You can set multiple headers, e.g. '"Cookie","name=value","Authorization","xxx"'. 
+ + ''', + 'ispassword': False, + 'name': 'headers', + 'provider': 'personal', + 'required': False, + 'sensitive': False, + 'type': 'CommaSepList', + }), + dict({ + 'advanced': True, + 'default': 10000000.0, + 'default_str': '10ms', + 'exclusive': False, + 'help': 'Minimum time to sleep between API calls.', + 'ispassword': False, + 'name': 'pacer_min_sleep', + 'required': False, + 'sensitive': False, + 'type': 'Duration', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Exclude ownCloud shares', + 'ispassword': False, + 'name': 'owncloud_exclude_shares', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': 'Exclude ownCloud mounted storages', + 'ispassword': False, + 'name': 'owncloud_exclude_mounts', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Path to a unix domain socket to dial to, instead of opening a TCP connection directly', + 'ispassword': False, + 'name': 'unix_socket', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': True, + 'default': False, + 'default_str': 'false', + 'exclusive': False, + 'help': ''' + Preserve authentication on redirect. + + If the server redirects rclone to a new domain when it is trying to + read a file then normally rclone will drop the Authorization: header + from the request. + + This is standard security practice to avoid sending your credentials + to an unknown webserver. + + However this is desirable in some circumstances. If you are getting + an error like "401 Unauthorized" when rclone is attempting to read + files from the webdav server then you can try this option. + + ''', + 'ispassword': False, + 'name': 'auth_redirect', + 'required': False, + 'sensitive': False, + 'type': 'bool', + }), + dict({ + 'advanced': True, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Description of the remote.', + 'ispassword': False, + 'name': 'description', + 'required': False, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'examples': list([ + dict({ + 'help': 'Connect to your personal storage space. This data connector cannot be used to share access to a folder.', + 'provider': '', + 'value': 'personal', + }), + dict({ + 'help': "Connect a 'public' folder shared with others. A 'public' folder may or may not be protected with a password.", + 'provider': '', + 'value': 'shared', + }), + ]), + 'exclusive': True, + 'help': 'Choose the mode to access the data source.', + 'ispassword': False, + 'name': 'provider', + 'provider': '', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + dict({ + 'advanced': False, + 'default': '', + 'default_str': '', + 'exclusive': False, + 'help': 'Shared folder link. 
E.g., https://drive.switch.ch/index.php/s/OPSd72zrs5JG666', + 'ispassword': False, + 'name': 'public_link', + 'provider': 'shared', + 'required': True, + 'sensitive': False, + 'type': 'string', + }), + ]), + 'prefix': 'switchDrive', + }), + ]) +# --- diff --git a/test/bases/renku_data_services/data_api/__snapshots__/test_user_preferences.ambr b/test/bases/renku_data_services/data_api/__snapshots__/test_user_preferences.ambr new file mode 100644 index 000000000..cae730c06 --- /dev/null +++ b/test/bases/renku_data_services/data_api/__snapshots__/test_user_preferences.ambr @@ -0,0 +1,31 @@ +# serializer version: 1 +# name: test_get_user_preferences[valid_add_pinned_project_payload0] + dict({ + 'pinned_projects': dict({ + 'project_slugs': list([ + 'user.1/first-project', + ]), + }), + 'show_project_migration_banner': True, + }) +# --- +# name: test_get_user_preferences[valid_add_pinned_project_payload1] + dict({ + 'pinned_projects': dict({ + 'project_slugs': list([ + 'john-doe-1/my-project-1', + ]), + }), + 'show_project_migration_banner': True, + }) +# --- +# name: test_get_user_preferences[valid_add_pinned_project_payload2] + dict({ + 'pinned_projects': dict({ + 'project_slugs': list([ + 'a-1-2-3/b-1-2-3', + ]), + }), + 'show_project_migration_banner': True, + }) +# --- diff --git a/test/bases/renku_data_services/data_api/conftest.py b/test/bases/renku_data_services/data_api/conftest.py index 9b1514282..8b422c5a2 100644 --- a/test/bases/renku_data_services/data_api/conftest.py +++ b/test/bases/renku_data_services/data_api/conftest.py @@ -1,29 +1,40 @@ import json -from collections.abc import AsyncGenerator +from collections.abc import AsyncGenerator, Callable from copy import deepcopy -from typing import Any +from typing import Any, Protocol +import pytest import pytest_asyncio from authzed.api.v1 import Relationship, RelationshipUpdate, SubjectReference, WriteRelationshipsRequest +from httpx import Response from sanic import Sanic from sanic_testing.testing import SanicASGITestClient from ulid import ULID -from components.renku_data_services.utils.middleware import validate_null_byte -from renku_data_services.app_config.config import Config +import renku_data_services.search.core as search_core from renku_data_services.authz.admin_sync import sync_admins_from_keycloak from renku_data_services.authz.authz import _AuthzConverter from renku_data_services.base_models import Slug +from renku_data_services.base_models.core import APIUser, InternalServiceAdmin, NamespacePath, ServiceAdminId from renku_data_services.data_api.app import register_all_handlers +from renku_data_services.data_api.dependencies import DependencyManager +from renku_data_services.data_connectors.apispec import DataConnector as ApiDataConnector from renku_data_services.migrations.core import run_migrations_for_app -from renku_data_services.namespace.models import Namespace, NamespaceKind -from renku_data_services.secrets.config import Config as SecretsConfig +from renku_data_services.namespace.apispec import GroupResponse as ApiGroup +from renku_data_services.namespace.models import UserNamespace +from renku_data_services.project.apispec import Project as ApiProject +from renku_data_services.search.apispec import SearchResult from renku_data_services.secrets_storage_api.app import register_all_handlers as register_secrets_handlers +from renku_data_services.secrets_storage_api.dependencies import DependencyManager as SecretsDependencyManager +from renku_data_services.solr import entity_schema +from 
renku_data_services.solr.solr_client import DefaultSolrClient +from renku_data_services.solr.solr_migrate import SchemaMigrator from renku_data_services.storage.rclone import RCloneValidator from renku_data_services.users.dummy_kc_api import DummyKeycloakAPI from renku_data_services.users.models import UserInfo -from test.bases.renku_data_services.background_jobs.test_sync import get_kc_users -from test.utils import SanicReusableASGITestClient +from renku_data_services.utils.middleware import validate_null_byte +from test.bases.renku_data_services.data_tasks.test_sync import get_kc_users +from test.utils import SanicReusableASGITestClient, TestDependencyManager @pytest_asyncio.fixture(scope="session") @@ -33,8 +44,11 @@ async def admin_user() -> UserInfo: first_name="Admin", last_name="Doe", email="admin.doe@gmail.com", - namespace=Namespace( - id=ULID(), slug="admin.doe", kind=NamespaceKind.user, underlying_resource_id="admin", created_by="admin" + namespace=UserNamespace( + id=ULID(), + underlying_resource_id="admin", + created_by="admin", + path=NamespacePath.from_strings("admin.doe"), ), ) @@ -46,8 +60,11 @@ async def regular_user() -> UserInfo: first_name="User", last_name="Doe", email="user.doe@gmail.com", - namespace=Namespace( - id=ULID(), slug="user.doe", kind=NamespaceKind.user, underlying_resource_id="user", created_by="user" + namespace=UserNamespace( + id=ULID(), + underlying_resource_id="user", + created_by="user", + path=NamespacePath.from_strings("user.doe"), ), ) @@ -59,12 +76,11 @@ async def member_1_user() -> UserInfo: first_name="Member-1", last_name="Doe", email="member-1.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="member-1.doe", - kind=NamespaceKind.user, underlying_resource_id="member-1", created_by="member-1", + path=NamespacePath.from_strings("member-1.doe"), ), ) @@ -76,12 +92,11 @@ async def member_2_user() -> UserInfo: first_name="Member-2", last_name="Doe", email="member-2.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="member-2.doe", - kind=NamespaceKind.user, underlying_resource_id="member-2", created_by="member-2", + path=NamespacePath.from_strings("member-2.doe"), ), ) @@ -160,11 +175,39 @@ async def unauthorized_headers() -> dict[str, str]: return {"Authorization": "Bearer {}"} +@pytest.fixture +def headers_from_user( + admin_user: UserInfo, + admin_headers: dict[str, str], + regular_user: UserInfo, + user_headers: dict[str, str], + member_1_user: UserInfo, + member_1_headers: dict[str, str], + member_2_user: UserInfo, + member_2_headers: dict[str, str], + unauthorized_headers: dict[str, str], +) -> Callable[[UserInfo], dict[str, str]]: + def _headers_from_user(user: UserInfo) -> dict[str, str]: + match user.id: + case admin_user.id: + return admin_headers + case regular_user.id: + return user_headers + case member_1_user.id: + return member_1_headers + case member_2_user.id: + return member_2_headers + case _: + return unauthorized_headers + + return _headers_from_user + + @pytest_asyncio.fixture async def bootstrap_admins( - sanic_client_with_migrations, app_config_instance: Config, event_loop, admin_user: UserInfo + sanic_client_with_migrations, app_manager_instance: DependencyManager, event_loop, admin_user: UserInfo ) -> None: - authz = app_config_instance.authz + authz = app_manager_instance.authz rels: list[RelationshipUpdate] = [] sub = SubjectReference(object=_AuthzConverter.user(admin_user.id)) rels.append( @@ -177,10 +220,10 @@ async def bootstrap_admins( 
@pytest_asyncio.fixture(scope="session") -async def sanic_app_no_migrations(app_config: Config, users: list[UserInfo], admin_user: UserInfo) -> Sanic: - app_config.kc_api = DummyKeycloakAPI(users=get_kc_users(users), user_roles={admin_user.id: ["renku-admin"]}) - app = Sanic(app_config.app_name) - app = register_all_handlers(app, app_config) +async def sanic_app_no_migrations(app_manager: DependencyManager, users: list[UserInfo], admin_user: UserInfo) -> Sanic: + app_manager.kc_api = DummyKeycloakAPI(users=get_kc_users(users), user_roles={admin_user.id: ["renku-admin"]}) + app = Sanic(app_manager.app_name) + app = register_all_handlers(app, app_manager) app.register_middleware(validate_null_byte, "request") validator = RCloneValidator() app.ext.dependency(validator) @@ -195,27 +238,90 @@ async def sanic_client_no_migrations(sanic_app_no_migrations: Sanic) -> AsyncGen @pytest_asyncio.fixture async def sanic_client_with_migrations( - sanic_client_no_migrations: SanicASGITestClient, app_config_instance + sanic_client_no_migrations: SanicASGITestClient, app_manager_instance ) -> SanicASGITestClient: run_migrations_for_app("common") + return sanic_client_no_migrations @pytest_asyncio.fixture async def sanic_client( - sanic_client_with_migrations: SanicASGITestClient, app_config_instance, bootstrap_admins + sanic_client_with_migrations: SanicASGITestClient, app_manager_instance, bootstrap_admins ) -> SanicASGITestClient: - await app_config_instance.kc_user_repo.initialize(app_config_instance.kc_api) - await sync_admins_from_keycloak(app_config_instance.kc_api, app_config_instance.authz) - await app_config_instance.group_repo.generate_user_namespaces() + await app_manager_instance.kc_user_repo.initialize(app_manager_instance.kc_api) + await sync_admins_from_keycloak(app_manager_instance.kc_api, app_manager_instance.authz) + await app_manager_instance.group_repo.generate_user_namespaces() return sanic_client_with_migrations +@pytest_asyncio.fixture +async def sanic_client_with_solr(sanic_client: SanicASGITestClient, app_manager) -> SanicASGITestClient: + migrator = SchemaMigrator(app_manager.config.solr) + await migrator.migrate(entity_schema.all_migrations) + + return sanic_client + + +class SearchReprovisionCall(Protocol): + """The type for the `search_reprovision` fixture.""" + + async def __call__(self) -> None: ... + + +@pytest_asyncio.fixture +async def search_reprovision(app_manager_instance: DependencyManager, search_push_updates) -> SearchReprovisionCall: + admin = InternalServiceAdmin(id=ServiceAdminId.search_reprovision) + + async def search_reprovision_helper() -> None: + await app_manager_instance.search_reprovisioning.run_reprovision(admin) + await search_push_updates(clear_index=False) + + return search_reprovision_helper + + +@pytest_asyncio.fixture +async def search_push_updates(app_manager_instance: DependencyManager): + async def search_push_updates_helper(clear_index: bool = True) -> None: + async with DefaultSolrClient(app_manager_instance.config.solr) as client: + if clear_index: + await client.delete("*:*") + await search_core.update_solr(app_manager_instance.search_updates_repo, client, 10) + + return search_push_updates_helper + + +class SearchQueryCall(Protocol): + """The type for the `search_query` fixture.""" + + async def __call__(self, query_str: str, user: UserInfo | None = None) -> SearchResult: ... 
+ + +@pytest_asyncio.fixture +async def search_query(sanic_client_with_solr, admin_user: UserInfo) -> SearchQueryCall: + async def search_query_helper(query_str: str, user: UserInfo | None = None) -> SearchResult: + headers = __make_headers(user, admin=user.id == admin_user.id) if user is not None else {} + _, response = await sanic_client_with_solr.get( + "/api/data/search/query", params={"q": query_str}, headers=headers or {} + ) + assert response.status_code == 200, response.text + return SearchResult.model_validate(response.json) + + return search_query_helper + + @pytest_asyncio.fixture async def create_project(sanic_client, user_headers, admin_headers, regular_user, admin_user): async def create_project_helper( - name: str, admin: bool = False, members: list[dict[str, str]] = None, **payload + name: str, + admin: bool = False, + members: list[dict[str, str]] | None = None, + description: str | None = None, + sanic_client=sanic_client, + **payload, ) -> dict[str, Any]: + if members is None: + members = [] headers = admin_headers if admin else user_headers user = admin_user if admin else regular_user payload = payload.copy() @@ -223,6 +329,8 @@ async def create_project_helper( payload.update({"name": name}) if "namespace" not in payload: payload.update({"namespace": f"{user.first_name}.{user.last_name}".lower()}) + if "description" not in payload and description is not None: + payload.update({"description": description}) _, response = await sanic_client.post("/api/data/projects", headers=headers, json=payload) @@ -241,8 +349,65 @@ async def create_project_helper( return create_project_helper +class CreateProjectCall(Protocol): + async def __call__( + self, + name: str, + user: UserInfo | None = None, + members: list[dict[str, str]] | None = None, + **payload, + ) -> ApiProject: ... + + +@pytest_asyncio.fixture +async def create_project_model(sanic_client, regular_user: UserInfo, admin_user: UserInfo) -> CreateProjectCall: + async def create_project_helper( + name: str, user: UserInfo | None = None, members: list[dict[str, str]] | None = None, **payload + ) -> ApiProject: + if "name" not in payload: + payload.update({"name": name}) + + user = user or regular_user + headers = __make_headers(user, admin=user.id == admin_user.id) + if "namespace" not in payload: + payload.update({"namespace": user.namespace.path.serialize()}) + + _, response = await sanic_client.post("/api/data/projects", headers=headers, json=payload) + + assert response.status_code == 201, response.text + project = response.json + + if members: + _, response = await sanic_client.patch( + f"/api/data/projects/{project['id']}/members", headers=headers, json=members + ) + + assert response.status_code == 200, response.text + + return ApiProject.model_validate(project) + + return create_project_helper + + +class CreateUserCall(Protocol): + async def __call__(self, user: APIUser) -> UserInfo: ... 
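+
+# A minimal usage sketch (hypothetical; the exact APIUser fields are assumed): the
+# fixture below resolves an APIUser to its stored UserInfo, e.g.
+#
+#     user_info = await create_user(APIUser(id="user-3", first_name="Jane", last_name="Roe"))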
+ + +@pytest_asyncio.fixture +async def create_user(app_manager_instance: TestDependencyManager) -> CreateUserCall: + repo = app_manager_instance.kc_user_repo + + async def create_user_helper(user: APIUser) -> UserInfo: + info = await repo.get_or_create_user(user, user.id or "") + if info is None: + raise Exception(f"User {user} could not be created") + return info + + return create_user_helper + + @pytest_asyncio.fixture -async def create_project_copy(sanic_client, user_headers, admin_headers, regular_user, admin_user): +async def create_project_copy(sanic_client, user_headers, headers_from_user): async def create_project_copy_helper( id: str, namespace: str, @@ -252,7 +417,7 @@ async def create_project_copy_helper( members: list[dict[str, str]] = None, **payload, ) -> dict[str, Any]: - headers = user_headers if user is None or user is regular_user else admin_headers + headers = headers_from_user(user) if user is not None else user_headers copy_payload = {"slug": Slug.from_name(name).value} copy_payload.update(payload) copy_payload.update({"namespace": namespace, "name": name}) @@ -299,6 +464,38 @@ async def create_group_helper( return create_group_helper +class CreateGroupCall(Protocol): + async def __call__( + self, name: str, user: UserInfo | None = None, members: list[dict[str, str]] | None = None, **payload + ) -> ApiGroup: ... + + +@pytest_asyncio.fixture +async def create_group_model(sanic_client, regular_user: UserInfo, admin_user: UserInfo) -> CreateGroupCall: + async def create_group_helper( + name: str, user: UserInfo | None = None, members: list[dict[str, str]] | None = None, **payload + ) -> ApiGroup: + user = user or regular_user + headers = __make_headers(user, admin=user.id == admin_user.id) + group_payload = {"slug": Slug.from_name(name).value, "name": name} + group_payload.update(payload) + _, response = await sanic_client.post("/api/data/groups", headers=headers, json=group_payload) + + assert response.status_code == 201, response.text + group = response.json + + if members: + _, response = await sanic_client.patch( + f"/api/data/groups/{group['slug']}/members", headers=headers, json=members + ) + + assert response.status_code == 200, response.text + + return ApiGroup.model_validate(group) + + return create_group_helper + + @pytest_asyncio.fixture async def create_session_environment(sanic_client: SanicASGITestClient, admin_headers): async def create_session_environment_helper(name: str, **payload) -> dict[str, Any]: @@ -306,6 +503,7 @@ async def create_session_environment_helper(name: str, **payload) -> dict[str, A payload.update({"name": name}) payload["description"] = payload.get("description") or "A session environment." 
payload["container_image"] = payload.get("container_image") or "some_image:some_tag" + payload["environment_image_source"] = payload.get("environment_image_source") or "image" _, res = await sanic_client.post("/api/data/environments", headers=admin_headers, json=payload) @@ -327,6 +525,7 @@ async def create_session_launcher_helper(name: str, project_id: str, **payload) "environment_kind": "CUSTOM", "name": "Test", "container_image": "some_image:some_tag", + "environment_image_source": "image", } _, res = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload) @@ -349,7 +548,7 @@ async def create_data_connector_helper( "name": name, "description": "A data connector", "visibility": "private", - "namespace": user.namespace.slug, + "namespace": user.namespace.path.serialize(), "storage": { "configuration": { "type": "s3", @@ -371,6 +570,41 @@ async def create_data_connector_helper( return create_data_connector_helper +class CreateDataConnectorCall(Protocol): + async def __call__(self, name: str, user: UserInfo | None = None, **payload) -> ApiDataConnector: ... + + +@pytest_asyncio.fixture +async def create_data_connector_model( + sanic_client: SanicASGITestClient, regular_user: UserInfo, admin_user: UserInfo +) -> CreateDataConnectorCall: + async def create_dc_helper(name: str, user: UserInfo | None = None, **payload) -> ApiDataConnector: + user = user or regular_user + headers = __make_headers(user, admin=user.id == admin_user.id) + dc_payload = { + "name": name, + "visibility": "private", + "namespace": user.namespace.path.serialize(), + "storage": { + "configuration": { + "type": "s3", + "provider": "AWS", + "region": "us-east-1", + }, + "source_path": "bucket/my-folder", + "target_path": "my/target", + }, + "keywords": [], + } + dc_payload.update(payload) + _, response = await sanic_client.post("/api/data/data_connectors", headers=headers, json=dc_payload) + + assert response.status_code == 201, response.text + return ApiDataConnector.model_validate(response.json) + + return create_dc_helper + + @pytest_asyncio.fixture def create_openbis_data_connector(sanic_client: SanicASGITestClient, regular_user: UserInfo, user_headers): async def create_openbis_data_connector_helper( @@ -406,7 +640,7 @@ async def create_openbis_data_connector_helper( @pytest_asyncio.fixture async def create_data_connector_and_link_project( - sanic_client, regular_user, user_headers, admin_user, admin_headers, create_data_connector + regular_user, user_headers, admin_user, admin_headers, create_data_connector, link_data_connector ): async def create_data_connector_and_link_project_helper( name: str, project_id: str, admin: bool = False, **payload @@ -416,13 +650,7 @@ async def create_data_connector_and_link_project_helper( data_connector = await create_data_connector(name, user=user, headers=headers, **payload) data_connector_id = data_connector["id"] - payload = {"project_id": project_id} - - _, response = await sanic_client.post( - f"/api/data/data_connectors/{data_connector_id}/project_links", headers=headers, json=payload - ) - - assert response.status_code == 201, response.text + response = await link_data_connector(project_id, data_connector_id, headers=headers) data_connector_link = response.json return data_connector, data_connector_link @@ -430,12 +658,25 @@ async def create_data_connector_and_link_project_helper( return create_data_connector_and_link_project_helper +@pytest.fixture +def link_data_connector(sanic_client: SanicASGITestClient): + async def 
_link_data_connector(project_id: str, dc_id: str, headers: dict[str, str]) -> Response: + payload = {"project_id": project_id} + _, response = await sanic_client.post( + f"/api/data/data_connectors/{dc_id}/project_links", headers=headers, json=payload + ) + assert response.status_code == 201, response.text + return response + + return _link_data_connector + + @pytest_asyncio.fixture -async def create_resource_pool(sanic_client, user_headers, admin_headers): +async def create_resource_pool(sanic_client, user_headers, admin_headers, valid_resource_pool_payload): async def create_resource_pool_helper(admin: bool = False, **payload) -> dict[str, Any]: headers = admin_headers if admin else user_headers - payload = payload.copy() - _, res = await sanic_client.post("/api/data/resource_pools", headers=headers, json=payload) + valid_resource_pool_payload.update(payload) + _, res = await sanic_client.post("/api/data/resource_pools", headers=headers, json=valid_resource_pool_payload) assert res.status_code == 201, res.text assert res.json is not None return res.json @@ -489,10 +730,10 @@ async def valid_resource_class_payload() -> dict[str, Any]: @pytest_asyncio.fixture async def secrets_sanic_client( - secrets_storage_app_config: SecretsConfig, users: list[UserInfo] + secrets_storage_app_manager: SecretsDependencyManager, users: list[UserInfo] ) -> AsyncGenerator[SanicASGITestClient, None]: - app = Sanic(secrets_storage_app_config.app_name) - app = register_secrets_handlers(app, secrets_storage_app_config) + app = Sanic(secrets_storage_app_manager.config.app_name) + app = register_secrets_handlers(app, secrets_storage_app_manager) async with SanicReusableASGITestClient(app) as client: yield client @@ -504,3 +745,18 @@ def pytest_addoption(parser): @pytest_asyncio.fixture(scope="session") def disable_cluster_creation(request): return request.config.getoption("--disable-cluster-creation") + + +def __make_headers(user: UserInfo, admin: bool = False) -> dict[str, str]: + access_token = json.dumps( + { + "is_admin": admin, + "id": user.id, + "name": f"{user.first_name} {user.last_name}", + "first_name": user.first_name, + "last_name": user.last_name, + "email": user.email, + "full_name": f"{user.first_name} {user.last_name}", + } + ) + return {"Authorization": f"Bearer {access_token}"} diff --git a/test/bases/renku_data_services/data_api/test_clusters.py b/test/bases/renku_data_services/data_api/test_clusters.py new file mode 100644 index 000000000..ef475e6c0 --- /dev/null +++ b/test/bases/renku_data_services/data_api/test_clusters.py @@ -0,0 +1,228 @@ +from copy import deepcopy + +import pytest +from sanic_testing.testing import SanicASGITestClient + +cluster_payload = { + "config_name": "a-filename_with.0.9_AND_UPPER_CASE.yaml", + "name": "test-cluster-post", + "session_protocol": "http", + "session_host": "localhost", + "session_port": 8080, + "session_path": "/renku-sessions", + "session_tls_secret_name": "a-server-domain-name-tls", + "session_ingress_annotations": { + "kubernetes.io/ingress.class": "nginx", + "cert-manager.io/cluster-issuer": "letsencrypt-production", + "nginx.ingress.kubernetes.io/configuration-snippet": """more_set_headers "Content-Security-Policy: """ + + """frame-ancestors 'self'""", + }, +} + +cluster_payload_with_storage = deepcopy(cluster_payload) +cluster_payload_with_storage["session_storage_class"] = "an-arbitrary-class-name" + + +@pytest.mark.parametrize( + "expected_status_code,auth,url", + [ + (401, False, "/api/data/clusters/"), + (200, True, "/api/data/clusters/"), + 
], +) +@pytest.mark.asyncio +async def test_clusters_get( + sanic_client: SanicASGITestClient, admin_headers: dict[str, str], expected_status_code: int, auth: bool, url: str +) -> None: + if auth: + _, res = await sanic_client.get(url, headers=admin_headers) + else: + _, res = await sanic_client.get(url) + assert res.status_code == expected_status_code, res.text + + +@pytest.mark.parametrize( + "expected_status_code,auth,url,payload", + [ + (401, False, "/api/data/clusters/", cluster_payload), + (401, False, "/api/data/clusters/", cluster_payload_with_storage), + (201, True, "/api/data/clusters/", cluster_payload), + (201, True, "/api/data/clusters/", cluster_payload_with_storage), + ], +) +@pytest.mark.asyncio +async def test_clusters_post( + sanic_client: SanicASGITestClient, + admin_headers: dict[str, str], + expected_status_code: int, + url: str, + auth: bool, + payload, +) -> None: + if auth: + _, res = await sanic_client.post(url, headers=admin_headers, json=payload) + else: + _, res = await sanic_client.post(url, json=payload) + assert res.status_code == expected_status_code, res.text + + +@pytest.mark.parametrize( + "expected_status_code,auth,cluster_id", + [ + (401, False, "ZZZZZZZZZZZZZZZZZZZZZZZZZZ"), + (404, True, "ZZZZZZZZZZZZZZZZZZZZZZZZZZ"), + (401, False, "ZZZZZZZZZZZZZZZZZZZZZZZZZZYY"), + (404, True, "ZZZZZZZZZZZZZZZZZZZZZZZZZZYY"), + (401, False, "XX"), + (404, True, "XX"), + (401, False, None), + (200, True, None), + ], +) +@pytest.mark.asyncio +async def test_cluster_get_id( + sanic_client: SanicASGITestClient, + admin_headers: dict[str, str], + expected_status_code: int, + auth: bool, + cluster_id: str | None, +) -> None: + base_url = "/api/data/clusters" + + if cluster_id is None: + _, res = await sanic_client.post(base_url, headers=admin_headers, json=cluster_payload) + assert res.status_code == 201, res.text + cluster_id = res.json["id"] + + url = f"{base_url}/{cluster_id}" + + if auth: + _, res = await sanic_client.get(url, headers=admin_headers) + else: + _, res = await sanic_client.get(url) + assert res.status_code == expected_status_code, res.text + + +async def _clusters_request( + sanic_client: SanicASGITestClient, + method: str, + admin_headers: dict[str, str], + expected_status_code: int, + auth: bool, + cluster_id: str | None, + payload: dict | None, + post_payload: dict, +) -> None: + base_url = "/api/data/clusters" + + check_payload = None + if cluster_id is None: + _, res = await sanic_client.post(base_url, headers=admin_headers, json=post_payload) + assert res.status_code == 201, res.text + cluster_id = res.json["id"] + + check_payload = deepcopy(payload) + if "id" not in check_payload: + check_payload["id"] = cluster_id + + url = f"{base_url}/{cluster_id}" + + if auth: + _, res = await sanic_client.request(url=url, method=method, headers=admin_headers, json=payload) + else: + _, res = await sanic_client.request(url=url, method=method, json=payload) + + assert res.status_code == expected_status_code, res.text + if res.is_success and check_payload is not None: + assert res.json == check_payload, f"\nRESULT: {res.json}\nEXPECT: {check_payload}\n" + + +put_patch_common_test_inputs = [ + (401, False, "ZZZZZZZZZZZZZZZZZZZZZZZZZZ", None, cluster_payload), + (422, True, "ZZZZZZZZZZZZZZZZZZZZZZZZZZ", None, cluster_payload), + (401, False, "ZZZZZZZZZZZZZZZZZZZZZZZZZZYY", None, cluster_payload), + (422, True, "ZZZZZZZZZZZZZZZZZZZZZZZZZZYY", None, cluster_payload), + (401, False, "XX", None, cluster_payload), + (422, True, "XX", None, cluster_payload), + (401, 
False, None, {"name": "new_name"}, cluster_payload), + (201, True, None, cluster_payload, cluster_payload), + (201, True, None, cluster_payload_with_storage, cluster_payload_with_storage), + (201, True, None, cluster_payload_with_storage, cluster_payload), + ( + 422, + True, + None, + {"name": "new_name", "config_name": "a-filename.yaml", "unknown_field": 42}, + cluster_payload, + ), + (404, True, "ZZZZZZZZZZZZZZZZZZZZZZZZZZ", cluster_payload, cluster_payload), +] + + +@pytest.mark.parametrize("expected_status_code,auth,cluster_id,payload,post_payload", put_patch_common_test_inputs) +@pytest.mark.asyncio +async def test_clusters_put( + sanic_client: SanicASGITestClient, + admin_headers: dict[str, str], + expected_status_code: int, + auth: bool, + cluster_id: str | None, + payload: dict | None, + post_payload: dict, +) -> None: + await _clusters_request( + sanic_client, "PUT", admin_headers, expected_status_code, auth, cluster_id, payload, post_payload + ) + + +@pytest.mark.parametrize("expected_status_code,auth,cluster_id,payload,post_payload", put_patch_common_test_inputs) +@pytest.mark.asyncio +async def test_clusters_patch( + sanic_client: SanicASGITestClient, + admin_headers: dict[str, str], + expected_status_code: int, + auth: bool, + cluster_id: str | None, + payload: dict | None, + post_payload: dict, +) -> None: + await _clusters_request( + sanic_client, "PATCH", admin_headers, expected_status_code, auth, cluster_id, payload, post_payload + ) + + +@pytest.mark.parametrize( + "expected_status_code,auth,cluster_id", + [ + (401, False, "ZZZZZZZZZZZZZZZZZZZZZZZZZZ"), + (204, True, "ZZZZZZZZZZZZZZZZZZZZZZZZZZ"), + (401, False, "ZZZZZZZZZZZZZZZZZZZZZZZZZZYY"), + (204, True, "ZZZZZZZZZZZZZZZZZZZZZZZZZZYY"), + (401, False, "XX"), + (204, True, "XX"), + (401, False, None), + (204, True, None), + ], +) +@pytest.mark.asyncio +async def test_cluster_delete( + sanic_client: SanicASGITestClient, + admin_headers: dict[str, str], + expected_status_code: int, + auth: bool, + cluster_id: str | None, +) -> None: + base_url = "/api/data/clusters" + + if cluster_id is None: + _, res = await sanic_client.post(base_url, headers=admin_headers, json=cluster_payload) + assert res.status_code == 201, res.text + cluster_id = res.json["id"] + + url = f"{base_url}/{cluster_id}" + + if auth: + _, res = await sanic_client.delete(url, headers=admin_headers) + else: + _, res = await sanic_client.delete(url) + assert res.status_code == expected_status_code, res.text diff --git a/test/bases/renku_data_services/data_api/test_connected_services.py b/test/bases/renku_data_services/data_api/test_connected_services.py index 49450925d..7c2ba1c61 100644 --- a/test/bases/renku_data_services/data_api/test_connected_services.py +++ b/test/bases/renku_data_services/data_api/test_connected_services.py @@ -8,18 +8,18 @@ from sanic import Sanic from sanic_testing.testing import SanicASGITestClient -from renku_data_services.app_config import Config from renku_data_services.connected_services.dummy_async_oauth2_client import DummyAsyncOAuth2Client from renku_data_services.data_api.app import register_all_handlers +from renku_data_services.data_api.dependencies import DependencyManager from test.utils import SanicReusableASGITestClient @pytest_asyncio.fixture -async def oauth2_test_client(app_config: Config) -> SanicASGITestClient: - app_config.async_oauth2_client_class = DummyAsyncOAuth2Client - app_config.connected_services_repo.async_oauth2_client_class = DummyAsyncOAuth2Client - app = Sanic(app_config.app_name) - app = 
register_all_handlers(app, app_config) +async def oauth2_test_client(app_manager: DependencyManager) -> SanicASGITestClient: + app_manager.async_oauth2_client_class = DummyAsyncOAuth2Client + app_manager.connected_services_repo.async_oauth2_client_class = DummyAsyncOAuth2Client + app = Sanic(app_manager.app_name) + app = register_all_handlers(app, app_manager) async with SanicReusableASGITestClient(app) as client: yield client diff --git a/test/bases/renku_data_services/data_api/test_data_connectors.py b/test/bases/renku_data_services/data_api/test_data_connectors.py index 910702483..7f64a4306 100644 --- a/test/bases/renku_data_services/data_api/test_data_connectors.py +++ b/test/bases/renku_data_services/data_api/test_data_connectors.py @@ -1,19 +1,53 @@ +import warnings +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any, cast + import pytest +from httpx import Response from sanic_testing.testing import SanicASGITestClient +from renku_data_services.authz.models import Visibility +from renku_data_services.base_models.core import NamespacePath, ProjectPath +from renku_data_services.data_connectors import core +from renku_data_services.namespace.models import NamespaceKind +from renku_data_services.storage.rclone import RCloneDOIMetadata from renku_data_services.users.models import UserInfo from renku_data_services.utils.core import get_openbis_session_token from test.bases.renku_data_services.data_api.utils import merge_headers +if TYPE_CHECKING: + from pytest import MonkeyPatch + + +async def create_data_connector( + sanic_client: SanicASGITestClient, headers: dict[str, Any], namespace: str, slug: str, private: bool +) -> Response: + storage_config = { + "configuration": {"type": "s3", "endpoint": "http://s3.aws.com"}, + "source_path": "giab", + "target_path": "giab", + } + payload = { + "name": slug, + "namespace": namespace, + "slug": slug, + "storage": storage_config, + "visibility": "private" if private else "public", + } + _, response = await sanic_client.post("/api/data/data_connectors", headers=headers, json=payload) + return cast(Response, response) + @pytest.mark.asyncio -async def test_post_data_connector(sanic_client: SanicASGITestClient, regular_user: UserInfo, user_headers) -> None: +async def test_post_data_connector( + sanic_client: SanicASGITestClient, regular_user: UserInfo, user_headers, app_manager +) -> None: payload = { "name": "My data connector", "slug": "my-data-connector", "description": "A data connector", "visibility": "public", - "namespace": regular_user.namespace.slug, + "namespace": regular_user.namespace.path.serialize(), "storage": { "configuration": { "type": "s3", @@ -44,16 +78,74 @@ async def test_post_data_connector(sanic_client: SanicASGITestClient, regular_us assert data_connector.get("visibility") == "public" assert data_connector.get("description") == "A data connector" assert set(data_connector.get("keywords")) == {"keyword 1", "keyword.2", "keyword-3", "KEYWORD_4"} + app_manager.metrics.data_connector_created.assert_called_once() + + # Check that we can retrieve the data connector + _, response = await sanic_client.get(f"/api/data/data_connectors/{data_connector['id']}", headers=user_headers) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == data_connector["id"] + + # Check that we can retrieve the data connector by slug + _, response = await sanic_client.get( + 
f"/api/data/namespaces/{data_connector['namespace']}/data_connectors/{data_connector['slug']}", + headers=user_headers, + ) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == data_connector["id"] + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "doi", ["10.5281/zenodo.2600782", "doi:10.5281/zenodo.2600782", "https://doi.org/10.5281/zenodo.2600782"] +) +async def test_post_global_data_connector( + sanic_client: SanicASGITestClient, user_headers: dict[str, str], monkeypatch: "MonkeyPatch", doi: str +) -> None: + # The DOI resolver seems to block requests from GitHub action runners, so we mock its response + metadata = RCloneDOIMetadata( + DOI="10.5281/zenodo.2600782", + URL="https://doi.org/10.5281/zenodo.2600782", + metadataURL="https://zenodo.org/api/records/3542869", + provider="zenodo", + ) + _mock_get_doi_metadata(metadata=metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) + + payload = { + "storage": { + "configuration": {"type": "doi", "doi": doi}, + "source_path": "", + "target_path": "", + }, + } + + _, response = await sanic_client.post("/api/data/data_connectors/global", headers=user_headers, json=payload) + + assert response.status_code == 201, response.text + assert response.json is not None + data_connector = response.json + assert data_connector.get("name") == "SwissDataScienceCenter/renku-python: Version 0.7.2" + assert data_connector.get("slug") == "doi-10.5281-zenodo.2600782" + assert data_connector.get("storage") is not None + storage = data_connector["storage"] + assert storage.get("storage_type") == "doi" + assert storage.get("source_path") == "/" + assert storage.get("target_path") == "swissdatasciencecenter-renku-p-doi-10.5281-zenodo.2600782" + assert storage.get("readonly") is True + assert data_connector.get("visibility") == "public" + assert data_connector.get("description") is not None + assert set(data_connector.get("keywords")) == set() # Check that we can retrieve the data connector - _, response = await sanic_client.get(f"/api/data/data_connectors/{data_connector["id"]}", headers=user_headers) + _, response = await sanic_client.get(f"/api/data/data_connectors/{data_connector['id']}", headers=user_headers) assert response.status_code == 200, response.text assert response.json is not None assert response.json.get("id") == data_connector["id"] # Check that we can retrieve the data connector by slug _, response = await sanic_client.get( - f"/api/data/namespaces/{data_connector["namespace"]}/data_connectors/{data_connector["slug"]}", + f"/api/data/data_connectors/global/{data_connector['slug']}", headers=user_headers, ) assert response.status_code == 200, response.text @@ -61,6 +153,129 @@ async def test_post_data_connector(sanic_client: SanicASGITestClient, regular_us assert response.json.get("id") == data_connector["id"] +@pytest.mark.asyncio +async def test_post_global_data_connector_dataverse( + sanic_client: SanicASGITestClient, user_headers: dict[str, str], monkeypatch: "MonkeyPatch" +) -> None: + # The DOI resolver seems to block requests from GitHub action runners, so we mock its response + metadata = RCloneDOIMetadata( + DOI="10.7910/DVN/2SA6SN", + URL="https://doi.org/10.7910/DVN/2SA6SN", + metadataURL="https://dataverse.harvard.edu/api/datasets/:persistentId/?persistentId=doi%3A10.7910%2FDVN%2F2SA6SN", + provider="dataverse", + ) + _mock_get_doi_metadata(metadata=metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) + + doi = "10.7910/DVN/2SA6SN" + payload = { 
+ "storage": { + "configuration": {"type": "doi", "doi": doi}, + "source_path": "", + "target_path": "", + }, + } + + _, response = await sanic_client.post("/api/data/data_connectors/global", headers=user_headers, json=payload) + + assert response.status_code == 201, response.text + assert response.json is not None + data_connector = response.json + assert data_connector.get("name") == "Dataset metadata of known Dataverse installations, August 2024" + assert data_connector.get("slug") == "doi-10.7910-dvn-2sa6sn" + assert data_connector.get("storage") is not None + storage = data_connector["storage"] + assert storage.get("storage_type") == "doi" + assert storage.get("source_path") == "/" + assert storage.get("target_path") == "dataset-metadata-of-known-data-doi-10.7910-dvn-2sa6sn" + assert storage.get("readonly") is True + assert data_connector.get("visibility") == "public" + assert data_connector.get("description") is not None + assert set(data_connector.get("keywords")) == {"dataset metadata", "dataverse", "metadata blocks"} + + +@pytest.mark.asyncio +async def test_post_global_data_connector_unauthorized( + sanic_client: SanicASGITestClient, +) -> None: + payload = { + "storage": { + "configuration": {"type": "doi", "doi": "10.5281/zenodo.15174623"}, + "source_path": "", + "target_path": "", + }, + } + + _, response = await sanic_client.post("/api/data/data_connectors/global", json=payload) + + assert response.status_code == 401, response.text + + +@pytest.mark.asyncio +async def test_post_global_data_connector_invalid_doi( + sanic_client: SanicASGITestClient, + user_headers, +) -> None: + payload = { + "storage": { + "configuration": {"type": "doi", "doi": "foo/bar"}, + "source_path": "", + "target_path": "", + }, + } + + _, response = await sanic_client.post("/api/data/data_connectors/global", headers=user_headers, json=payload) + + assert response.status_code == 422, response.text + + +@pytest.mark.asyncio +async def test_post_global_data_connector_no_duplicates( + sanic_client: SanicASGITestClient, user_headers: dict[str, str], monkeypatch: "MonkeyPatch" +) -> None: + # The DOI resolver seems to block requests from GitHub action runners, so we mock its response + metadata = RCloneDOIMetadata( + DOI="10.5281/zenodo.2600782", + URL="https://doi.org/10.5281/zenodo.2600782", + metadataURL="https://zenodo.org/api/records/3542869", + provider="zenodo", + ) + _mock_get_doi_metadata(metadata=metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) + + doi = "10.5281/zenodo.2600782" + payload = { + "storage": { + "configuration": {"type": "doi", "doi": doi}, + "source_path": "", + "target_path": "", + }, + } + + _, response = await sanic_client.post("/api/data/data_connectors/global", headers=user_headers, json=payload) + + assert response.status_code == 201, response.text + assert response.json is not None + data_connector = response.json + data_connector_id = data_connector["id"] + assert data_connector.get("name") == "SwissDataScienceCenter/renku-python: Version 0.7.2" + assert data_connector.get("slug") == "doi-10.5281-zenodo.2600782" + + # Check that posting the same DOI returns the same data connector ULID + doi = "https://doi.org/10.5281/zenodo.2600782" + payload = { + "storage": { + "configuration": {"type": "doi", "doi": doi}, + "source_path": "", + "target_path": "", + }, + } + + _, response = await sanic_client.post("/api/data/data_connectors/global", headers=user_headers, json=payload) + + assert response.status_code == 200, response.text + assert response.json is not None + 
assert response.json.get("id") == data_connector_id + + @pytest.mark.asyncio async def test_post_data_connector_with_s3_url( sanic_client: SanicASGITestClient, regular_user: UserInfo, user_headers @@ -70,7 +285,7 @@ async def test_post_data_connector_with_s3_url( "slug": "my-data-connector", "description": "A data connector", "visibility": "public", - "namespace": regular_user.namespace.slug, + "namespace": regular_user.namespace.path.serialize(), "storage": { "storage_url": "s3://my-bucket", "target_path": "my/target", @@ -107,7 +322,7 @@ async def test_post_data_connector_with_azure_url( "slug": "my-data-connector", "description": "A data connector", "visibility": "public", - "namespace": regular_user.namespace.slug, + "namespace": regular_user.namespace.path.serialize(), "storage": { "storage_url": "azure://mycontainer/myfolder", "target_path": "my/target", @@ -146,16 +361,14 @@ async def test_post_data_connector_with_invalid_visibility(sanic_client: SanicAS @pytest.mark.asyncio -@pytest.mark.parametrize("keyword", ["invalid chars '", "Nön English"]) -async def test_post_data_connector_with_invalid_keywords( - sanic_client: SanicASGITestClient, user_headers, keyword -) -> None: +async def test_post_data_connector_with_invalid_keywords(sanic_client: SanicASGITestClient, user_headers) -> None: + keyword = "this keyword is way too long........................................................................" payload = {"keywords": [keyword]} _, response = await sanic_client.post("/api/data/data_connectors", headers=user_headers, json=payload) assert response.status_code == 422, response.text - assert "String should match pattern '^[A-Za-z0-9\\s\\-_.]*$'" in response.json["error"]["message"] + assert "String should have at most 99 characters" in response.json["error"]["message"] @pytest.mark.asyncio @@ -164,7 +377,7 @@ async def test_post_data_connector_with_invalid_namespace( user_headers, member_1_user: UserInfo, ) -> None: - namespace = member_1_user.namespace.slug + namespace = member_1_user.namespace.path.serialize() _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}", headers=user_headers) assert response.status_code == 200, response.text @@ -318,6 +531,17 @@ async def test_get_one_data_connector(sanic_client: SanicASGITestClient, create_ assert data_connector.get("slug") == "a-new-data-connector" +@pytest.mark.asyncio +async def test_head_one_data_connector(sanic_client: SanicASGITestClient, create_data_connector, user_headers) -> None: + data_connector = await create_data_connector("A new data connector") + data_connector_id = data_connector["id"] + + _, response = await sanic_client.head(f"/api/data/data_connectors/{data_connector_id}", headers=user_headers) + + assert response.status_code == 200, response.text + assert response.json is None + + @pytest.mark.asyncio async def test_get_one_by_slug_data_connector( sanic_client: SanicASGITestClient, create_data_connector, user_headers @@ -570,7 +794,7 @@ async def test_patch_data_connector_namespace( # Check that we can retrieve the data connector by slug _, response = await sanic_client.get( - f"/api/data/namespaces/{data_connector["namespace"]}/data_connectors/{data_connector["slug"]}", + f"/api/data/namespaces/{data_connector['namespace']}/data_connectors/{data_connector['slug']}", headers=user_headers, ) assert response.status_code == 200, response.text @@ -582,7 +806,7 @@ async def test_patch_data_connector_namespace( async def test_patch_data_connector_with_invalid_namespace( sanic_client: 
SanicASGITestClient, create_data_connector, user_headers, member_1_user: UserInfo ) -> None: - namespace = member_1_user.namespace.slug + namespace = member_1_user.namespace.path.serialize() _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}", headers=user_headers) assert response.status_code == 200, response.text data_connector = await create_data_connector("My data connector") @@ -596,7 +820,7 @@ async def test_patch_data_connector_with_invalid_namespace( f"/api/data/data_connectors/{data_connector_id}", headers=headers, json=patch ) - assert response.status_code == 403, response.text + assert response.status_code == 404, response.text assert "you do not have sufficient permissions" in response.json["error"]["message"] @@ -640,6 +864,119 @@ async def test_patch_data_connector_as_editor( assert response.json.get("description") == "A new description" +@pytest.mark.asyncio +async def test_patch_data_connector_slug( + sanic_client: SanicASGITestClient, + create_data_connector, + user_headers, +) -> None: + await create_data_connector("Data connector 1") + await create_data_connector("Data connector 2") + data_connector = await create_data_connector("My data connector") + data_connector_id = data_connector["id"] + namespace = data_connector["namespace"] + old_slug = data_connector["slug"] + await create_data_connector("Data connector 3") + + # Patch a data connector + headers = merge_headers(user_headers, {"If-Match": data_connector["etag"]}) + new_slug = "some-updated-slug" + patch = {"slug": new_slug} + _, response = await sanic_client.patch( + f"/api/data/data_connectors/{data_connector_id}", headers=headers, json=patch + ) + + assert response.status_code == 200, response.text + + # Check that the data connector's slug has been updated + _, response = await sanic_client.get(f"/api/data/data_connectors/{data_connector_id}", headers=user_headers) + assert response.status_code == 200, response.text + data_connector = response.json + assert data_connector["id"] == data_connector_id + assert data_connector["name"] == "My data connector" + assert data_connector["namespace"] == namespace + assert data_connector["slug"] == new_slug + + # Check that we can get the data connector with the new slug + _, response = await sanic_client.get( + f"/api/data/namespaces/{namespace}/data_connectors/{new_slug}", headers=user_headers + ) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == data_connector_id + assert data_connector["namespace"] == namespace + assert data_connector["slug"] == new_slug + + # Check that we can get the data connector with the old slug + _, response = await sanic_client.get( + f"/api/data/namespaces/{namespace}/data_connectors/{old_slug}", headers=user_headers + ) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == data_connector_id + assert data_connector["namespace"] == namespace + assert data_connector["slug"] == new_slug + + +@pytest.mark.asyncio +async def test_patch_global_data_connector( + sanic_client: SanicASGITestClient, + user_headers: dict[str, str], + admin_headers: dict[str, str], + monkeypatch: "MonkeyPatch", +) -> None: + # The DOI resolver seems to block requests from GitHub action runners, so we mock its response + metadata = RCloneDOIMetadata( + DOI="10.5281/zenodo.2600782", + URL="https://doi.org/10.5281/zenodo.2600782", + metadataURL="https://zenodo.org/api/records/3542869", + provider="zenodo", + ) + 
_mock_get_doi_metadata(metadata=metadata, sanic_client=sanic_client, monkeypatch=monkeypatch)
+
+    doi = "10.5281/zenodo.2600782"
+    payload = {
+        "storage": {
+            "configuration": {"type": "doi", "doi": doi},
+            "source_path": "",
+            "target_path": "",
+        },
+    }
+
+    _, response = await sanic_client.post("/api/data/data_connectors/global", headers=user_headers, json=payload)
+
+    assert response.status_code == 201, response.text
+    assert response.json is not None
+    data_connector = response.json
+    data_connector_id = data_connector["id"]
+    assert data_connector.get("name") == "SwissDataScienceCenter/renku-python: Version 0.7.2"
+
+    # Check that a regular user cannot patch a global data connector
+    headers = merge_headers(user_headers, {"If-Match": data_connector["etag"]})
+    payload = {"name": "New name", "description": "new description"}
+
+    _, response = await sanic_client.patch(
+        f"/api/data/data_connectors/{data_connector_id}", headers=headers, json=payload
+    )
+
+    assert response.status_code == 404, response.text
+
+    # Check that an admin user can patch a global data connector
+    headers = merge_headers(admin_headers, {"If-Match": data_connector["etag"]})
+
+    _, response = await sanic_client.patch(
+        f"/api/data/data_connectors/{data_connector_id}", headers=headers, json=payload
+    )
+
+    assert response.status_code == 200, response.text
+    assert response.json is not None
+    assert response.json.get("id") == data_connector_id
+    assert response.json.get("name") == "New name"
+    assert response.json.get("slug") == data_connector["slug"]
+    assert response.json.get("description") == "new description"
+    assert response.json.get("storage") == data_connector["storage"]
+
+
 @pytest.mark.asyncio
 async def test_delete_data_connector(sanic_client: SanicASGITestClient, create_data_connector, user_headers) -> None:
     await create_data_connector("Data connector 1")
@@ -657,6 +994,54 @@ async def test_delete_data_connector(sanic_client: SanicASGITestClient, create_d
     assert {dc["name"] for dc in response.json} == {"Data connector 1", "Data connector 3"}
 
 
+@pytest.mark.asyncio
+async def test_delete_global_data_connector(
+    sanic_client: SanicASGITestClient,
+    user_headers: dict[str, str],
+    admin_headers: dict[str, str],
+    monkeypatch: "MonkeyPatch",
+) -> None:
+    # The DOI resolver seems to block requests from GitHub action runners, so we mock its response
+    metadata = RCloneDOIMetadata(
+        DOI="10.5281/zenodo.2600782",
+        URL="https://doi.org/10.5281/zenodo.2600782",
+        metadataURL="https://zenodo.org/api/records/3542869",
+        provider="zenodo",
+    )
+    _mock_get_doi_metadata(metadata=metadata, sanic_client=sanic_client, monkeypatch=monkeypatch)
+
+    doi = "10.5281/zenodo.2600782"
+    payload = {
+        "storage": {
+            "configuration": {"type": "doi", "doi": doi},
+            "source_path": "",
+            "target_path": "",
+        },
+    }
+
+    _, response = await sanic_client.post("/api/data/data_connectors/global", headers=user_headers, json=payload)
+
+    assert response.status_code == 201, response.text
+    assert response.json is not None
+    data_connector = response.json
+    data_connector_id = data_connector["id"]
+
+    # Check that a regular user cannot delete a global data connector
+    _, response = await sanic_client.delete(f"/api/data/data_connectors/{data_connector_id}", headers=user_headers)
+
+    assert response.status_code == 404, response.text
+
+    # Check that an admin user can delete a global data connector
+    _, response = await sanic_client.delete(f"/api/data/data_connectors/{data_connector_id}", headers=admin_headers)
+
+    assert 
response.status_code == 204, response.text + + _, response = await sanic_client.get("/api/data/data_connectors") + + assert response.status_code == 200, response.text + assert {dc["name"] for dc in response.json} == set() + + @pytest.mark.asyncio async def test_get_data_connector_project_links_empty( sanic_client: SanicASGITestClient, create_data_connector, user_headers @@ -776,7 +1161,7 @@ async def test_post_data_connector_project_link_unauthorized_if_not_project_edit @pytest.mark.asyncio -async def test_post_data_connector_project_link_unauthorized_if_not_data_connector_editor( +async def test_post_data_connector_project_link_succeeds_if_not_data_connector_editor( sanic_client: SanicASGITestClient, create_data_connector, create_project, @@ -810,7 +1195,7 @@ async def test_post_data_connector_project_link_unauthorized_if_not_data_connect f"/api/data/data_connectors/{data_connector_id}/project_links", headers=member_1_headers, json=payload ) - assert response.status_code == 404, response.text + assert response.status_code == 201, response.text @pytest.mark.asyncio @@ -821,6 +1206,7 @@ async def test_post_data_connector_project_link_public_data_connector( user_headers, member_1_headers, member_1_user, + app_manager, ) -> None: data_connector = await create_data_connector( "Data connector 1", user=member_1_user, headers=member_1_headers, visibility="public" @@ -846,11 +1232,12 @@ async def test_post_data_connector_project_link_public_data_connector( assert link.get("data_connector_id") == data_connector_id assert link.get("project_id") == project_id assert link.get("created_by") == "user" + app_manager.metrics.data_connector_linked.assert_called_once() @pytest.mark.asyncio @pytest.mark.parametrize("project_role", ["viewer", "editor", "owner"]) -async def test_post_data_connector_project_link_extends_read_access( +async def test_post_data_connector_project_link_doesnt_extend_read_access( sanic_client: SanicASGITestClient, create_data_connector, create_project, @@ -882,14 +1269,9 @@ async def test_post_data_connector_project_link_extends_read_access( ) assert response.status_code == 201, response.text - # Check that "member_1" can now view the data connector + # Check that "member_1" still cannot view the data connector _, response = await sanic_client.get(f"/api/data/data_connectors/{data_connector_id}", headers=member_1_headers) - assert response.status_code == 200, response.text - assert response.json is not None - assert response.json.get("id") == data_connector_id - assert response.json.get("name") == "Data connector 1" - assert response.json.get("namespace") == "user.doe" - assert response.json.get("slug") == "data-connector-1" + assert response.status_code == 404, response.text @pytest.mark.asyncio @@ -953,7 +1335,7 @@ async def test_delete_data_connector_project_link( link = response.json _, response = await sanic_client.delete( - f"/api/data/data_connectors/{data_connector_id}/project_links/{link["id"]}", headers=user_headers + f"/api/data/data_connectors/{data_connector_id}/project_links/{link['id']}", headers=user_headers ) assert response.status_code == 204, response.text @@ -976,7 +1358,7 @@ async def test_delete_data_connector_project_link( # Check that calling delete again returns a 204 _, response = await sanic_client.delete( - f"/api/data/data_connectors/{data_connector_id}/project_links/{link["id"]}", headers=user_headers + f"/api/data/data_connectors/{data_connector_id}/project_links/{link['id']}", headers=user_headers ) assert response.status_code == 204, response.text @@ 
-1074,13 +1456,13 @@ async def test_patch_data_connector_secrets(
     assert len(secrets) == 2
     assert {s["name"] for s in secrets} == {"access_key_id", "secret_access_key"}
 
-    payload = [
-        {"name": "not_sensitive", "value": "not_sensitive_value"},
-    ]
-    _, response = await sanic_client.patch(
-        f"/api/data/data_connectors/{data_connector_id}/secrets", headers=user_headers, json=payload
-    )
-    assert response.status_code == 422, response.json
+    # Check that the data connector is referenced from the first user secret
+    user_secret_id = secrets[0]["secret_id"]
+    _, response = await sanic_client.get(f"/api/data/user/secrets/{user_secret_id}", headers=user_headers)
+    assert response.status_code == 200, response.json
+    assert response.json is not None
+    assert response.json.get("data_connector_ids") is not None
+    assert {id for id in response.json.get("data_connector_ids")} == {data_connector_id}
 
 
 @pytest.mark.asyncio
@@ -1319,3 +1701,716 @@ async def test_get_data_connector_permissions_cascading_from_group(
     assert permissions.get("write") == expected_permissions["write"]
     assert permissions.get("delete") == expected_permissions["delete"]
     assert permissions.get("change_membership") == expected_permissions["change_membership"]
+
+
+@pytest.mark.asyncio
+async def test_creating_dc_in_project(sanic_client, user_headers) -> None:
+    # Create a group i.e. /test1
+    payload = {
+        "name": "test1",
+        "slug": "test1",
+        "description": "Group 1 Description",
+    }
+    _, response = await sanic_client.post("/api/data/groups", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+
+    # Create a project in the group /test1/prj1
+    payload = {
+        "name": "prj1",
+        "namespace": "test1",
+        "slug": "prj1",
+    }
+    _, response = await sanic_client.post("/api/data/projects", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+    project_id = response.json["id"]
+
+    # Ensure there is only one project
+    _, response = await sanic_client.get("/api/data/projects", headers=user_headers)
+    assert response.status_code == 200, response.text
+    assert len(response.json) == 1
+
+    # Create a data connector in the project /test1/prj1/dc1
+    dc_namespace = "test1/prj1"
+    payload = {
+        "name": "dc1",
+        "namespace": dc_namespace,
+        "slug": "dc1",
+        "storage": {
+            "configuration": {"type": "s3", "endpoint": "http://s3.aws.com"},
+            "source_path": "giab",
+            "target_path": "giab",
+        },
+    }
+    _, response = await sanic_client.post("/api/data/data_connectors", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+    assert response.json["namespace"] == dc_namespace
+    dc_id = response.json["id"]
+
+    # Ensure there is still only one project
+    _, response = await sanic_client.get("/api/data/projects", headers=user_headers)
+    assert response.status_code == 200, response.text
+    assert len(response.json) == 1
+
+    # Ensure that you can read the data connector
+    _, response = await sanic_client.get(f"/api/data/data_connectors/{dc_id}", headers=user_headers)
+    assert response.status_code == 200, response.text
+
+    # Link the data connector to the project
+    payload = {"project_id": project_id}
+    _, response = await sanic_client.post(
+        f"/api/data/data_connectors/{dc_id}/project_links", headers=user_headers, json=payload
+    )
+    assert response.status_code == 201, response.text
+
+    # Ensure that you can see the data connector link
+    _, response = await sanic_client.get(f"/api/data/data_connectors/{dc_id}/project_links", headers=user_headers)
+    assert response.status_code 
== 200, response.text + assert len(response.json) == 1 + dc_link = response.json[0] + assert dc_link["project_id"] == project_id + assert dc_link["data_connector_id"] == dc_id + + # Ensure that you can list data connectors + _, response = await sanic_client.get("/api/data/data_connectors", headers=user_headers) + assert response.status_code == 200, response.text + assert len(response.json) == 1 + assert response.json[0]["namespace"] == dc_namespace + + +@pytest.mark.asyncio +async def test_creating_dc_in_project_no_leak_to_other_project(sanic_client, user_headers, member_1_headers) -> None: + # Create a project owned by member_1 + payload = { + "name": "Project 1", + "namespace": "member-1.doe", + "slug": "project-1", + } + _, res = await sanic_client.post("/api/data/projects", headers=member_1_headers, json=payload) + assert res.status_code == 201, res.text + + payload = { + "name": "Project 1", + "namespace": "user.doe", + "slug": "project-1", + } + _, res = await sanic_client.post("/api/data/projects", headers=user_headers, json=payload) + assert res.status_code == 201, res.text + project = res.json + project_path = f"{project['namespace']}/{project['slug']}" + + payload = { + "name": "My data connector", + "namespace": project_path, + "slug": "my-dc", + "storage": { + "configuration": {"type": "s3", "endpoint": "http://s3.aws.com"}, + "source_path": "giab", + "target_path": "giab", + }, + } + _, res = await sanic_client.post("/api/data/data_connectors", headers=user_headers, json=payload) + assert res.status_code == 201, res.text + assert res.json is not None + dc = res.json + assert dc.get("id") is not None + assert dc.get("name") == "My data connector" + assert dc.get("namespace") == project_path + assert dc.get("slug") == "my-dc" + + +@pytest.mark.asyncio +async def test_users_cannot_see_private_data_connectors_in_project( + sanic_client, + member_1_headers, + member_2_user: UserInfo, + member_2_headers, + user_headers, + regular_user: UserInfo, +) -> None: + # Create a group i.e. 
/test1
+    group_slug = "test1"
+    payload = {
+        "name": group_slug,
+        "slug": group_slug,
+        "description": "Group 1 Description",
+    }
+    _, response = await sanic_client.post("/api/data/groups", headers=member_1_headers, json=payload)
+    assert response.status_code == 201, response.text
+
+    # Add member_2 as a viewer on the group
+    payload = [
+        {
+            "id": member_2_user.id,
+            "role": "viewer",
+        }
+    ]
+    _, response = await sanic_client.patch(
+        f"/api/data/groups/{group_slug}/members", headers=member_1_headers, json=payload
+    )
+    assert response.status_code == 200, response.text
+
+    # Create a public project in the group /test1/prj1
+    payload = {
+        "name": "prj1",
+        "namespace": "test1",
+        "slug": "prj1",
+        "visibility": "public",
+    }
+    _, response = await sanic_client.post("/api/data/projects", headers=member_1_headers, json=payload)
+    assert response.status_code == 201, response.text
+    project_id = response.json["id"]
+
+    # Create a private data connector in the group
+    dc_namespace = "test1"
+    storage_config = {
+        "configuration": {"type": "s3", "endpoint": "http://s3.aws.com"},
+        "source_path": "giab",
+        "target_path": "giab",
+    }
+    payload = {
+        "name": "dc-private",
+        "namespace": dc_namespace,
+        "slug": "dc-private",
+        "storage": storage_config,
+        "visibility": "private",
+    }
+    _, response = await sanic_client.post("/api/data/data_connectors", headers=member_1_headers, json=payload)
+    assert response.status_code == 201, response.text
+    assert response.json["namespace"] == dc_namespace
+    group_dc_id = response.json["id"]
+
+    # Link the private data connector to the project
+    payload = {"project_id": project_id}
+    _, response = await sanic_client.post(
+        f"/api/data/data_connectors/{group_dc_id}/project_links", headers=member_1_headers, json=payload
+    )
+    assert response.status_code == 201, response.text
+
+    # Create a data connector in the project /test1/prj1/dc1
+    dc_namespace = "test1/prj1"
+    payload = {
+        "name": "dc1",
+        "namespace": dc_namespace,
+        "slug": "dc1",
+        "storage": storage_config,
+    }
+    _, response = await sanic_client.post("/api/data/data_connectors", headers=member_1_headers, json=payload)
+    assert response.status_code == 201, response.text
+    assert response.json["namespace"] == dc_namespace
+    project_dc_id = response.json["id"]
+
+    # Link the data connector to the project
+    payload = {"project_id": project_id}
+    _, response = await sanic_client.post(
+        f"/api/data/data_connectors/{project_dc_id}/project_links", headers=member_1_headers, json=payload
+    )
+    assert response.status_code == 201, response.text
+
+    # Ensure that member_1 and member_2 can see both data connectors and their links
+    for req_headers in [member_1_headers, member_2_headers]:
+        _, response = await sanic_client.get("/api/data/data_connectors", headers=req_headers)
+        assert response.status_code == 200, response.text
+        assert len(response.json) == 2
+        assert response.json[0]["id"] == project_dc_id
+        assert response.json[1]["id"] == group_dc_id
+        _, response = await sanic_client.get(
+            f"/api/data/projects/{project_id}/data_connector_links", headers=req_headers
+        )
+        assert len(response.json) == 2
+        assert response.json[0]["data_connector_id"] == group_dc_id
+        assert response.json[1]["data_connector_id"] == project_dc_id
+
+    # The project is public so user should see it
+    _, response = await sanic_client.get(f"/api/data/projects/{project_id}", headers=user_headers)
+    assert response.status_code == 200, response.text
+    # User is not part of the project and the data connector is private so they should not 
see any data connectors + _, response = await sanic_client.get("/api/data/data_connectors", headers=user_headers) + assert response.status_code == 200, response.text + assert len(response.json) == 0 + _, response = await sanic_client.get(f"/api/data/projects/{project_id}/data_connector_links", headers=user_headers) + assert len(response.json) == 0 + + # Anonymous users should see the project but not any of the DCs or the links + _, response = await sanic_client.get(f"/api/data/projects/{project_id}") + assert response.status_code == 200, response.text + _, response = await sanic_client.get("/api/data/data_connectors") + assert response.status_code == 200, response.text + assert len(response.json) == 0 + _, response = await sanic_client.get(f"/api/data/projects/{project_id}/data_connector_links") + assert len(response.json) == 0 + + # Add user to the project + payload = [ + { + "id": regular_user.id, + "role": "viewer", + } + ] + _, response = await sanic_client.patch( + f"/api/data/projects/{project_id}/members", headers=member_1_headers, json=payload + ) + assert response.status_code == 200, response.text + + # Now since the user is part of the project they should see only the project DC but not the private one from + # the group that user does not have access to + _, response = await sanic_client.get("/api/data/data_connectors", headers=user_headers) + assert response.status_code == 200, response.text + assert len(response.json) == 1 + assert response.json[0]["id"] == project_dc_id + _, response = await sanic_client.get(f"/api/data/projects/{project_id}/data_connector_links", headers=user_headers) + assert response.json[0]["data_connector_id"] == project_dc_id + + +@pytest.mark.asyncio +async def test_number_of_inaccessible_data_connector_links_in_project( + sanic_client, + member_1_user: UserInfo, + member_1_headers, + regular_user: UserInfo, + user_headers, +) -> None: + # Create a public project + payload = { + "name": "prj1", + "namespace": member_1_user.namespace.path.serialize(), + "slug": "prj1", + "visibility": "public", + } + _, response = await sanic_client.post("/api/data/projects", headers=member_1_headers, json=payload) + assert response.status_code == 201, response.text + project_id = response.json["id"] + + # Add user to the project + payload = [ + { + "id": regular_user.id, + "role": "viewer", + } + ] + _, response = await sanic_client.patch( + f"/api/data/projects/{project_id}/members", headers=member_1_headers, json=payload + ) + assert response.status_code == 200, response.text + + # Create a private data connector in the project + storage_config = { + "configuration": {"type": "s3", "endpoint": "http://s3.aws.com"}, + "source_path": "giab", + "target_path": "giab", + } + dc_namespace = f"{member_1_user.namespace.path.serialize()}/prj1" + payload = { + "name": "dc1", + "namespace": dc_namespace, + "slug": "dc1", + "storage": storage_config, + "visibility": "private", + } + _, response = await sanic_client.post("/api/data/data_connectors", headers=member_1_headers, json=payload) + assert response.status_code == 201, response.text + assert response.json["namespace"] == dc_namespace + project_dc_id = response.json["id"] + + # Link the data connector to the project + payload = {"project_id": project_id} + _, response = await sanic_client.post( + f"/api/data/data_connectors/{project_dc_id}/project_links", headers=member_1_headers, json=payload + ) + assert response.status_code == 201, response.text + + # Create a private data connector in the owner user namespace + 
storage_config = {
+        "configuration": {"type": "s3", "endpoint": "http://s3.aws.com"},
+        "source_path": "giab",
+        "target_path": "giab",
+    }
+    payload = {
+        "name": "dc1",
+        "namespace": member_1_user.namespace.path.serialize(),
+        "slug": "dc1",
+        "storage": storage_config,
+        "visibility": "private",
+    }
+    _, response = await sanic_client.post("/api/data/data_connectors", headers=member_1_headers, json=payload)
+    assert response.status_code == 201, response.text
+    assert response.json["namespace"] == member_1_user.namespace.path.serialize()
+    user_ns_dc_id = response.json["id"]
+
+    # Link the data connector to the project
+    payload = {"project_id": project_id}
+    _, response = await sanic_client.post(
+        f"/api/data/data_connectors/{user_ns_dc_id}/project_links", headers=member_1_headers, json=payload
+    )
+    assert response.status_code == 201, response.text
+
+    # Ensure that anonymous users cannot see either of the data connectors
+    _, response = await sanic_client.get(f"/api/data/projects/{project_id}/inaccessible_data_connector_links")
+    assert "count" in response.json
+    assert response.json["count"] == 2
+    _, response = await sanic_client.get(f"/api/data/projects/{project_id}/data_connector_links")
+    assert len(response.json) == 0
+
+    # Ensure that the owner gets a zero in their inaccessible data connectors count
+    _, response = await sanic_client.get(
+        f"/api/data/projects/{project_id}/inaccessible_data_connector_links", headers=member_1_headers
+    )
+    assert "count" in response.json
+    assert response.json["count"] == 0
+    _, response = await sanic_client.get(
+        f"/api/data/projects/{project_id}/data_connector_links", headers=member_1_headers
+    )
+    assert len(response.json) == 2
+
+    # Ensure that the project member cannot see one DC: the owner's private DC
+    _, response = await sanic_client.get(
+        f"/api/data/projects/{project_id}/inaccessible_data_connector_links", headers=user_headers
+    )
+    assert "count" in response.json
+    assert response.json["count"] == 1
+    _, response = await sanic_client.get(f"/api/data/projects/{project_id}/data_connector_links", headers=user_headers)
+    assert len(response.json) == 1
+
+
+@dataclass
+class DataConnectorTestCase:
+    ns_kind: NamespaceKind
+    visibility: Visibility | None = None
+
+    def __str__(self) -> str:
+        if self.visibility:
+            return f"<{self.ns_kind.value} {self.visibility.value}>"
+        else:
+            return f"<{self.ns_kind.value}>"
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize(
+    "origin,destination,dc_visibility",
+    [
+        # Moving from project namespace
+        (
+            DataConnectorTestCase(NamespaceKind.project, Visibility.PRIVATE),
+            DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC),
+            Visibility.PRIVATE,
+        ),
+        (
+            DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC),
+            DataConnectorTestCase(NamespaceKind.project, Visibility.PRIVATE),
+            Visibility.PRIVATE,
+        ),
+        (
+            DataConnectorTestCase(NamespaceKind.project, Visibility.PRIVATE),
+            DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC),
+            Visibility.PUBLIC,
+        ),
+        (
+            DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC),
+            DataConnectorTestCase(NamespaceKind.project, Visibility.PRIVATE),
+            Visibility.PUBLIC,
+        ),
+        (
+            DataConnectorTestCase(NamespaceKind.project, Visibility.PRIVATE),
+            DataConnectorTestCase(NamespaceKind.group),
+            Visibility.PRIVATE,
+        ),
+        (
+            DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC),
+            DataConnectorTestCase(NamespaceKind.group),
+            Visibility.PRIVATE,
+        ),
+        (
+            DataConnectorTestCase(NamespaceKind.project, 
Visibility.PRIVATE), + DataConnectorTestCase(NamespaceKind.group), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC), + DataConnectorTestCase(NamespaceKind.group), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.project, Visibility.PRIVATE), + DataConnectorTestCase(NamespaceKind.user), + Visibility.PRIVATE, + ), + ( + DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC), + DataConnectorTestCase(NamespaceKind.user), + Visibility.PRIVATE, + ), + ( + DataConnectorTestCase(NamespaceKind.project, Visibility.PRIVATE), + DataConnectorTestCase(NamespaceKind.user), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC), + DataConnectorTestCase(NamespaceKind.user), + Visibility.PUBLIC, + ), + # Moving from user namespace + ( + DataConnectorTestCase(NamespaceKind.user), + DataConnectorTestCase(NamespaceKind.project, Visibility.PRIVATE), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.user), + DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.user), + DataConnectorTestCase(NamespaceKind.project, Visibility.PRIVATE), + Visibility.PRIVATE, + ), + ( + DataConnectorTestCase(NamespaceKind.user), + DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC), + Visibility.PRIVATE, + ), + ( + DataConnectorTestCase(NamespaceKind.user), + DataConnectorTestCase(NamespaceKind.group), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.user), + DataConnectorTestCase(NamespaceKind.group), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.user), + DataConnectorTestCase(NamespaceKind.group), + Visibility.PRIVATE, + ), + ( + DataConnectorTestCase(NamespaceKind.user), + DataConnectorTestCase(NamespaceKind.group), + Visibility.PRIVATE, + ), + # Moving from group namespace + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.project, Visibility.PRIVATE), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.project, Visibility.PRIVATE), + Visibility.PRIVATE, + ), + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.project, Visibility.PUBLIC), + Visibility.PRIVATE, + ), + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.group), + Visibility.PRIVATE, + ), + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.group), + Visibility.PRIVATE, + ), + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.group), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.group), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.user), + Visibility.PRIVATE, + ), + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.user), + Visibility.PRIVATE, + ), + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.user), + Visibility.PUBLIC, + ), + ( + DataConnectorTestCase(NamespaceKind.group), + DataConnectorTestCase(NamespaceKind.user), + Visibility.PUBLIC, + ), + ], + ids=lambda x: str(x), +) +async def test_move_data_connector( + 
sanic_client: SanicASGITestClient,
+    member_1_user: UserInfo,
+    member_1_headers: dict,
+    origin: DataConnectorTestCase,
+    destination: DataConnectorTestCase,
+    dc_visibility: Visibility,
+) -> None:
+    # Create origin namespace
+    linked_project_id: str | None = None
+    match origin.ns_kind:
+        case NamespaceKind.group:
+            payload = {
+                "name": "origin",
+                "slug": "origin",
+            }
+            _, response = await sanic_client.post("/api/data/groups", headers=member_1_headers, json=payload)
+            assert response.status_code == 201, response.text
+            origin_path = NamespacePath.from_strings(response.json["slug"])
+        case NamespaceKind.user:
+            origin_path = member_1_user.namespace.path
+        case NamespaceKind.project:
+            payload = {
+                "name": "origin",
+                "namespace": member_1_user.namespace.path.serialize(),
+                "slug": "origin",
+                "visibility": "public" if origin.visibility == Visibility.PUBLIC else "private",
+            }
+            _, response = await sanic_client.post("/api/data/projects", headers=member_1_headers, json=payload)
+            assert response.status_code == 201, response.text
+            origin_path = ProjectPath.from_strings(response.json["namespace"], response.json["slug"])
+            linked_project_id = response.json["id"]
+
+    # Create the destination namespace
+    match destination.ns_kind:
+        case NamespaceKind.group:
+            payload = {
+                "name": "destination",
+                "slug": "destination",
+            }
+            _, response = await sanic_client.post("/api/data/groups", headers=member_1_headers, json=payload)
+            assert response.status_code == 201, response.text
+            destination_path = NamespacePath.from_strings(response.json["slug"])
+            destination_id = response.json["id"]
+        case NamespaceKind.user:
+            destination_path = member_1_user.namespace.path
+            destination_id = response.json["id"]
+        case NamespaceKind.project:
+            payload = {
+                "name": "destination",
+                "namespace": member_1_user.namespace.path.serialize(),
+                "slug": "destination",
+                "visibility": "public" if destination.visibility == Visibility.PUBLIC else "private",
+            }
+            _, response = await sanic_client.post("/api/data/projects", headers=member_1_headers, json=payload)
+            assert response.status_code == 201, response.text
+            destination_path = ProjectPath.from_strings(response.json["namespace"], response.json["slug"])
+            destination_id = response.json["id"]
+
+    # Create the data connector
+    response = await create_data_connector(
+        sanic_client, member_1_headers, origin_path.serialize(), "dc1", private=dc_visibility == Visibility.PRIVATE
+    )
+    assert response.status_code == 201, response.text
+    assert response.json["namespace"] == origin_path.serialize()
+    dc_id = response.json["id"]
+    dc_etag = response.json["etag"]
+
+    # Create a project to link the DC to if the origin is not a project
+    if not isinstance(origin_path, ProjectPath):
+        payload = {
+            "name": "dc_link_project",
+            "namespace": member_1_user.namespace.path.serialize(),
+            "slug": "dc_link_project",
+            "visibility": "private",
+        }
+        _, response = await sanic_client.post("/api/data/projects", headers=member_1_headers, json=payload)
+        assert response.status_code == 201, response.text
+        linked_project_id = response.json["id"]
+
+    # Link the data connector to a project
+    payload = {"project_id": linked_project_id}
+    _, response = await sanic_client.post(
+        f"/api/data/data_connectors/{dc_id}/project_links", headers=member_1_headers, json=payload
+    )
+    assert response.status_code == 201, response.text
+
+    # Move the data connector
+    payload = {"namespace": destination_path.serialize()}
+    headers = merge_headers(member_1_headers, {"If-Match": dc_etag})
+    _, response = await 
sanic_client.patch(f"/api/data/data_connectors/{dc_id}", headers=headers, json=payload)
+    assert response.status_code == 200, response.text
+    assert response.json["namespace"] == destination_path.serialize()
+    assert response.json["visibility"] == dc_visibility.value
+
+    # Check that the data connector link remains unchanged after moving
+    _, response = await sanic_client.get(
+        f"/api/data/projects/{linked_project_id}/data_connector_links", headers=headers
+    )
+    assert response.status_code == 200, response.text
+    assert len(response.json) == 1
+    assert response.json[0]["data_connector_id"] == dc_id
+
+    # Moving the data connector to the new project creates a link to it automatically
+    if isinstance(destination_path, ProjectPath):
+        _, response = await sanic_client.get(
+            f"/api/data/projects/{destination_id}/data_connector_links", headers=headers
+        )
+        assert response.status_code == 200, response.text
+        assert len(response.json) == 1
+        assert response.json[0]["data_connector_id"] == dc_id
+
+    # Check that the number of namespaces is as expected
+    _, response = await sanic_client.get(
+        "/api/data/namespaces", headers=headers, params=dict(kinds=["group", "user", "project"], minimum_role="owner")
+    )
+    assert response.status_code == 200, response.text
+    match origin.ns_kind, destination.ns_kind:
+        case NamespaceKind.group, NamespaceKind.project:
+            # The namespaces are the group, the project, the linked dc project and the user
+            expected_namespaces = 4
+        case NamespaceKind.group, NamespaceKind.group:
+            # The namespaces are the 2 groups, the linked dc project and the user
+            expected_namespaces = 4
+        case NamespaceKind.project, NamespaceKind.user:
+            # There are no new namespaces for the linked project or for the destination user namespace
+            expected_namespaces = 2
+        case _:
+            # The user, the source and the destination namespace
+            expected_namespaces = 3
+    assert len(response.json) == expected_namespaces
+
+
+def test_description_cleanup() -> None:
+    description_html = """

+<p>
+A description
+</p>
+<p>
+Some more text...
+</p>
+ """ + + description_text = core._html_to_text(description_html) + + expected = """A description\nSome more text...""" + assert description_text == expected + + +def _mock_get_doi_metadata(metadata: RCloneDOIMetadata, sanic_client: SanicASGITestClient, monkeypatch: "MonkeyPatch"): + """Mock the RCloneValidator.get_doi_metadata method.""" + + # The DOI resolver seems to block requests from GitHub action runners, so we mock its response + validator = sanic_client.sanic_app.ctx._dependencies.r_clone_validator + _orig_get_doi_metadata = validator.get_doi_metadata + + async def _mock_get_doi_metadata(*args, **kwargs) -> RCloneDOIMetadata: + doi_metadata = await _orig_get_doi_metadata(*args, **kwargs) + if doi_metadata is not None: + assert doi_metadata == metadata + return doi_metadata + + warnings.warn("Could not retrieve DOI metadata, returning saved one", stacklevel=2) + return metadata + + monkeypatch.setattr(validator, "get_doi_metadata", _mock_get_doi_metadata) diff --git a/test/bases/renku_data_services/data_api/test_groups.py b/test/bases/renku_data_services/data_api/test_groups.py index eccd4592a..11d2a6572 100644 --- a/test/bases/renku_data_services/data_api/test_groups.py +++ b/test/bases/renku_data_services/data_api/test_groups.py @@ -1,28 +1,19 @@ -from base64 import b64decode from datetime import datetime import pytest from sanic_testing.testing import SanicASGITestClient -from renku_data_services.app_config.config import Config from renku_data_services.authz.models import Role, Visibility -from renku_data_services.message_queue.avro_models.io.renku.events.v2 import ( - GroupAdded, - GroupMemberAdded, - GroupMemberRemoved, - GroupMemberUpdated, - GroupRemoved, - GroupUpdated, -) -from renku_data_services.message_queue.models import deserialize_binary +from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.users.models import UserInfo from test.bases.renku_data_services.data_api.utils import merge_headers @pytest.mark.asyncio async def test_group_creation_basic( - sanic_client: SanicASGITestClient, user_headers: dict[str, str], app_config: Config + sanic_client: SanicASGITestClient, user_headers: dict[str, str], app_manager: DependencyManager ) -> None: + await app_manager.search_updates_repo.clear_all() payload = { "name": "Group1", "slug": "group-1", @@ -38,23 +29,15 @@ async def test_group_creation_basic( assert group["description"] == payload["description"] assert group["created_by"] == "user" datetime.fromisoformat(group["creation_date"]) + app_manager.metrics.group_created.assert_called_once() - events = await app_config.event_repo.get_pending_events() - - group_events = [e for e in events if e.get_message_type() == "group.added"] - assert len(group_events) == 1 - group_event = deserialize_binary(b64decode(group_events[0].payload["payload"]), GroupAdded) - assert group_event.id == group["id"] - assert group_event.name == group["name"] - assert group_event.description == group["description"] - assert group_event.namespace == group["slug"] - - group_events = [e for e in events if e.get_message_type() == "memberGroup.added"] - assert len(group_events) == 1 - group_event = deserialize_binary(b64decode(group_events[0].payload["payload"]), GroupMemberAdded) - assert group_event.userId == "user" - assert group_event.groupId == group["id"] - assert group_event.role.value == "OWNER" + search_updates = await app_manager.search_updates_repo.select_next(20) + assert len(search_updates) == 1 + assert search_updates[0].payload["id"] == group["id"] 
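+    # A sketch of the assumed shape of a group's search-update payload, inferred
+    # from the assertions around it rather than from a documented schema:
+    #   {"id": ..., "name": ..., "description": ..., "slug": ..., "path": ...}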
+ assert search_updates[0].payload["name"] == group["name"] + assert search_updates[0].payload["description"] == group["description"] + assert search_updates[0].payload["slug"] == group["slug"] + assert search_updates[0].payload["path"] == group["slug"] _, response = await sanic_client.get("/api/data/groups", headers=user_headers) group = response.json @@ -102,9 +85,27 @@ async def test_group_pagination( assert res3_json[-1]["name"] == "group0" +@pytest.mark.asyncio +async def test_get_single_group(sanic_client, user_headers) -> None: + payload = { + "name": "Group1", + "slug": "group-1", + "description": "Group 1 Description", + } + _, response = await sanic_client.post("/api/data/groups", headers=user_headers, json=payload) + assert response.status_code == 201, response.text + _, response = await sanic_client.get("/api/data/groups/%3f", headers=user_headers) + assert response.status_code == 404, response.text + _, response = await sanic_client.get("/api/data/groups/group-1", headers=user_headers) + assert response.status_code == 200, response.text + group = response.json + assert group["name"] == payload["name"] + assert group["slug"] == payload["slug"] + + @pytest.mark.asyncio async def test_group_patch_delete( - sanic_client: SanicASGITestClient, user_headers: dict[str, str], app_config: Config + sanic_client: SanicASGITestClient, user_headers: dict[str, str], app_manager: DependencyManager ) -> None: payload = { "name": "GroupOther", @@ -129,6 +130,7 @@ async def test_group_patch_delete( "slug": "group-2", "description": "Group 2 Description", } + await app_manager.search_updates_repo.clear_all() _, response = await sanic_client.patch("/api/data/groups/group-1", headers=user_headers, json=new_payload) assert response.status_code == 200, response.text @@ -137,16 +139,14 @@ async def test_group_patch_delete( assert group["slug"] == new_payload["slug"] assert group["description"] == new_payload["description"] - events = await app_config.event_repo.get_pending_events() - - group_events = [e for e in events if e.get_message_type() == "group.updated"] - assert len(group_events) == 1 - group_event = deserialize_binary(b64decode(group_events[0].payload["payload"]), GroupUpdated) - assert group_event.id == group["id"] - assert group_event.name == group["name"] - assert group_event.description == group["description"] - assert group_event.namespace == group["slug"] + search_updates = await app_manager.search_updates_repo.select_next(20) + assert len(search_updates) == 1 + assert search_updates[0].payload["slug"] == group["slug"] + assert search_updates[0].payload["path"] == group["slug"] + for k in ["id", "name", "description"]: + assert search_updates[0].payload[k] == group[k] + await app_manager.search_updates_repo.clear_all() new_payload = {"slug": "group-other"} _, response = await sanic_client.patch("/api/data/groups/group-1", headers=user_headers, json=new_payload) assert response.status_code == 409 # The latest slug must be used to patch it is now group-2 @@ -154,12 +154,10 @@ async def test_group_patch_delete( _, response = await sanic_client.delete("/api/data/groups/group-2", headers=user_headers) assert response.status_code == 204 - events = await app_config.event_repo.get_pending_events() - - group_events = [e for e in events if e.get_message_type() == "group.removed"] - assert len(group_events) == 1 - group_event = deserialize_binary(b64decode(group_events[0].payload["payload"]), GroupRemoved) - assert group_event.id == group["id"] + search_updates = await 
app_manager.search_updates_repo.select_next(20) + assert len(search_updates) == 1 + assert search_updates[0].payload["id"] == group["id"] + assert search_updates[0].payload["deleted"] _, response = await sanic_client.get("/api/data/groups/group-2", headers=user_headers) assert response.status_code == 404 @@ -167,7 +165,7 @@ async def test_group_patch_delete( @pytest.mark.asyncio async def test_group_members( - sanic_client: SanicASGITestClient, user_headers: dict[str, str], app_config: Config + sanic_client: SanicASGITestClient, user_headers: dict[str, str], app_manager: DependencyManager ) -> None: payload = { "name": "Group1", @@ -176,16 +174,19 @@ async def test_group_members( } _, response = await sanic_client.post("/api/data/groups", headers=user_headers, json=payload) assert response.status_code == 201, response.text - group = response.json _, response = await sanic_client.get("/api/data/groups/group-1/members", headers=user_headers) assert response.status_code == 200, response.text members = response.json assert len(members) == 1 assert members[0]["id"] == "user" assert members[0]["role"] == "owner" + + await app_manager.search_updates_repo.clear_all() + new_members = [{"id": "member-1", "role": "viewer"}] _, response = await sanic_client.patch("/api/data/groups/group-1/members", headers=user_headers, json=new_members) assert response.status_code == 200 + app_manager.metrics.group_member_added.assert_called_once() _, response = await sanic_client.get("/api/data/groups/group-1/members", headers=user_headers) assert response.status_code == 200, response.text members = response.json @@ -194,14 +195,8 @@ async def test_group_members( assert member_1 is not None assert member_1["role"] == "viewer" - events = await app_config.event_repo.get_pending_events() - - group_events = sorted([e for e in events if e.get_message_type() == "memberGroup.added"], key=lambda e: e.id) - assert len(group_events) == 2 - group_event = deserialize_binary(b64decode(group_events[1].payload["payload"]), GroupMemberAdded) - assert group_event.userId == member_1["id"] - assert group_event.groupId == group["id"] - assert group_event.role.value == "VIEWER" + search_updates = await app_manager.search_updates_repo.select_next(20) + assert len(search_updates) == 0 @pytest.mark.asyncio @@ -209,7 +204,7 @@ async def test_removing_single_group_owner_not_allowed( sanic_client: SanicASGITestClient, user_headers: dict[str, str], member_1_headers: dict[str, str], - app_config: Config, + app_manager: DependencyManager, ) -> None: payload = { "name": "Group1", @@ -219,12 +214,14 @@ async def test_removing_single_group_owner_not_allowed( # Create a group _, response = await sanic_client.post("/api/data/groups", headers=user_headers, json=payload) assert response.status_code == 201, response.text - group = response.json _, response = await sanic_client.get("/api/data/groups/group-1/members", headers=user_headers) assert response.status_code == 200, response.text res_json = response.json assert len(res_json) == 1 + + await app_manager.search_updates_repo.clear_all() + # Add a member new_members = [{"id": "member-1", "role": "editor"}] _, response = await sanic_client.patch("/api/data/groups/group-1/members", headers=user_headers, json=new_members) @@ -241,26 +238,15 @@ async def test_removing_single_group_owner_not_allowed( _, response = await sanic_client.patch("/api/data/groups/group-1/members", headers=user_headers, json=new_members) assert response.status_code == 200 - events = await app_config.event_repo.get_pending_events() 
- - group_events = [e for e in events if e.get_message_type() == "memberGroup.updated"] - assert len(group_events) == 1 - group_event = deserialize_binary(b64decode(group_events[0].payload["payload"]), GroupMemberUpdated) - assert group_event.userId == "member-1" - assert group_event.groupId == group["id"] - assert group_event.role.value == "OWNER" + search_updates = await app_manager.search_updates_repo.select_next(20) + assert len(search_updates) == 0 # Removing the original owner now works _, response = await sanic_client.delete("/api/data/groups/group-1/members/user", headers=user_headers) assert response.status_code == 204 - events = await app_config.event_repo.get_pending_events() - - group_events = [e for e in events if e.get_message_type() == "memberGroup.removed"] - assert len(group_events) == 1 - group_event = deserialize_binary(b64decode(group_events[0].payload["payload"]), GroupMemberRemoved) - assert group_event.userId == "user" - assert group_event.groupId == group["id"] + search_updates = await app_manager.search_updates_repo.select_next(20) + assert len(search_updates) == 0 # Check that only one member remains _, response = await sanic_client.get("/api/data/groups/group-1/members", headers=member_1_headers) @@ -270,12 +256,28 @@ async def test_removing_single_group_owner_not_allowed( assert response.json[0]["role"] == "owner" +@pytest.mark.asyncio +async def test_delete_group_member_invalid(sanic_client: SanicASGITestClient, user_headers: dict[str, str]) -> None: + payload = { + "name": "demo group", + "slug": "demo-group", + } + _, response = await sanic_client.post("/api/data/groups", headers=user_headers, json=payload) + assert response.status_code == 201, response.text + group = response.json + group_slug = group["slug"] + + _, response = await sanic_client.delete(f"/api/data/groups/{group_slug}/members/%3A", headers=user_headers) + + assert response.status_code == 422, response.text + + @pytest.mark.asyncio async def test_cannot_change_role_for_last_group_owner( sanic_client: SanicASGITestClient, user_headers: dict[str, str], regular_user: UserInfo, - app_config: Config, + app_manager: DependencyManager, member_1_headers: dict[str, str], ) -> None: payload = { diff --git a/test/bases/renku_data_services/data_api/test_message_queue.py b/test/bases/renku_data_services/data_api/test_message_queue.py deleted file mode 100644 index f6ed6656c..000000000 --- a/test/bases/renku_data_services/data_api/test_message_queue.py +++ /dev/null @@ -1,122 +0,0 @@ -import asyncio - -import pytest - -import renku_data_services.message_queue.blueprints -from test.bases.renku_data_services.data_api.utils import dataclass_to_str, deserialize_event - - -@pytest.fixture -def _reprovisioning(sanic_client, user_headers): - """Wait for the data service to finish the reprovisioning task.""" - - async def wait_helper(): - total_wait_time = 0 - while True: - await asyncio.sleep(0.1) - total_wait_time += 0.1 - - _, response = await sanic_client.get("/api/data/message_queue/reprovision", headers=user_headers) - - if response.status_code == 404: - break - elif total_wait_time > 30: - assert False, "Reprovisioning was not finished after 30 seconds" - - return wait_helper - - -@pytest.mark.asyncio -async def test_message_queue_reprovisioning( - sanic_client, app_config, create_project, create_group, admin_headers, project_members, _reprovisioning -) -> None: - await create_project("Project 1") - await create_project("Project 2", visibility="public") - await create_project("Project 3", admin=True) - await 
create_project("Project 4", admin=True, visibility="public", members=project_members) - - await create_group("Group 1") - await create_group("Group 2", admin=True) - await create_group("Group 3", members=project_members) - - events = await app_config.event_repo.get_pending_events() - - # NOTE: Clear all events before reprovisioning - await app_config.event_repo.delete_all_events() - - _, response = await sanic_client.post("/api/data/message_queue/reprovision", headers=admin_headers) - - assert response.status_code == 201, response.text - assert response.json["id"] is not None - assert response.json["start_date"] is not None - - await _reprovisioning() - - reprovisioning_events = await app_config.event_repo.get_pending_events() - - events_before = {dataclass_to_str(deserialize_event(e)) for e in events} - events_after = {dataclass_to_str(deserialize_event(e)) for e in reprovisioning_events[1:-1]} - - assert events_after == events_before - - -@pytest.mark.asyncio -async def test_message_queue_only_admins_can_start_reprovisioning(sanic_client, user_headers) -> None: - _, response = await sanic_client.post("/api/data/message_queue/reprovision", headers=user_headers) - - assert response.status_code == 403, response.text - assert "You do not have the required permissions for this operation." in response.json["error"]["message"] - - -async def long_reprovisioning_mock(*_, **__): - # NOTE: we do not delete the reprovision instance at the end to simulate a long reprovisioning - print("Running") - - -@pytest.mark.asyncio -async def test_message_queue_multiple_reprovisioning_not_allowed(sanic_client, admin_headers, monkeypatch) -> None: - monkeypatch.setattr(renku_data_services.message_queue.blueprints, "reprovision", long_reprovisioning_mock) - - _, response = await sanic_client.post("/api/data/message_queue/reprovision", headers=admin_headers) - assert response.status_code == 201, response.text - - _, response = await sanic_client.post("/api/data/message_queue/reprovision", headers=admin_headers) - - assert response.status_code == 409, response.text - assert "A reprovisioning is already in progress" in response.json["error"]["message"] - - -@pytest.mark.asyncio -async def test_message_queue_get_reprovisioning_status(sanic_client, admin_headers, user_headers, monkeypatch): - monkeypatch.setattr(renku_data_services.message_queue.blueprints, "reprovision", long_reprovisioning_mock) - - _, response = await sanic_client.get("/api/data/message_queue/reprovision", headers=user_headers) - - assert response.status_code == 404, response.text - - # NOTE: Start a reprovisioning - _, response = await sanic_client.post("/api/data/message_queue/reprovision", headers=admin_headers) - assert response.status_code == 201, response.text - - _, response = await sanic_client.get("/api/data/message_queue/reprovision", headers=user_headers) - - assert response.status_code == 200, response.text - assert response.json["id"] is not None - assert response.json["start_date"] is not None - - -@pytest.mark.asyncio -async def test_message_queue_can_stop_reprovisioning(sanic_client, admin_headers, monkeypatch) -> None: - monkeypatch.setattr(renku_data_services.message_queue.blueprints, "reprovision", long_reprovisioning_mock) - - _, response = await sanic_client.post("/api/data/message_queue/reprovision", headers=admin_headers) - assert response.status_code == 201, response.text - _, response = await sanic_client.get("/api/data/message_queue/reprovision", headers=admin_headers) - assert response.status_code == 200, response.text - 
- _, response = await sanic_client.delete("/api/data/message_queue/reprovision", headers=admin_headers) - assert response.status_code == 204, response.text - - _, response = await sanic_client.get("/api/data/message_queue/reprovision", headers=admin_headers) - - assert response.status_code == 404, response.text diff --git a/test/bases/renku_data_services/data_api/test_metrics.py b/test/bases/renku_data_services/data_api/test_metrics.py new file mode 100644 index 000000000..a7f495b3b --- /dev/null +++ b/test/bases/renku_data_services/data_api/test_metrics.py @@ -0,0 +1,49 @@ +import re +from collections.abc import AsyncGenerator +from typing import cast +from unittest.mock import AsyncMock, MagicMock + +import pytest +import pytest_asyncio +from sanic_testing.testing import SanicASGITestClient + +from renku_data_services.base_models.metrics import ProjectCreationType +from renku_data_services.metrics.core import StagingMetricsService + + +@pytest_asyncio.fixture +async def sanic_metrics_client(monkeypatch, app_manager, sanic_client) -> AsyncGenerator[SanicASGITestClient, None]: + monkeypatch.setenv("POSTHOG_ENABLED", "true") + + # NOTE: Replace the `project_created` and `session_launcher_created` methods with actual implementations to store + # metrics in the database. + metrics = StagingMetricsService(enabled=True, metrics_repo=app_manager.metrics_repo) + metrics_mock = cast(MagicMock, app_manager.metrics) + metrics_mock.configure_mock( + project_created=metrics.project_created, session_launcher_created=metrics.session_launcher_created + ) + + yield sanic_client + + metrics_mock.configure_mock(project_created=AsyncMock(), session_launcher_created=AsyncMock()) + + +@pytest.mark.asyncio +async def test_metrics_are_stored(sanic_metrics_client, app_manager, create_project, create_session_launcher) -> None: + project = await create_project("Project", sanic_client=sanic_metrics_client) + await create_session_launcher("Launcher 1", project_id=project["id"]) + + events = [e async for e in app_manager.metrics_repo.get_unprocessed_metrics()] + events.sort(key=lambda e: e.timestamp) + + assert len(events) == 2 + + project_created = events[0] + assert re.match(r"^[0-7][0-9A-HJKMNP-TV-Z]{25}$", str(project_created.id)) + assert project_created.event == "project_created" + assert project_created.metadata_ == {"project_creation_kind": ProjectCreationType.new.value} + + session_launcher_created = events[1] + assert re.match(r"^[0-7][0-9A-HJKMNP-TV-Z]{25}$", str(session_launcher_created.id)) + assert session_launcher_created.event == "session_launcher_created" + assert session_launcher_created.metadata_ == {"environment_image_source": "image", "environment_kind": "CUSTOM"} diff --git a/test/bases/renku_data_services/data_api/test_migrations.py b/test/bases/renku_data_services/data_api/test_migrations.py index 8068a3400..157757935 100644 --- a/test/bases/renku_data_services/data_api/test_migrations.py +++ b/test/bases/renku_data_services/data_api/test_migrations.py @@ -1,17 +1,24 @@ -import base64 +import random +import string +from collections.abc import Sequence from datetime import UTC, datetime -from typing import Any +from typing import Any, cast import pytest import sqlalchemy as sa from alembic.script import ScriptDirectory from sanic_testing.testing import SanicASGITestClient +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.sql import bindparam from ulid import ULID -from renku_data_services.app_config.config import Config -from 
renku_data_services.message_queue.avro_models.io.renku.events import v2 -from renku_data_services.message_queue.models import deserialize_binary +from renku_data_services import errors +from renku_data_services.base_models.core import Slug +from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.migrations.core import downgrade_migrations_for_app, get_alembic_config, run_migrations_for_app +from renku_data_services.namespace import orm as ns_schemas +from renku_data_services.users import orm as user_schemas from renku_data_services.users.models import UserInfo @@ -27,36 +34,48 @@ async def test_unique_migration_head() -> None: @pytest.mark.asyncio +@pytest.mark.parametrize("downgrade_to, upgrade_to", [("base", "head"), ("fe3b7470d226", "8413f10ef77f")]) async def test_upgrade_downgrade_cycle( - app_config_instance: Config, + app_manager_instance: DependencyManager, sanic_client_no_migrations: SanicASGITestClient, admin_headers: dict, admin_user: UserInfo, + downgrade_to: str, + upgrade_to: str, ) -> None: # Migrate to head and create a project - run_migrations_for_app("common", "head") - await app_config_instance.kc_user_repo.initialize(app_config_instance.kc_api) - await app_config_instance.group_repo.generate_user_namespaces() + run_migrations_for_app("common", upgrade_to) + await app_manager_instance.kc_user_repo.initialize(app_manager_instance.kc_api) + await app_manager_instance.group_repo.generate_user_namespaces() payload: dict[str, Any] = { "name": "test_project", - "namespace": admin_user.namespace.slug, + "namespace": admin_user.namespace.path.serialize(), } _, res = await sanic_client_no_migrations.post("/api/data/projects", headers=admin_headers, json=payload) assert res.status_code == 201 + project_id = res.json["id"] # Migrate/downgrade a few times but end on head - downgrade_migrations_for_app("common", "base") - run_migrations_for_app("common", "head") - downgrade_migrations_for_app("common", "base") - run_migrations_for_app("common", "head") + downgrade_migrations_for_app("common", downgrade_to) + run_migrations_for_app("common", upgrade_to) + downgrade_migrations_for_app("common", downgrade_to) + run_migrations_for_app("common", upgrade_to) # Try to make the same project again # NOTE: The engine has to be disposed otherwise it caches the postgres types (i.e. enums) # from previous migrations and then trying to create a project below fails with the message # cache postgres lookup failed for type XXXX. - await app_config_instance.db.current._async_engine.dispose() - await app_config_instance.kc_user_repo.initialize(app_config_instance.kc_api) - await app_config_instance.group_repo.generate_user_namespaces() + await app_manager_instance.config.db.current._async_engine.dispose() + await app_manager_instance.kc_user_repo.initialize(app_manager_instance.kc_api) + await app_manager_instance.group_repo.generate_user_namespaces() _, res = await sanic_client_no_migrations.post("/api/data/projects", headers=admin_headers, json=payload) - assert res.status_code == 201, res.json + assert res.status_code in [201, 409], res.json + if res.status_code == 409: + # NOTE: This means the project is still in the DB because the down migration was not going + # far enough to delete the projects table, so we delete the project and recreate it to make sure + # things are OK. 
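+        # In other words: the DELETE below clears the leftover row (expect 204)
+        # and the POST then recreates the project (expect 201) on the freshly
+        # migrated schema.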
+ _, res = await sanic_client_no_migrations.delete(f"/api/data/projects/{project_id}", headers=admin_headers) + assert res.status_code == 204, res.json + _, res = await sanic_client_no_migrations.post("/api/data/projects", headers=admin_headers, json=payload) + assert res.status_code == 201, res.json # !IMPORTANT: This test can only be run on v2 of the authz schema @@ -64,13 +83,13 @@ async def test_upgrade_downgrade_cycle( @pytest.mark.asyncio async def test_migration_to_f34b87ddd954( sanic_client_no_migrations: SanicASGITestClient, - app_config_instance: Config, + app_manager_instance: DependencyManager, user_headers: dict, admin_headers: dict, ) -> None: run_migrations_for_app("common", "d8676f0cde53") - await app_config_instance.kc_user_repo.initialize(app_config_instance.kc_api) - await app_config_instance.group_repo.generate_user_namespaces() + await app_manager_instance.kc_user_repo.initialize(app_manager_instance.kc_api) + await app_manager_instance.group_repo.generate_user_namespaces() sanic_client = sanic_client_no_migrations payloads = [ { @@ -94,28 +113,21 @@ async def test_migration_to_f34b87ddd954( _, response = await sanic_client.get("/api/data/groups", headers=user_headers) assert response.status_code == 200 assert len(response.json) == 0 - # The database should have delete events for the groups - events_orm = await app_config_instance.event_repo.get_pending_events() - group_removed_events = [ - deserialize_binary(base64.b64decode(e.payload["payload"]), v2.GroupRemoved) - for e in events_orm - if e.queue == "group.removed" - ] - assert len(group_removed_events) == 2 - assert set(added_group_ids) == {e.id for e in group_removed_events} @pytest.mark.asyncio -async def test_migration_to_1ef98b967767_and_086eb60b42c8(app_config_instance: Config, admin_user: UserInfo) -> None: +async def test_migration_to_1ef98b967767_and_086eb60b42c8( + app_manager_instance: DependencyManager, admin_user: UserInfo +) -> None: """Tests the migration of the session launchers.""" run_migrations_for_app("common", "b8cbd62e85b9") - await app_config_instance.kc_user_repo.initialize(app_config_instance.kc_api) - await app_config_instance.group_repo.generate_user_namespaces() + global_env_id = str(ULID()) custom_launcher_id = str(ULID()) global_launcher_id = str(ULID()) project_id = str(ULID()) - async with app_config_instance.db.async_session_maker() as session, session.begin(): + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + await _generate_user_namespaces(session) await session.execute( sa.text( "INSERT INTO " @@ -178,7 +190,7 @@ async def test_migration_to_1ef98b967767_and_086eb60b42c8(app_config_instance: C ) ) run_migrations_for_app("common", "1ef98b967767") - async with app_config_instance.db.async_session_maker() as session, session.begin(): + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): res = await session.execute( sa.text("SELECT * FROM sessions.environments WHERE name = :name").bindparams(name="global env") ) @@ -200,7 +212,7 @@ async def test_migration_to_1ef98b967767_and_086eb60b42c8(app_config_instance: C "--ContentsManager.allow_hidden=true --ServerApp.allow_origin=*", ] assert global_env["environment_kind"] == "GLOBAL" - async with app_config_instance.db.async_session_maker() as session, session.begin(): + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): res = await session.execute( sa.text("SELECT * FROM sessions.environments WHERE name != 
:name").bindparams(name="global env") ) @@ -221,7 +233,7 @@ async def test_migration_to_1ef98b967767_and_086eb60b42c8(app_config_instance: C "--ContentsManager.allow_hidden=true --ServerApp.allow_origin=*", ] assert custom_env["environment_kind"] == "CUSTOM" - async with app_config_instance.db.async_session_maker() as session, session.begin(): + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): res = await session.execute( sa.text("SELECT * FROM sessions.launchers WHERE id = :id").bindparams(id=custom_launcher_id) ) @@ -231,7 +243,7 @@ async def test_migration_to_1ef98b967767_and_086eb60b42c8(app_config_instance: C assert custom_launcher["name"] == "custom" assert custom_launcher["project_id"] == project_id assert custom_launcher["environment_id"] == custom_env["id"] - async with app_config_instance.db.async_session_maker() as session, session.begin(): + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): res = await session.execute( sa.text("SELECT * FROM sessions.launchers WHERE id = :id").bindparams(id=global_launcher_id) ) @@ -242,7 +254,7 @@ async def test_migration_to_1ef98b967767_and_086eb60b42c8(app_config_instance: C assert global_launcher["project_id"] == project_id assert global_launcher["environment_id"] == global_env["id"] run_migrations_for_app("common", "086eb60b42c8") - async with app_config_instance.db.async_session_maker() as session, session.begin(): + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): res = await session.execute( sa.text("SELECT * FROM sessions.environments WHERE name = :name").bindparams(name="global env") ) @@ -255,3 +267,345 @@ async def test_migration_to_1ef98b967767_and_086eb60b42c8(app_config_instance: C '--ServerApp.token="" --ServerApp.password="" --ServerApp.allow_remote_access=true ' '--ContentsManager.allow_hidden=true --ServerApp.allow_origin=* --ServerApp.root_dir="/home/jovyan/work"', ] + + +@pytest.mark.asyncio +async def test_migration_create_global_envs( + app_manager_instance: DependencyManager, + sanic_client_no_migrations: SanicASGITestClient, + admin_headers: dict, + admin_user: UserInfo, + tmpdir_factory, + monkeysession, +) -> None: + run_migrations_for_app("common", "head") + envs = await app_manager_instance.session_repo.get_environments() + assert len(envs) == 2 + assert any(e.name == "Python/Jupyter" for e in envs) + assert any(e.name == "Rstudio" for e in envs) + + +@pytest.mark.asyncio +async def test_migration_to_75c83dd9d619(app_manager_instance: DependencyManager, admin_user: UserInfo) -> None: + """Tests the migration for copying session environments of copied projects.""" + + async def insert_project(session: AsyncSession, payload: dict[str, Any]) -> None: + bindparams: list[sa.BindParameter] = [] + cols: list[str] = list(payload.keys()) + cols_joined = ", ".join(cols) + ids = ", ".join([":" + col for col in cols]) + if "visibility" in payload: + bindparams.append(bindparam("visibility", literal_execute=True)) + stmt = sa.text( + f"INSERT INTO projects.projects({cols_joined}) VALUES({ids})", + ).bindparams(*bindparams, **payload) + await session.execute(stmt) + + async def insert_environment(session: AsyncSession, payload: dict[str, Any]) -> None: + bindparams: list[sa.BindParameter] = [] + cols: list[str] = list(payload.keys()) + cols_joined = ", ".join(cols) + ids = ", ".join([":" + col for col in cols]) + if "command" in payload: + bindparams.append(bindparam("command", type_=JSONB)) + if 
"args" in payload: + bindparams.append(bindparam("args", type_=JSONB)) + if "environment_kind" in payload: + bindparams.append(bindparam("environment_kind", literal_execute=True)) + stmt = sa.text(f"INSERT INTO sessions.environments({cols_joined}) VALUES ({ids})").bindparams( + *bindparams, + **payload, + ) + await session.execute(stmt) + + async def insert_session_launcher(session: AsyncSession, payload: dict[str, Any]) -> None: + cols: list[str] = list(payload.keys()) + cols_joined = ", ".join(cols) + ids = ", ".join([":" + col for col in cols]) + stmt = sa.text(f"INSERT INTO sessions.launchers({cols_joined}) VALUES ({ids})").bindparams( + **payload, + ) + await session.execute(stmt) + + def find_by_col(data: Sequence[sa.Row[Any]], id_value: Any, id_index: int) -> tuple | None: + for row in data: + if row.tuple()[id_index] == id_value: + return cast(tuple, row.tuple()) + return None + + run_migrations_for_app("common", "450ae3930996") + + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + await _generate_user_namespaces(session) + # Create template project + project_id = str(ULID()) + await insert_project( + session, + dict( + id=project_id, + name="test_project", + created_by_id=admin_user.id, + creation_date=datetime.now(UTC), + visibility="public", + ), + ) + # Create clone project + cloned_project_id = str(ULID()) + await insert_project( + session, + dict( + id=cloned_project_id, + name="cloned_project", + created_by_id="some-other-user-id", + creation_date=datetime.now(UTC), + visibility="public", + template_id=project_id, + ), + ) + # Create a clone project that has removed its parent reference + cloned_project_orphan_id = str(ULID()) + await insert_project( + session, + dict( + id=cloned_project_orphan_id, + name="cloned_project_orphan", + created_by_id="some-other-user-id", + creation_date=datetime.now(UTC), + visibility="public", + ), + ) + # Create unrelated project + random_project_id = str(ULID()) + await insert_project( + session, + dict( + id=random_project_id, + name="random_project", + created_by_id=admin_user.id, + creation_date=datetime.now(UTC), + visibility="public", + ), + ) + # Create a single environment + custom_env_id = str(ULID()) + await insert_environment( + session, + dict( + id=custom_env_id, + name="custom env", + created_by_id=admin_user.id, + creation_date=datetime.now(UTC), + container_image="env_image", + default_url="/env_url", + port=8888, + args=["arg1"], + command=["command1"], + uid=1000, + gid=1000, + environment_kind="CUSTOM", + ), + ) + # Create an unrelated environment + random_env_id = str(ULID()) + await insert_environment( + session, + dict( + id=random_env_id, + name="random env", + created_by_id=admin_user.id, + creation_date=datetime.now(UTC), + container_image="env_image", + default_url="/env_url", + port=8888, + args=["arg1"], + command=["command1"], + uid=1000, + gid=1000, + environment_kind="CUSTOM", + ), + ) + # Create two session launchers for each project, but both are using the same env + custom_launcher_id = str(ULID()) + await insert_session_launcher( + session, + dict( + id=custom_launcher_id, + name="custom", + created_by_id=admin_user.id, + creation_date=datetime.now(UTC), + environment_id=custom_env_id, + project_id=project_id, + ), + ) + custom_launcher_id_cloned = str(ULID()) + await insert_session_launcher( + session, + dict( + id=custom_launcher_id_cloned, + name="custom_for_cloned_project", + created_by_id=admin_user.id, + creation_date=datetime.now(UTC), + 
environment_id=custom_env_id, + project_id=cloned_project_id, + ), + ) + # A session launcher for the cloned orphaned project + custom_launcher_id_orphan_cloned = str(ULID()) + await insert_session_launcher( + session, + dict( + id=custom_launcher_id_orphan_cloned, + name="custom_for_cloned_orphaned_project", + created_by_id=admin_user.id, + creation_date=datetime.now(UTC), + environment_id=custom_env_id, + project_id=cloned_project_orphan_id, + ), + ) + # Create an unrelated session launcher that should be unaffected by the migrations + random_launcher_id = str(ULID()) + await insert_session_launcher( + session, + dict( + id=random_launcher_id, + name="random_launcher", + created_by_id=admin_user.id, + creation_date=datetime.now(UTC), + environment_id=random_env_id, + project_id=random_project_id, + ), + ) + run_migrations_for_app("common", "75c83dd9d619") + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + launchers = (await session.execute(sa.text("SELECT id, environment_id, name FROM sessions.launchers"))).all() + envs = ( + await session.execute( + sa.text("SELECT id, created_by_id, name FROM sessions.environments WHERE environment_kind = 'CUSTOM'") + ) + ).all() + assert len(launchers) == 4 + assert len(envs) == 4 + # Filter the results from the DB + random_env_row = find_by_col(envs, random_env_id, 0) + assert random_env_row is not None + random_launcher_row = find_by_col(launchers, random_launcher_id, 0) + assert random_launcher_row is not None + custom_launcher_row = find_by_col(launchers, custom_launcher_id, 0) + assert custom_launcher_row is not None + custom_launcher_clone_row = find_by_col(launchers, custom_launcher_id_cloned, 0) + assert custom_launcher_clone_row is not None + env1_row = find_by_col(envs, custom_launcher_row[1], 0) + assert env1_row is not None + env2_row = find_by_col(envs, custom_launcher_clone_row[1], 0) + assert env2_row is not None + # Check that the session launcher for the cloned project is not using the same env as the parent + assert custom_launcher_row[0] != custom_launcher_clone_row[0] + assert custom_launcher_row[1] != custom_launcher_clone_row[1] + assert custom_launcher_row[2] != custom_launcher_clone_row[2] + # The copied and original env should have different ids and created_by fields + assert env1_row[0] != env2_row[0] + assert env1_row[1] != env2_row[1] + # The copied and the original env have the same name + assert env1_row[2] == env2_row[2] + # Check that the random environment is unchanged + assert random_env_row[0] == random_env_id + assert random_env_row[1] == admin_user.id + assert random_env_row[2] == "random env" + # Check that the orphaned cloned project's environment has been also decoupled + orphan_launcher_row = find_by_col(launchers, custom_launcher_id_orphan_cloned, 0) + assert orphan_launcher_row is not None + orphan_env_row = find_by_col(envs, orphan_launcher_row[1], 0) + assert orphan_env_row is not None + assert custom_launcher_row[0] != orphan_launcher_row[0] + assert custom_launcher_row[1] != orphan_launcher_row[1] + assert custom_launcher_row[2] != orphan_launcher_row[2] + assert env1_row[0] != orphan_env_row[0] + assert env1_row[1] != orphan_env_row[1] + assert env1_row[2] == orphan_env_row[2] + + +async def _generate_user_namespaces(session: AsyncSession) -> list[UserInfo]: + """Generate user namespaces if the user table has data and the namespaces table is empty. 
+
+    NOTE: This is copied from GroupRepository to retain the version compatible with db at a fixed point."""
+
+    async def _create_user_namespace_slug(
+        session: AsyncSession, user_slug: str, retry_enumerate: int = 0, retry_random: bool = False
+    ) -> str:
+        """Create a valid namespace slug for a user."""
+        nss = await session.scalars(
+            sa.select(ns_schemas.NamespaceORM.slug).where(ns_schemas.NamespaceORM.slug.startswith(user_slug))
+        )
+        nslist = nss.all()
+        if user_slug not in nslist:
+            return user_slug
+        if retry_enumerate:
+            for inc in range(1, retry_enumerate + 1):
+                slug = f"{user_slug}-{inc}"
+                if slug not in nslist:
+                    return slug
+        if retry_random:
+            suffix = "".join([random.choice(string.ascii_lowercase + string.digits) for _ in range(8)])  # nosec B311
+            slug = f"{user_slug}-{suffix}"
+            if slug not in nslist:
+                return slug
+
+        raise errors.ValidationError(message=f"Cannot generate a unique namespace slug for the user {user_slug}")
+
+    async def _insert_user_namespace(
+        session: AsyncSession, user_id: str, user_slug: str, retry_enumerate: int = 0, retry_random: bool = False
+    ) -> ns_schemas.NamespaceORM:
+        """Insert a new namespace for the user and optionally retry different variations to avoid collisions."""
+        namespace = await _create_user_namespace_slug(session, user_slug, retry_enumerate, retry_random)
+        slug = Slug.from_name(namespace)
+        ns = ns_schemas.NamespaceORM(slug.value, user_id=user_id)
+        session.add(ns)
+        await session.flush()
+        await session.refresh(ns)
+        return ns
+
+    # NOTE: lock to make sure another instance of the data service cannot insert/update but can read
+    output: list[UserInfo] = []
+    await session.execute(sa.text("LOCK TABLE common.namespaces IN EXCLUSIVE MODE"))
+    at_least_one_namespace = (await session.execute(sa.select(ns_schemas.NamespaceORM).limit(1))).one_or_none()
+    if at_least_one_namespace:
+        return []
+
+    res = await session.scalars(sa.select(user_schemas.UserORM))
+    for user in res:
+        slug = Slug.from_user(user.email, user.first_name, user.last_name, user.keycloak_id)
+        ns = await _insert_user_namespace(session, user.keycloak_id, slug.value, retry_enumerate=10, retry_random=True)
+        user.namespace = ns
+        output.append(user.dump())
+
+    return output
+
+
+@pytest.mark.asyncio
+async def test_migration_to_dcb9648c3c15(app_manager_instance: DependencyManager, admin_user: UserInfo) -> None:
+    run_migrations_for_app("common", "042eeb50cd8e")
+    async with app_manager_instance.config.db.async_session_maker() as session, session.begin():
+        await session.execute(
+            sa.text(
+                "INSERT into "
+                "common.k8s_objects(name, namespace, manifest, deleted, kind, version, cluster, user_id) "
+                "VALUES ('name_pod', 'ns', '{}', FALSE, 'pod', 'v1', 'cluster', 'user_id')"
+            )
+        )
+        await session.execute(
+            sa.text(
+                "INSERT into "
+                "common.k8s_objects(name, namespace, manifest, deleted, kind, version, cluster, user_id) "
+                "VALUES ('name_js', 'ns', '{}', FALSE, 'jupyterserver', 'amalthea.dev/v1alpha1', 'cluster', 'user_id')"
+            )
+        )
+    run_migrations_for_app("common", "dcb9648c3c15")
+    async with app_manager_instance.config.db.async_session_maker() as session, session.begin():
+        k8s_objs = (await session.execute(sa.text('SELECT "group", version, kind FROM common.k8s_objects'))).all()
+        assert len(k8s_objs) == 2
+        assert k8s_objs[0].tuple()[0] is None
+        assert k8s_objs[0].tuple()[1] == "v1"
+        assert k8s_objs[0].tuple()[2] == "pod"
+        assert k8s_objs[1].tuple()[0] == "amalthea.dev"
+        assert k8s_objs[1].tuple()[1] == "v1alpha1"
+        assert k8s_objs[1].tuple()[2] == "jupyterserver"
"jupyterserver" diff --git a/test/bases/renku_data_services/data_api/test_namespaces.py b/test/bases/renku_data_services/data_api/test_namespaces.py index 2e7ff94e4..99bbc1f9b 100644 --- a/test/bases/renku_data_services/data_api/test_namespaces.py +++ b/test/bases/renku_data_services/data_api/test_namespaces.py @@ -1,4 +1,9 @@ import pytest +from sqlalchemy import select + +from renku_data_services.data_api.dependencies import DependencyManager +from renku_data_services.namespace.orm import EntitySlugORM +from renku_data_services.users.models import UserInfo @pytest.mark.asyncio @@ -55,6 +60,43 @@ async def test_list_namespaces_pagination(sanic_client, user_headers) -> None: assert response.headers.get("total") == "7" assert response.headers.get("total-pages") == "4" + _, response = await sanic_client.get("/api/data/namespaces?per_page=1&page=3", headers=user_headers) + assert response.status_code == 200, response.text + res_json = response.json + assert len(res_json) == 1 + user_ns = res_json[0] + assert user_ns["slug"] == "group-2" + assert response.headers.get("page") == "3" + assert response.headers.get("per-page") == "1" + assert response.headers.get("total") == "7" + assert response.headers.get("total-pages") == "7" + + _, response = await sanic_client.get("/api/data/namespaces?per_page=5&page=1", headers=user_headers) + assert response.status_code == 200, response.text + res_json = response.json + assert len(res_json) == 5 + user_ns = res_json[0] + assert user_ns["slug"] == "user.doe" + user_ns = res_json[4] + assert user_ns["slug"] == "group-4" + assert response.headers.get("page") == "1" + assert response.headers.get("per-page") == "5" + assert response.headers.get("total") == "7" + assert response.headers.get("total-pages") == "2" + + _, response = await sanic_client.get("/api/data/namespaces?per_page=5&page=2", headers=user_headers) + assert response.status_code == 200, response.text + res_json = response.json + assert len(res_json) == 2 + user_ns = res_json[0] + assert user_ns["slug"] == "group-5" + user_ns = res_json[1] + assert user_ns["slug"] == "group-6" + assert response.headers.get("page") == "2" + assert response.headers.get("per-page") == "5" + assert response.headers.get("total") == "7" + assert response.headers.get("total-pages") == "2" + @pytest.mark.asyncio async def test_list_namespaces_all_groups_are_public(sanic_client, user_headers, member_1_headers) -> None: @@ -196,3 +238,297 @@ async def test_get_namespace_by_slug_anonymously(sanic_client, user_headers) -> assert response.status_code == 200, response.text assert response.json["slug"] == "user.doe" assert response.json["namespace_kind"] == "user" + + +@pytest.mark.asyncio +async def test_entity_slug_uniqueness(sanic_client, user_headers) -> None: + # Create a group i.e. 
+    payload = {
+        "name": "test1",
+        "slug": "test1",
+        "description": "Group 1 Description",
+    }
+    _, response = await sanic_client.post("/api/data/groups", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+
+    # A group cannot be created with a slug that collides with an existing user namespace
+    payload = {
+        "name": "test-conflict",
+        "slug": "user.doe",
+        "description": "Group 1 Description",
+    }
+    _, response = await sanic_client.post("/api/data/groups", headers=user_headers, json=payload)
+    assert response.status_code == 422, response.text
+
+    # Create a project in the group /test1/test1
+    payload = {
+        "name": "test1",
+        "namespace": "test1",
+        "slug": "test1",
+    }
+    _, response = await sanic_client.post("/api/data/projects", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+
+    # Create a data connector in the project /test1/test1/test1
+    payload = {
+        "name": "test1",
+        "namespace": "test1/test1",
+        "slug": "test1",
+        "storage": {
+            "configuration": {"type": "s3", "endpoint": "http://s3.aws.com"},
+            "source_path": "giab",
+            "target_path": "giab",
+        },
+    }
+    _, response = await sanic_client.post("/api/data/data_connectors", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+
+    # Creating the same data connector again should fail because its slug is already taken
+    _, response = await sanic_client.post("/api/data/data_connectors", headers=user_headers, json=payload)
+    assert response.status_code == 409, response.text
+
+    # Create a data connector in the same project with a different name /test1/test1/test2
+    payload = {
+        "name": "test2",
+        "namespace": "test1/test1",
+        "slug": "test2",
+        "storage": {
+            "configuration": {"type": "s3", "endpoint": "http://s3.aws.com"},
+            "source_path": "giab",
+            "target_path": "giab",
+        },
+    }
+    _, response = await sanic_client.post("/api/data/data_connectors", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+
+    # Create a new project in the same group with the same name as the data connector /test1/test2
+    payload = {
+        "name": "test2",
+        "namespace": "test1",
+        "slug": "test2",
+    }
+    _, response = await sanic_client.post("/api/data/projects", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+
+    # Trying to create a data connector with the same slug as the project in the same group should succeed
+    # i.e. /test1/test2/test1 because the test2 project does not have a data connector called test1
+    payload = {
+        "name": "test1",
+        "namespace": "test1/test2",
+        "slug": "test1",
+        "storage": {
+            "configuration": {"type": "s3", "endpoint": "http://s3.aws.com"},
+            "source_path": "giab",
+            "target_path": "giab",
+        },
+    }
+    _, response = await sanic_client.post("/api/data/data_connectors", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
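+
+# NOTE: In the checks above, slug uniqueness is scoped to the direct parent namespace:
+# /test1/test2/test1 can be created even though /test1/test1/test1 exists, because the two
+# data connectors live under different project namespaces.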
+
+
+@pytest.mark.asyncio
+async def test_listing_project_namespaces(sanic_client, user_headers) -> None:
+    # Create a group i.e. /test1
+    payload = {
+        "name": "test1",
+        "slug": "test1",
+        "description": "Group 1 Description",
+    }
+    _, response = await sanic_client.post("/api/data/groups", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+
+    # Create a project in the group /test1/proj1
+    payload = {
+        "name": "proj1",
+        "namespace": "test1",
+        "slug": "proj1",
+    }
+    _, response = await sanic_client.post("/api/data/projects", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+
+    # Create a second project in the same group /test1/proj2
+    payload = {
+        "name": "proj2",
+        "namespace": "test1",
+        "slug": "proj2",
+    }
+    _, response = await sanic_client.post("/api/data/projects", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+
+    # If no kinds are specified, by default you only get user and group namespaces
+    _, response = await sanic_client.get("/api/data/namespaces", headers=user_headers)
+    assert response.status_code == 200, response.text
+    assert len(response.json) == 2
+
+    # If all kinds are requested, you should get all namespaces
+    _, response = await sanic_client.get(
+        "/api/data/namespaces", headers=user_headers, params={"kinds": ["group", "user", "project"]}
+    )
+    assert response.status_code == 200, response.text
+    assert len(response.json) == 4
+
+    # If only project namespaces are requested, you should get only those
+    _, response = await sanic_client.get("/api/data/namespaces", headers=user_headers, params={"kinds": ["project"]})
+    assert response.status_code == 200, response.text
+    assert len(response.json) == 2
+    assert response.json[0]["name"] == "proj1"
+    assert response.json[0]["namespace_kind"] == "project"
+    assert response.json[0]["slug"] == "proj1"
+    assert response.json[0]["path"] == "test1/proj1"
+    assert response.json[1]["name"] == "proj2"
+    assert response.json[1]["namespace_kind"] == "project"
+    assert response.json[1]["slug"] == "proj2"
+    assert response.json[1]["path"] == "test1/proj2"
+
+
+@pytest.mark.asyncio
+async def test_stored_procedure_cleanup_after_project_slug_deletion(
+    create_project,
+    user_headers,
+    app_manager: DependencyManager,
+    sanic_client,
+    create_data_connector,
+) -> None:
+    # We use stored procedures to remove a project when its slug is removed
+    proj = await create_project(name="test1")
+    proj_id = proj.get("id")
+    assert proj_id is not None
+    namespace = proj.get("namespace")
+    assert namespace is not None
+    proj_slug = proj.get("slug")
+    assert proj_slug is not None
+    _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}", headers=user_headers)
+    assert response.status_code == 200
+    dc = await create_data_connector(name="test-dc", namespace=f"{namespace}/{proj_slug}")
+    dc_id = dc.get("id")
+    assert dc_id is not None
+    assert dc is not None
+    async with app_manager.config.db.async_session_maker() as session, session.begin():
+        # We do not have APIs exposed that will remove the slug so this is the only way to trigger this
+        stmt = (
+            select(EntitySlugORM)
+            .where(EntitySlugORM.project_id == proj_id)
+            .where(EntitySlugORM.namespace_id.is_not(None))
+            .where(EntitySlugORM.data_connector_id.is_(None))
+        )
+        res = await session.scalar(stmt)
+        assert res is not None
+        await session.delete(res)
+        await session.flush()
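+    # From this point on, any cleanup must have happened inside the database itself via the
+    # stored procedures mentioned above; no application-level delete endpoint was called.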
+    # The project namespace is not there
+    _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}/{proj_slug}", headers=user_headers)
+    assert response.status_code == 404
+    # The user or group namespace is untouched
+    _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}", headers=user_headers)
+    assert response.status_code == 200
+    # The project and data connector are both gone
+    _, response = await sanic_client.get(f"/api/data/projects/{proj_id}", headers=user_headers)
+    assert response.status_code == 404
+    _, response = await sanic_client.get(f"/api/data/data_connectors/{dc_id}", headers=user_headers)
+    assert response.status_code == 404
+
+
+@pytest.mark.asyncio
+async def test_stored_procedure_cleanup_after_data_connector_slug_deletion(
+    create_project,
+    user_headers,
+    app_manager: DependencyManager,
+    sanic_client,
+    create_data_connector,
+) -> None:
+    # We use stored procedures to remove a data connector when its slug is removed
+    proj = await create_project(name="test1")
+    proj_id = proj.get("id")
+    assert proj_id is not None
+    namespace = proj.get("namespace")
+    assert namespace is not None
+    proj_slug = proj.get("slug")
+    assert proj_slug is not None
+    _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}", headers=user_headers)
+    assert response.status_code == 200
+    dc1 = await create_data_connector(name="test-dc", namespace=f"{namespace}/{proj_slug}")
+    dc1_id = dc1.get("id")
+    assert dc1_id is not None
+    assert dc1 is not None
+    dc2 = await create_data_connector(name="test-dc", namespace=namespace)
+    dc2_id = dc2.get("id")
+    assert dc2_id is not None
+    assert dc2 is not None
+    async with app_manager.config.db.async_session_maker() as session, session.begin():
+        # We do not have APIs exposed that will remove the slug so this is the only way to trigger this
+        stmt = select(EntitySlugORM).where(EntitySlugORM.data_connector_id == dc1_id)
+        scalars = await session.scalars(stmt)
+        res = scalars.one_or_none()
+        assert res is not None
+        await session.delete(res)
+        stmt = select(EntitySlugORM).where(EntitySlugORM.data_connector_id == dc2_id)
+        scalars = await session.scalars(stmt)
+        res = scalars.one_or_none()
+        assert res is not None
+        await session.delete(res)
+        await session.flush()
+    # The project namespace is still there
+    _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}/{proj_slug}", headers=user_headers)
+    assert response.status_code == 200
+    # The user or group namespace is untouched
+    _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}", headers=user_headers)
+    assert response.status_code == 200
+    # The project is still there
+    _, response = await sanic_client.get(f"/api/data/projects/{proj_id}", headers=user_headers)
+    assert response.status_code == 200
+    # The data connectors are gone
+    _, response = await sanic_client.get(f"/api/data/data_connectors/{dc1_id}", headers=user_headers)
+    assert response.status_code == 404
+    _, response = await sanic_client.get(f"/api/data/data_connectors/{dc2_id}", headers=user_headers)
+    assert response.status_code == 404
+
+
+@pytest.mark.asyncio
+async def test_cleanup_with_group_deletion(
+    create_project,
+    create_group,
+    user_headers,
+    sanic_client,
+    regular_user: UserInfo,
+    create_data_connector,
+) -> None:
+    grp = await create_group("grp1")
+    grp_id = grp.get("id")
+    assert grp_id is not None
+    grp_slug = grp.get("slug")
+    assert grp_slug is not None
+    prj = await create_project(name="prj1", namespace=grp_slug)
+    prj_id = prj.get("id")
+    assert prj_id is not None
+    prj_slug = prj.get("slug")
+    assert prj_slug is not None
+    dc1 = await create_data_connector(name="dc1", namespace=grp_slug)
+    dc1_id = dc1.get("id")
+    assert dc1_id is not None
+    dc2 = await create_data_connector(name="dc2", namespace=f"{grp_slug}/{prj_slug}")
+    dc2_id = dc2.get("id")
+    assert dc2_id is not None
+    dc3 = await create_data_connector(name="dc3", namespace=regular_user.namespace.path.serialize())
+    dc3_id = dc3.get("id")
+    assert dc3_id is not None
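+    # Deleting the group below is expected to cascade inside the database: the group namespace,
+    # the project under it and the data connectors dc1 and dc2 should all disappear, while dc3
+    # in the user namespace must survive.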
+    # Delete the group
+    _, response = await sanic_client.delete(f"/api/data/groups/{grp_slug}", headers=user_headers)
+    assert response.status_code == 204
+    _, response = await sanic_client.get(f"/api/data/groups/{grp_slug}", headers=user_headers)
+    assert response.status_code == 404
+    # The project namespace is not there
+    _, response = await sanic_client.get(f"/api/data/namespaces/{grp_slug}/{prj_slug}", headers=user_headers)
+    assert response.status_code == 404
+    # The group namespace is not there
+    _, response = await sanic_client.get(f"/api/data/namespaces/{grp_slug}", headers=user_headers)
+    assert response.status_code == 404
+    # The project is not there
+    _, response = await sanic_client.get(f"/api/data/projects/{prj_id}", headers=user_headers)
+    assert response.status_code == 404
+    # The group and project data connectors are gone
+    _, response = await sanic_client.get(f"/api/data/data_connectors/{dc1_id}", headers=user_headers)
+    assert response.status_code == 404
+    _, response = await sanic_client.get(f"/api/data/data_connectors/{dc2_id}", headers=user_headers)
+    assert response.status_code == 404
+    # The data connector in the user namespace is still there
+    _, response = await sanic_client.get(f"/api/data/data_connectors/{dc3_id}", headers=user_headers)
+    assert response.status_code == 200
diff --git a/test/bases/renku_data_services/data_api/test_notebooks.py b/test/bases/renku_data_services/data_api/test_notebooks.py
index 22e0c9d76..d8261eee3 100644
--- a/test/bases/renku_data_services/data_api/test_notebooks.py
+++ b/test/bases/renku_data_services/data_api/test_notebooks.py
@@ -1,23 +1,31 @@
 """Tests for notebook blueprints."""
 
 import asyncio
-import os
-from collections.abc import AsyncIterator
+import contextlib
+from collections.abc import AsyncGenerator, AsyncIterator, Generator
 from contextlib import suppress
+from datetime import timedelta
 from unittest.mock import MagicMock
 from uuid import uuid4
 
+import kr8s
 import pytest
 import pytest_asyncio
 from kr8s import NotFoundError
-from kr8s.asyncio.objects import Pod
 from sanic_testing.testing import SanicASGITestClient
 
+from renku_data_services.k8s.constants import DEFAULT_K8S_CLUSTER
+from renku_data_services.k8s.models import Cluster
+from renku_data_services.k8s_watcher import K8sWatcher, k8s_object_handler
 from renku_data_services.notebooks.api.classes.k8s_client import JupyterServerV1Alpha1Kr8s
+from renku_data_services.notebooks.constants import JUPYTER_SESSION_GVK
 
 from .utils import ClusterRequired, setup_amalthea
 
-os.environ["KUBECONFIG"] = ".k3d-config.yaml"
+
+@pytest.fixture(scope="module", autouse=True)
+def kubeconfig(monkeysession):
+    monkeysession.setenv("KUBECONFIG", ".k3d-config.yaml")
 
 
 @pytest.fixture
@@ -49,51 +57,15 @@ def pod_name(server_name: str) -> str:
     return f"{server_name}-0"
 
 
-@pytest_asyncio.fixture
-async def jupyter_server(renku_image: str, server_name: str, pod_name: str) -> AsyncIterator[JupyterServerV1Alpha1Kr8s]:
-    """Fake server to have the minimal set of objects for tests"""
-
-    server = await JupyterServerV1Alpha1Kr8s(
-        {
-            "metadata": {"name": server_name, "labels": {"renku.io/safe-username": "user"}},
-            "spec": {"jupyterServer": {"image": renku_image}, "routing": {"host": "locahost"}, "auth": {"token": ""}},
-        }
) - - await server.create() - pod = await Pod(dict(metadata=dict(name=pod_name))) - max_retries = 200 - sleep_seconds = 0.2 - retries = 0 - while True: - retries += 1 - pod_exists = await pod.exists() - if pod_exists: - break - if retries > max_retries: - raise ValueError( - f"The pod {pod_name} for the session {server_name} could not found even after {max_retries} " - f"retries with {sleep_seconds} seconds of sleep after each retry." - ) - await asyncio.sleep(sleep_seconds) - await pod.refresh() - await pod.wait("condition=Ready") - yield server - # NOTE: This is used also in tests that check if the server was properly stopped - # in this case the server will already be gone when we try to delete it in the cleanup here. - with suppress(NotFoundError): - await server.delete("Foreground") - - @pytest_asyncio.fixture() -async def practice_jupyter_server(renku_image: str, server_name: str) -> AsyncIterator[JupyterServerV1Alpha1Kr8s]: +async def jupyter_server(renku_image: str, server_name: str) -> AsyncIterator[JupyterServerV1Alpha1Kr8s]: """Fake server for non pod related tests""" server = await JupyterServerV1Alpha1Kr8s( { "metadata": { "name": server_name, - "labels": {"renku.io/safe-username": "user"}, + "labels": {"renku.io/safe-username": "user", "renku.io/userId": "user"}, "annotations": { "renku.io/branch": "dummy", "renku.io/commit-sha": "sha", @@ -103,7 +75,17 @@ async def practice_jupyter_server(renku_image: str, server_name: str) -> AsyncIt "renku.io/repository": "dummy", }, }, - "spec": {"jupyterServer": {"image": renku_image}}, + "spec": { + "jupyterServer": { + "image": renku_image, + "resources": { + "requests": { + "cpu": 0.1, + "memory": 100_000_000, + }, + }, + }, + }, } ) @@ -124,6 +106,7 @@ class AttributeDictionary(dict): """Enables accessing dictionary keys as attributes""" def __init__(self, dictionary): + super().__init__() for key, value in dictionary.items(): # TODO check if key is a valid identifier if key == "list": @@ -136,7 +119,7 @@ def __init__(self, dictionary): self[key] = value def list(self): - [value for _, value in self.items()] + return [value for _, value in self.items()] def __setitem__(self, k, v): if k == "list": @@ -146,32 +129,38 @@ def __setitem__(self, k, v): @pytest.fixture -def fake_gitlab_projects(): +def fake_gitlab_project_info(): + return AttributeDictionary( + { + "path": "my-test", + "path_with_namespace": "test-namespace/my-test", + "branches": {"main": AttributeDictionary({})}, + "commits": {"ee4b1c9fedc99abe5892ee95320bbd8471c5985b": AttributeDictionary({})}, + "id": 5407, + "http_url_to_repo": "https://gitlab-url.com/test-namespace/my-test.git", + "web_url": "https://gitlab-url.com/test-namespace/my-test", + } + ) + + +@pytest.fixture +def fake_gitlab_projects(fake_gitlab_project_info): class GitLabProject(AttributeDictionary): def __init__(self): super().__init__({}) def get(self, name, default=None): if name not in self: - return AttributeDictionary( - { - "path": "my-test", - "path_with_namespace": "test-namespace/my-test", - "branches": {"main": AttributeDictionary({})}, - "commits": {"ee4b1c9fedc99abe5892ee95320bbd8471c5985b": AttributeDictionary({})}, - "id": 5407, - "http_url_to_repo": "https://gitlab-url.com/test-namespace/my-test.git", - "web_url": "https://gitlab-url.com/test-namespace/my-test", - } - ) + return fake_gitlab_project_info return super().get(name, default) return GitLabProject() @pytest.fixture() -def fake_gitlab(mocker, fake_gitlab_projects): +def fake_gitlab(mocker, fake_gitlab_projects, 
fake_gitlab_project_info): gitlab = mocker.patch("renku_data_services.notebooks.api.classes.user.Gitlab") + get_project = mocker.patch("renku_data_services.notebooks.api.classes.user._get_project") gitlab_mock = MagicMock() gitlab_mock.auth = MagicMock() gitlab_mock.projects = fake_gitlab_projects @@ -180,6 +169,7 @@ def fake_gitlab(mocker, fake_gitlab_projects): ) gitlab_mock.url = "https://gitlab-url.com" gitlab.return_value = gitlab_mock + get_project.return_value = fake_gitlab_project_info return gitlab @@ -253,21 +243,47 @@ async def test_check_docker_image(sanic_client: SanicASGITestClient, user_header class TestNotebooks(ClusterRequired): @pytest.fixture(scope="class", autouse=True) - def amalthea(self, cluster) -> None: + def amalthea(self, cluster, app_manager) -> Generator[None, None]: if cluster is not None: setup_amalthea("amalthea-js", "amalthea", "0.12.2", cluster) + app_manager.config.nb_config._kr8s_api.push(asyncio.run(kr8s.asyncio.api())) + + yield + app_manager.config.nb_config._kr8s_api.pop() + + @pytest_asyncio.fixture(scope="class", autouse=True) + async def k8s_watcher(self, amalthea, app_manager) -> AsyncGenerator[None, None]: + clusters = [ + Cluster( + id=DEFAULT_K8S_CLUSTER, + namespace=app_manager.config.nb_config.k8s.renku_namespace, + api=app_manager.config.nb_config._kr8s_api.current, + ) + ] + + # sleep to give amalthea a chance to create the CRDs, otherwise the watcher can error out + await asyncio.sleep(1) + watcher = K8sWatcher( + handler=k8s_object_handler( + app_manager.config.nb_config.k8s_db_cache, app_manager.metrics, app_manager.rp_repo + ), + clusters={c.id: c for c in clusters}, + kinds=[JUPYTER_SESSION_GVK], + db_cache=app_manager.config.nb_config.k8s_db_cache, + ) + asyncio.create_task(watcher.start()) + yield + with contextlib.suppress(TimeoutError): + await watcher.stop(timeout=timedelta(seconds=1)) @pytest.mark.asyncio async def test_user_server_list( - self, - sanic_client: SanicASGITestClient, - request, - server_name, - jupyter_server, - authenticated_user_headers, + self, sanic_client: SanicASGITestClient, authenticated_user_headers, fake_gitlab, jupyter_server ): """Validate that the user server list endpoint answers correctly""" + await asyncio.sleep(1) # wait a bit for k8s events to be processed in the background + _, res = await sanic_client.get("/api/data/notebooks/servers", headers=authenticated_user_headers) assert res.status_code == 200, res.text @@ -275,40 +291,41 @@ async def test_user_server_list( assert len(res.json["servers"]) == 1 @pytest.mark.asyncio - @pytest.mark.parametrize( - "server_name_fixture,expected_status_code", [("unknown_server_name", 404), ("server_name", 200)] - ) + @pytest.mark.parametrize("server_exists,expected_status_code", [(False, 404), (True, 200)]) async def test_log_retrieval( self, sanic_client: SanicASGITestClient, - request, - server_name_fixture, - expected_status_code, + server_exists, jupyter_server, + expected_status_code, authenticated_user_headers, + fake_gitlab, ): """Validate that the logs endpoint answers correctly""" - server_name = request.getfixturevalue(server_name_fixture) + server_name = "unknown_server" + if server_exists: + server_name = jupyter_server.name + await asyncio.sleep(2) # wait a bit for k8s events to be processed in the background _, res = await sanic_client.get(f"/api/data/notebooks/logs/{server_name}", headers=authenticated_user_headers) assert res.status_code == expected_status_code, res.text @pytest.mark.asyncio - @pytest.mark.parametrize( - 
"server_name_fixture,expected_status_code", [("unknown_server_name", 404), ("server_name", 204)] - ) + @pytest.mark.parametrize("server_exists,expected_status_code", [(False, 204), (True, 204)]) async def test_stop_server( self, sanic_client: SanicASGITestClient, - request, - server_name_fixture, + server_exists, + jupyter_server, expected_status_code, - practice_jupyter_server, authenticated_user_headers, + fake_gitlab, ): - server_name = request.getfixturevalue(server_name_fixture) + server_name = "unknown_server" + if server_exists: + server_name = jupyter_server.name _, res = await sanic_client.delete( f"/api/data/notebooks/servers/{server_name}", headers=authenticated_user_headers @@ -318,57 +335,37 @@ async def test_stop_server( @pytest.mark.asyncio @pytest.mark.parametrize( - "server_name_fixture,expected_status_code, patch", - [("unknown_server_name", 404, {}), ("server_name", 200, {"state": "hibernated"})], + "server_exists,expected_status_code, patch", + [(False, 404, {}), (True, 200, {"state": "hibernated"})], ) async def test_patch_server( self, sanic_client: SanicASGITestClient, - request, - server_name_fixture, + server_exists, + jupyter_server, expected_status_code, patch, - practice_jupyter_server, authenticated_user_headers, + fake_gitlab, ): - server_name = request.getfixturevalue(server_name_fixture) + server_name = "unknown_server" + if server_exists: + server_name = jupyter_server.name + await asyncio.sleep(2) # wait a bit for k8s events to be processed in the background _, res = await sanic_client.patch( f"/api/data/notebooks/servers/{server_name}", json=patch, headers=authenticated_user_headers ) assert res.status_code == expected_status_code, res.text - @pytest.mark.asyncio - async def test_old_start_server(self, sanic_client: SanicASGITestClient, authenticated_user_headers, fake_gitlab): - data = { - "branch": "main", - "commit_sha": "ee4b1c9fedc99abe5892ee95320bbd8471c5985b", - "namespace": "test-namespace", - "project": "my-test", - "image": "alpine:3", - } - - _, res = await sanic_client.post( - "/api/data/notebooks/old/servers/", json=data, headers=authenticated_user_headers - ) - - assert res.status_code == 201, res.text - - server_name = res.json["name"] - _, res = await sanic_client.delete( - f"/api/data/notebooks/servers/{server_name}", headers=authenticated_user_headers - ) - - assert res.status_code == 204, res.text - @pytest.mark.asyncio async def test_start_server(self, sanic_client: SanicASGITestClient, authenticated_user_headers, fake_gitlab): data = { "branch": "main", "commit_sha": "ee4b1c9fedc99abe5892ee95320bbd8471c5985b", - "project_id": "test-namespace/my-test", - "launcher_id": "test_launcher", + "namespace": "test-ns-start-server", + "project": "my-test", "image": "alpine:3", } @@ -376,7 +373,7 @@ async def test_start_server(self, sanic_client: SanicASGITestClient, authenticat assert res.status_code == 201, res.text - server_name = res.json["name"] + server_name: str = res.json["name"] _, res = await sanic_client.delete( f"/api/data/notebooks/servers/{server_name}", headers=authenticated_user_headers ) diff --git a/test/bases/renku_data_services/data_api/test_parsing_old_server_options.py b/test/bases/renku_data_services/data_api/test_parsing_old_server_options.py index cc3236fc5..2194dbda7 100644 --- a/test/bases/renku_data_services/data_api/test_parsing_old_server_options.py +++ b/test/bases/renku_data_services/data_api/test_parsing_old_server_options.py @@ -3,7 +3,7 @@ from yaml import safe_load from renku_data_services.crc import models 
-from renku_data_services.data_api.server_options import ( +from renku_data_services.crc.server_options import ( ServerOptions, ServerOptionsDefaults, generate_default_resource_pool, diff --git a/test/bases/renku_data_services/data_api/test_platform_config.py b/test/bases/renku_data_services/data_api/test_platform_config.py index 49e1ed080..eaca8525f 100644 --- a/test/bases/renku_data_services/data_api/test_platform_config.py +++ b/test/bases/renku_data_services/data_api/test_platform_config.py @@ -3,12 +3,12 @@ import pytest from sanic_testing.testing import SanicASGITestClient -from renku_data_services.app_config import Config +from renku_data_services.data_api.dependencies import DependencyManager from test.bases.renku_data_services.data_api.utils import merge_headers @pytest.mark.asyncio -async def test_get_platform_config(app_config: Config, sanic_client: SanicASGITestClient) -> None: +async def test_get_platform_config(app_manager: DependencyManager, sanic_client: SanicASGITestClient) -> None: _, res = await sanic_client.get("/api/data/platform/config") assert res.status_code == 200, res.text diff --git a/test/bases/renku_data_services/data_api/test_projects.py b/test/bases/renku_data_services/data_api/test_projects.py index 73452fefb..07cb39fba 100644 --- a/test/bases/renku_data_services/data_api/test_projects.py +++ b/test/bases/renku_data_services/data_api/test_projects.py @@ -1,20 +1,19 @@ """Tests for projects blueprint.""" +import asyncio import time -from base64 import b64decode from typing import Any import pytest from httpx import Response +from sanic_testing.testing import SanicASGITestClient from sqlalchemy import select +from syrupy.filters import props from ulid import ULID -from components.renku_data_services.message_queue.avro_models.io.renku.events import v2 as avro_schema_v2 -from renku_data_services.app_config.config import Config -from renku_data_services.message_queue.avro_models.io.renku.events.v2.member_role import MemberRole -from renku_data_services.message_queue.models import deserialize_binary +from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.users.models import UserInfo -from test.bases.renku_data_services.data_api.utils import deserialize_event, merge_headers +from test.bases.renku_data_services.data_api.utils import merge_headers @pytest.fixture @@ -44,19 +43,20 @@ async def update_project_helper(project_id: str, headers: dict[str, str] | None @pytest.mark.asyncio -async def test_project_creation(sanic_client, user_headers, regular_user: UserInfo, app_config) -> None: +async def test_project_creation(sanic_client, user_headers, regular_user: UserInfo, app_manager) -> None: payload = { "name": "Renku Native Project", "slug": "project-slug", "description": "First Renku native project", "visibility": "public", "repositories": ["http://renkulab.io/repository-1", "http://renkulab.io/repository-2"], - "namespace": regular_user.namespace.slug, + "namespace": regular_user.namespace.path.serialize(), "keywords": ["keyword 1", "keyword.2", "keyword-3", "KEYWORD_4"], "documentation": "$\\sqrt(2)$", + "secrets_mount_directory": "/etc/renku_secrets", } - await app_config.event_repo.delete_all_events() + await app_manager.search_updates_repo.clear_all() _, response = await sanic_client.post("/api/data/projects", headers=user_headers, json=payload) @@ -75,23 +75,14 @@ async def test_project_creation(sanic_client, user_headers, regular_user: UserIn assert project["created_by"] == "user" assert "template_id" not in project 
or project["template_id"] is None assert project["is_template"] is False + assert project["secrets_mount_directory"] == "/etc/renku_secrets" + app_manager.metrics.project_created.assert_called_once() project_id = project["id"] - events = await app_config.event_repo.get_pending_events() - assert len(events) == 2 - project_created_event = next((e for e in events if e.get_message_type() == "project.created"), None) - assert project_created_event - created_event = deserialize_binary( - b64decode(project_created_event.payload["payload"]), avro_schema_v2.ProjectCreated - ) - assert created_event.name == payload["name"] - assert created_event.slug == payload["slug"] - assert created_event.repositories == payload["repositories"] - project_auth_added = next((e for e in events if e.get_message_type() == "projectAuth.added"), None) - assert project_auth_added - auth_event = deserialize_binary(b64decode(project_auth_added.payload["payload"]), avro_schema_v2.ProjectMemberAdded) - assert auth_event.userId == "user" - assert auth_event.role == MemberRole.OWNER + search_updates = await app_manager.search_updates_repo.select_next(10) + assert len(search_updates) == 1 + for e in search_updates: + assert e.entity_type == "Project" _, response = await sanic_client.get(f"/api/data/projects/{project_id}", headers=user_headers) @@ -119,7 +110,7 @@ async def test_project_creation(sanic_client, user_headers, regular_user: UserIn assert "template_id" not in project or project["template_id"] is None assert project["is_template"] is False - # same as above, but using namespace/slug to retrieve the pr + # same as above, but using namespace/slug to retrieve the project _, response = await sanic_client.get( f"/api/data/namespaces/{payload['namespace']}/projects/{payload['slug']}", params={"with_documentation": True}, @@ -128,9 +119,10 @@ async def test_project_creation(sanic_client, user_headers, regular_user: UserIn assert response.status_code == 200, response.text project = response.json + assert project["id"] == project_id assert project["name"] == "Renku Native Project" assert project["slug"] == "project-slug" - assert project["namespace"] == regular_user.namespace.slug + assert project["namespace"] == regular_user.namespace.path.serialize() assert project["documentation"] == "$\\sqrt(2)$" @@ -140,7 +132,7 @@ async def test_project_creation_with_default_values( ) -> None: payload = { "name": "Project with Default Values", - "namespace": regular_user.namespace.slug, + "namespace": regular_user.namespace.path.serialize(), } _, response = await sanic_client.post("/api/data/projects", headers=user_headers, json=payload) @@ -156,6 +148,7 @@ async def test_project_creation_with_default_values( assert project["created_by"] == "user" assert len(project["keywords"]) == 0 assert len(project["repositories"]) == 0 + assert project["secrets_mount_directory"] == "/secrets" @pytest.mark.asyncio @@ -167,17 +160,17 @@ async def test_create_project_with_invalid_visibility(sanic_client, user_headers @pytest.mark.asyncio -@pytest.mark.parametrize("keyword", ["invalid chars '", "Nön English"]) -async def test_create_project_with_invalid_keywords(sanic_client, user_headers, keyword) -> None: +async def test_create_project_with_invalid_keywords(sanic_client, user_headers) -> None: + keyword = "this keyword is way too long........................................................................" 
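+    # The keyword schema caps each keyword at 99 characters, so the over-long keyword above
+    # must be rejected with a 422 and the validation message asserted below.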
_, response = await sanic_client.post("/api/data/projects", headers=user_headers, json={"keywords": [keyword]}) assert response.status_code == 422, response.text - assert "String should match pattern '^[A-Za-z0-9\\s\\-_.]*$'" in response.json["error"]["message"] + assert "String should have at most 99 characters" in response.json["error"]["message"] @pytest.mark.asyncio async def test_project_creation_with_invalid_namespace(sanic_client, user_headers, member_1_user: UserInfo) -> None: - namespace = member_1_user.namespace.slug + namespace = member_1_user.namespace.path.serialize() _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}", headers=user_headers) assert response.status_code == 200, response.text payload = { @@ -193,7 +186,7 @@ async def test_project_creation_with_invalid_namespace(sanic_client, user_header @pytest.mark.asyncio async def test_project_creation_with_conflicting_slug(sanic_client, user_headers, regular_user) -> None: - namespace = regular_user.namespace.slug + namespace = regular_user.namespace.path.serialize() payload = { "name": "Existing project", "namespace": namespace, @@ -212,6 +205,42 @@ async def test_project_creation_with_conflicting_slug(sanic_client, user_headers assert response.status_code == 409, response.text +@pytest.mark.asyncio +async def test_project_creation_with_duplicate_repositories(sanic_client, user_headers, regular_user) -> None: + namespace = regular_user.namespace.path.serialize() + payload = { + "name": "My Project", + "namespace": namespace, + "repositories": [ + "https://github.com/SwissDataScienceCenter/renku-data-services.git", + "https://github.com/SwissDataScienceCenter/renku-data-services.git", + ], + } + + _, response = await sanic_client.post("/api/data/projects", headers=user_headers, json=payload) + + assert response.status_code == 201, response.text + project = response.json + assert project["repositories"] == ["https://github.com/SwissDataScienceCenter/renku-data-services.git"] + + +@pytest.mark.asyncio +async def test_project_creation_with_invalid_repository(sanic_client, user_headers, regular_user) -> None: + namespace = regular_user.namespace.path.serialize() + payload = { + "name": "My Project", + "namespace": namespace, + "repositories": [ + "git@github.com:SwissDataScienceCenter/renku-data-services.git", + ], + } + + _, response = await sanic_client.post("/api/data/projects", headers=user_headers, json=payload) + + assert response.status_code == 422, response.text + assert "is not a valid HTTP or HTTPS URL" in response.json["error"]["message"] + + @pytest.mark.asyncio async def test_get_a_project(create_project, get_project) -> None: # Create some projects @@ -226,7 +255,7 @@ async def test_get_a_project(create_project, get_project) -> None: @pytest.mark.asyncio -async def test_get_all_projects_with_pagination(create_project, sanic_client, user_headers) -> None: +async def test_get_all_projects_with_pagination(create_project, sanic_client, user_headers, snapshot) -> None: # Create some projects for i in range(1, 10): await create_project(f"Project {i}") @@ -256,6 +285,7 @@ async def test_get_all_projects_with_pagination(create_project, sanic_client, us assert response.headers["per-page"] == "4" assert response.headers["total"] == "9" assert response.headers["total-pages"] == "3" + assert response.json == snapshot(exclude=props("id", "creation_date", "updated_at", "etag")) @pytest.mark.asyncio @@ -328,7 +358,9 @@ async def test_result_is_sorted_by_creation_date(create_project, sanic_client, u 
@pytest.mark.asyncio -async def test_delete_project(create_project, sanic_client, user_headers, app_config) -> None: +async def test_delete_project(create_project, sanic_client, user_headers, app_manager) -> None: + await app_manager.search_updates_repo.clear_all() + # Create some projects await create_project("Project 1") await create_project("Project 2") @@ -342,14 +374,12 @@ async def test_delete_project(create_project, sanic_client, user_headers, app_co assert response.status_code == 204, response.text - events = await app_config.event_repo.get_pending_events() - assert len(events) == 15 - project_removed_event = next((e for e in events if e.get_message_type() == "project.removed"), None) - assert project_removed_event - removed_event = deserialize_binary( - b64decode(project_removed_event.payload["payload"]), avro_schema_v2.ProjectRemoved - ) - assert removed_event.id == project_id + # Check search updates + search_updates = await app_manager.search_updates_repo.select_next(20) + assert len(search_updates) == 5 + assert len(set([e.entity_id for e in search_updates])) == 5 + deleted_project = next(x for x in search_updates if x.entity_id == project["id"]) + assert deleted_project.payload == {"id": project["id"], "deleted": True} # Get all projects _, response = await sanic_client.get("/api/data/projects", headers=user_headers) @@ -359,7 +389,8 @@ async def test_delete_project(create_project, sanic_client, user_headers, app_co @pytest.mark.asyncio -async def test_patch_project(create_project, get_project, sanic_client, user_headers, app_config) -> None: +async def test_patch_project(create_project, get_project, sanic_client, user_headers, app_manager) -> None: + await app_manager.search_updates_repo.clear_all() # Create some projects await create_project("Project 1") project = await create_project("Project 2", repositories=["http://renkulab.io/repository-0"], keywords=["keyword"]) @@ -374,22 +405,17 @@ async def test_patch_project(create_project, get_project, sanic_client, user_hea "visibility": "public", "repositories": ["http://renkulab.io/repository-1", "http://renkulab.io/repository-2"], "documentation": "$\\infty$", + "secrets_mount_directory": "/etc/new/location", } project_id = project["id"] _, response = await sanic_client.patch(f"/api/data/projects/{project_id}", headers=headers, json=patch) assert response.status_code == 200, response.text - events = await app_config.event_repo.get_pending_events() - assert len(events) == 11 - project_updated_event = next((e for e in events if e.get_message_type() == "project.updated"), None) - assert project_updated_event - updated_event = deserialize_binary( - b64decode(project_updated_event.payload["payload"]), avro_schema_v2.ProjectUpdated - ) - assert updated_event.name == patch["name"] - assert updated_event.description == patch["description"] - assert updated_event.repositories == patch["repositories"] + # Check search updates + search_updates = await app_manager.search_updates_repo.select_next(20) + assert len(search_updates) == 3 + assert len(set([e.entity_id for e in search_updates])) == 3 # Get the project project = await get_project(project_id=project_id) @@ -404,6 +430,7 @@ async def test_patch_project(create_project, get_project, sanic_client, user_hea "http://renkulab.io/repository-2", } assert "documentation" not in project + assert project["secrets_mount_directory"] == "/etc/new/location" _, response = await sanic_client.get( f"/api/data/projects/{project_id}", params={"with_documentation": True}, headers=user_headers @@ 
-416,7 +443,7 @@ async def test_patch_project(create_project, get_project, sanic_client, user_hea @pytest.mark.asyncio async def test_keywords_are_not_modified_in_patch( - create_project, get_project, sanic_client, user_headers, app_config + create_project, get_project, sanic_client, user_headers, app_manager ) -> None: # Create some projects await create_project("Project 1") @@ -441,7 +468,7 @@ async def test_keywords_are_not_modified_in_patch( @pytest.mark.asyncio async def test_keywords_are_deleted_in_patch( - create_project, get_project, sanic_client, user_headers, app_config + create_project, get_project, sanic_client, user_headers, app_manager ) -> None: # Create some projects await create_project("Project 1") @@ -512,7 +539,7 @@ async def test_patch_visibility_to_public_shows_project( @pytest.mark.asyncio -@pytest.mark.parametrize("field", ["id", "slug", "created_by", "creation_date"]) +@pytest.mark.parametrize("field", ["id", "created_by", "creation_date"]) async def test_cannot_patch_reserved_fields(create_project, get_project, sanic_client, user_headers, field) -> None: project = await create_project("Project 1") original_value = project[field] @@ -559,7 +586,7 @@ async def test_cannot_patch_without_if_match_header(create_project, get_project, async def test_patch_project_invalid_namespace( create_project, sanic_client, user_headers, member_1_user: UserInfo ) -> None: - namespace = member_1_user.namespace.slug + namespace = member_1_user.namespace.path.serialize() _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}", headers=user_headers) assert response.status_code == 200, response.text project = await create_project("Project 1") @@ -603,6 +630,79 @@ async def test_patch_description_as_editor_and_keep_namespace_and_visibility( assert response.json.get("description") == "Updated description" +@pytest.mark.asyncio +async def test_patch_project_slug( + sanic_client, + create_project, + get_project, + user_headers, +) -> None: + await create_project("Project 1") + await create_project("Project 2") + project = await create_project("My project", documentation="Hello, World!") + project_id = project["id"] + namespace = project["namespace"] + old_slug = project["slug"] + await create_project("Project 3") + + # Patch a project + headers = merge_headers(user_headers, {"If-Match": project["etag"]}) + new_slug = "some-updated-slug" + patch = {"slug": new_slug} + _, response = await sanic_client.patch(f"/api/data/projects/{project_id}", headers=headers, json=patch) + + assert response.status_code == 200, response.text + + # Check that the project's slug has been updated + project = await get_project(project_id=project_id) + assert project["id"] == project_id + assert project["name"] == "My project" + assert project["namespace"] == namespace + assert project["slug"] == new_slug + + # Check that we can get the project with the new slug + _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}/projects/{new_slug}", headers=user_headers) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == project_id + + # Check that we can get the project with the old slug + _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}/projects/{old_slug}", headers=user_headers) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == project_id + _, response = await sanic_client.get( + 
f"/api/data/namespaces/{namespace}/projects/{old_slug}", + params={"with_documentation": True}, + headers=user_headers, + ) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == project_id + assert response.json.get("documentation") == "Hello, World!" + + +@pytest.mark.asyncio +async def test_patch_project_reset_secrets_mount_directory( + create_project, get_project, sanic_client, user_headers +) -> None: + project = await create_project("My Project", secrets_mount_directory="/etc/fancy/location") + assert project["secrets_mount_directory"] == "/etc/fancy/location" + + # Patch a project + user_headers.update({"If-Match": project["etag"]}) + patch = {"secrets_mount_directory": ""} + project_id = project["id"] + _, response = await sanic_client.patch(f"/api/data/projects/{project_id}", headers=user_headers, json=patch) + + assert response.status_code == 200, response.text + + # Get the project + project = await get_project(project_id=project_id) + + assert project["secrets_mount_directory"] == "/secrets" + + @pytest.mark.asyncio async def test_get_all_projects_for_specific_user( create_project, sanic_client, user_headers, admin_headers, unauthorized_headers @@ -693,7 +793,7 @@ async def test_get_projects_with_direct_membership(sanic_client, user_headers, m # Add member_1 to Project 2 roles = [{"id": member_1_user.id, "role": "editor"}] _, response = await sanic_client.patch( - f"/api/data/projects/{project_2["id"]}/members", headers=user_headers, json=roles + f"/api/data/projects/{project_2['id']}/members", headers=user_headers, json=roles ) assert response.status_code == 200, response.text @@ -759,7 +859,7 @@ async def test_add_project_members( sanic_client, regular_user, user_headers, - app_config, + app_manager, member_1_user: UserInfo, member_2_user: UserInfo, ) -> None: @@ -798,7 +898,9 @@ async def test_add_project_members( @pytest.mark.asyncio -async def test_delete_project_members(create_project, sanic_client, user_headers, app_config: Config) -> None: +async def test_delete_project_members( + create_project, sanic_client, user_headers, app_manager: DependencyManager +) -> None: project = await create_project("Project 1") project_id = project["id"] @@ -824,7 +926,7 @@ async def test_delete_project_members(create_project, sanic_client, user_headers @pytest.mark.asyncio -async def test_null_byte_middleware(sanic_client, user_headers, regular_user, app_config) -> None: +async def test_null_byte_middleware(sanic_client, user_headers, regular_user, app_manager) -> None: payload = { "name": "Renku Native \x00Project", "slug": "project-slug", @@ -1011,7 +1113,7 @@ async def test_get_project_permissions_cascading_from_group( @pytest.mark.asyncio async def test_project_slug_case( - app_config: Config, + app_manager: DependencyManager, create_project, create_group, sanic_client, @@ -1041,7 +1143,7 @@ async def test_project_slug_case( assert res.status_code == 422 # Change the slug of the project to be upper case in the DB uppercase_slug = "NEW_project_SLUG" - async with app_config.db.async_session_maker() as session, session.begin(): + async with app_manager.config.db.async_session_maker() as session, session.begin(): stmt = select(ProjectORM).where(ProjectORM.id == project_id) proj_orm = await session.scalar(stmt) assert proj_orm is not None @@ -1071,7 +1173,10 @@ async def test_project_slug_case( @pytest.mark.asyncio -async def test_project_copy_basics(sanic_client, app_config, user_headers, regular_user, create_project) -> 
None:
+async def test_project_copy_basics(
+    sanic_client, app_manager, user_headers, regular_user, create_project, snapshot
+) -> None:
+    await app_manager.search_updates_repo.clear_all()
     await create_project("Project 1")
     project = await create_project(
         "Project 2",
@@ -1079,6 +1184,7 @@ async def test_project_copy_basics(sanic_client, app_config, user_headers, regul
         keywords=["tag 1", "tag 2"],
         repositories=["http://repository-1.ch", "http://repository-2.ch"],
         visibility="public",
+        documentation="test documentation",
     )
     await create_project("Project 3")
     project_id = project["id"]
@@ -1086,35 +1192,32 @@ async def test_project_copy_basics(sanic_client, app_config, user_headers, regul
     payload = {
         "name": "Renku Native Project",
         "slug": "project-slug",
-        "namespace": regular_user.namespace.slug,
+        "namespace": regular_user.namespace.path.serialize(),
     }
 
-    await app_config.event_repo.delete_all_events()
-
     _, response = await sanic_client.post(f"/api/data/projects/{project_id}/copies", headers=user_headers, json=payload)
 
     assert response.status_code == 201, response.text
     copy_project = response.json
-    assert copy_project["name"] == "Renku Native Project"
-    assert copy_project["slug"] == "project-slug"
-    assert copy_project["created_by"] == "user"
-    assert copy_project["namespace"] == regular_user.namespace.slug
-    assert copy_project["description"] == "Template project"
-    assert copy_project["visibility"] == project["visibility"]
-    assert copy_project["keywords"] == ["tag 1", "tag 2"]
-    assert copy_project["repositories"] == project["repositories"]
-
-    events = await app_config.event_repo.get_pending_events()
-    assert len(events) == 2
-    project_created_event = next(e for e in events if e.get_message_type() == "project.created")
-    project_created = deserialize_event(project_created_event)
-    assert project_created.name == payload["name"]
-    assert project_created.slug == payload["slug"]
-    assert project_created.repositories == project["repositories"]
-    project_auth_added_event = next(e for e in events if e.get_message_type() == "projectAuth.added")
-    project_auth_added = deserialize_event(project_auth_added_event)
-    assert project_auth_added.userId == "user"
-    assert project_auth_added.role == MemberRole.OWNER
+    assert copy_project["namespace"] == regular_user.namespace.path.serialize()
+    assert copy_project["template_id"] == project_id
+    assert copy_project == snapshot(exclude=props("id", "updated_at", "creation_date", "etag", "template_id"))
+
+    _, response = await sanic_client.get(
+        f"/api/data/projects/{copy_project['id']}", params={"with_documentation": True}, headers=user_headers
+    )
+    assert response.status_code == 200, response.text
+    copy_project = response.json
+    assert copy_project == snapshot(exclude=props("id", "updated_at", "creation_date", "etag", "template_id"))
+
+    # Check search updates
+    search_updates = await app_manager.search_updates_repo.select_next(20)
+    assert len(search_updates) == 4
+    assert len(set([e.entity_type for e in search_updates])) == 1
+    assert search_updates[0].entity_type == "Project"
+    search_doc = next(x for x in search_updates if x.entity_id == copy_project["id"])
+    assert search_doc.payload["slug"] == "project-slug"
+    assert search_doc.payload["name"] == "Renku Native Project"
 
     project_id = copy_project["id"]
 
@@ -1125,7 +1228,7 @@ async def test_project_copy_basics(sanic_client, app_config, user_headers, regul
     assert copy_project["name"] == "Renku Native Project"
     assert copy_project["slug"] == "project-slug"
     assert copy_project["created_by"] == "user"
-    
assert copy_project["namespace"] == regular_user.namespace.slug + assert copy_project["namespace"] == regular_user.namespace.path.serialize() assert copy_project["description"] == "Template project" assert copy_project["visibility"] == "public" assert copy_project["keywords"] == ["tag 1", "tag 2"] @@ -1145,10 +1248,10 @@ async def test_project_copy_includes_session_launchers( project = await create_project("Project") project_id = project["id"] environment = await create_session_environment("Some environment") - launcher_1 = await create_session_launcher("Launcher 1", project["id"], environment={"id": environment["id"]}) - launcher_2 = await create_session_launcher("Launcher 2", project["id"], environment={"id": environment["id"]}) + launcher_1 = await create_session_launcher("Launcher 1", project_id, environment={"id": environment["id"]}) + launcher_2 = await create_session_launcher("Launcher 2", project_id, environment={"id": environment["id"]}) - copy_project = await create_project_copy(project_id, regular_user.namespace.slug, "Copy Project") + copy_project = await create_project_copy(project_id, regular_user.namespace.path.serialize(), "Copy Project") project_id = copy_project["id"] _, response = await sanic_client.get(f"/api/data/projects/{project_id}/session_launchers", headers=user_headers) @@ -1157,7 +1260,152 @@ async def test_project_copy_includes_session_launchers( assert {launcher["name"] for launcher in launchers} == {"Launcher 1", "Launcher 2"} assert launchers[0]["project_id"] == launchers[1]["project_id"] == project_id # NOTE: Check that new launchers are created - assert {launcher["id"] for launcher in launchers} != {launcher_1["id"], launcher_2["id"]} + assert not any({launcher["id"] in {launcher_1["id"], launcher_2["id"]} for launcher in launchers}) + # NOTE: Check that session environments are the same since they are global + assert launchers[0]["environment"]["id"] == launchers[1]["environment"]["id"] == environment["id"] + + +@pytest.mark.asyncio +async def test_project_copy_creates_new_custom_environment_instance( + sanic_client, + user_headers, + regular_user, + create_project, + create_session_launcher, + create_project_copy, + create_resource_pool, +) -> None: + project = await create_project("Project") + project_id = project["id"] + resource_pool = await create_resource_pool(admin=True) + launcher_with_custom_env = await create_session_launcher( + "Launcher", + project_id, + environment={ + "container_image": "some_image:some_tag", + "environment_kind": "CUSTOM", + "environment_image_source": "image", + "name": "custom_env", + "description": "Custom environment", + "port": 42, + "default_url": "/lab", + "uid": 2000, + "gid": 2000, + "working_directory": "/work", + "mount_directory": "/work", + "command": ["python"], + "args": ["script.py"], + }, + resource_class_id=resource_pool["classes"][0]["id"], + disk_storage=42, + env_variables=[{"name": "KEY_NUMBER_1", "value": "a value"}], + ) + + await asyncio.sleep(1) + + copy_project = await create_project_copy(project_id, regular_user.namespace.path.serialize(), "Copy Project") + project_id = copy_project["id"] + _, response = await sanic_client.get(f"/api/data/projects/{project_id}/session_launchers", headers=user_headers) + + assert response.status_code == 200, response.text + launcher = response.json[0] + # NOTE: Check that a new launcher is created + assert launcher["id"] != launcher_with_custom_env["id"] + assert launcher["project_id"] == project_id + assert launcher["name"] == launcher_with_custom_env["name"] + 
assert launcher["description"] == launcher_with_custom_env["description"] + assert launcher["resource_class_id"] == launcher_with_custom_env["resource_class_id"] + assert launcher["disk_storage"] == launcher_with_custom_env["disk_storage"] + assert launcher["creation_date"] != launcher_with_custom_env["creation_date"] + # NOTE: Check that a new environment is created + environment = launcher["environment"] + assert environment["id"] != launcher_with_custom_env["environment"]["id"] + assert environment["name"] == launcher_with_custom_env["environment"]["name"] + assert environment["creation_date"] != launcher_with_custom_env["environment"]["creation_date"] + assert environment["description"] == launcher_with_custom_env["environment"]["description"] + assert environment["container_image"] == launcher_with_custom_env["environment"]["container_image"] + assert environment["default_url"] == launcher_with_custom_env["environment"]["default_url"] + assert environment["uid"] == launcher_with_custom_env["environment"]["uid"] + assert environment["gid"] == launcher_with_custom_env["environment"]["gid"] + assert environment["working_directory"] == launcher_with_custom_env["environment"]["working_directory"] + assert environment["mount_directory"] == launcher_with_custom_env["environment"]["mount_directory"] + assert environment["port"] == launcher_with_custom_env["environment"]["port"] + assert environment["command"] == launcher_with_custom_env["environment"]["command"] + assert environment["args"] == launcher_with_custom_env["environment"]["args"] + assert environment["is_archived"] == launcher_with_custom_env["environment"]["is_archived"] + + +@pytest.mark.asyncio +async def test_project_copy_creates_new_build_and_environment_instances( + sanic_client, + user_headers, + regular_user, + create_project, + create_session_launcher, + create_project_copy, + create_resource_pool, + snapshot, +) -> None: + project = await create_project("Project") + project_id = project["id"] + resource_pool = await create_resource_pool(admin=True) + launcher = await create_session_launcher( + "Launcher", + project_id, + environment={ + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "environment_image_source": "build", + }, + resource_class_id=resource_pool["classes"][0]["id"], + disk_storage=42, + ) + + await asyncio.sleep(1) + + copy_project = await create_project_copy(project_id, regular_user.namespace.path.serialize(), "Copy Project") + copy_project_id = copy_project["id"] + _, response = await sanic_client.get( + f"/api/data/projects/{copy_project_id}/session_launchers", headers=user_headers + ) + + assert response.status_code == 200, response.text + copied_launcher = response.json[0] + # NOTE: Check that a new launcher is created + assert copied_launcher["id"] != launcher["id"] + assert copied_launcher["creation_date"] != launcher["creation_date"] + assert copied_launcher["project_id"] == copy_project_id + assert copied_launcher == snapshot(exclude=props("id", "creation_date", "environment", "project_id")) + # NOTE: Check that a new environment is created + environment = copied_launcher["environment"] + assert environment["id"] != launcher["environment"]["id"] + assert environment["creation_date"] != launcher["environment"]["creation_date"] + assert environment == snapshot(exclude=props("id", "creation_date")) + # NOTE: Check that build parameters are copied + build_parameters = environment["build_parameters"] + assert build_parameters == snapshot + + 
# Patch the build parameters to make sure that it doesn't change the original builder parameter + patch_payload = {"environment": {"build_parameters": {"repository": "new_repo"}}} + _, response = await sanic_client.patch( + f"/api/data/session_launchers/{copied_launcher['id']}", headers=user_headers, json=patch_payload + ) + assert response.status_code == 200, response.text + + _, response = await sanic_client.get(f"/api/data/projects/{project_id}/session_launchers", headers=user_headers) + + assert response.status_code == 200, response.text + original_build_parameters = response.json[0]["environment"]["build_parameters"] + assert original_build_parameters["repository"] == "https://github.com/some/repo" + + _, response = await sanic_client.get( + f"/api/data/projects/{copy_project_id}/session_launchers", headers=user_headers + ) + + assert response.status_code == 200, response.text + copy_build_parameters = response.json[0]["environment"]["build_parameters"] + assert copy_build_parameters["repository"] == "new_repo" @pytest.mark.asyncio @@ -1174,7 +1422,7 @@ async def test_project_copy_includes_data_connector_links( data_connector_1, link_1 = await create_data_connector_and_link_project("Data Connector 1", project_id=project_id) data_connector_2, link_2 = await create_data_connector_and_link_project("Data Connector 2", project_id=project_id) - copy_project = await create_project_copy(project_id, regular_user.namespace.slug, "Copy Project") + copy_project = await create_project_copy(project_id, regular_user.namespace.path.serialize(), "Copy Project") project_id = copy_project["id"] _, response = await sanic_client.get(f"/api/data/projects/{project_id}/data_connector_links", headers=user_headers) @@ -1186,6 +1434,49 @@ async def test_project_copy_includes_data_connector_links( assert {d["id"] for d in data_connector_links} != {link_1["id"], link_2["id"]} +@pytest.mark.asyncio +async def test_project_copy_includes_public_data_connector_links_owned_by_others( + sanic_client: SanicASGITestClient, + user_headers: dict[str, str], + regular_user: UserInfo, + member_1_headers: dict[str, str], + member_1_user: UserInfo, + member_2_headers: dict[str, str], + member_2_user: UserInfo, + create_project, + create_project_copy, + create_data_connector, + link_data_connector, +) -> None: + project = await create_project("Project", visibility="public") + project_id = project["id"] + dc1 = await create_data_connector("dc1", member_1_user, member_1_headers, visibility="public") + dc2 = await create_data_connector("dc2", member_1_user, member_1_headers, visibility="public") + assert "id" in dc1 + assert "id" in dc2 + link1_res = await link_data_connector(project_id, dc1["id"], user_headers) + link2_res = await link_data_connector(project_id, dc2["id"], user_headers) + link1 = link1_res.json + link2 = link2_res.json + + copy_project = await create_project_copy( + project_id, + member_2_user.namespace.path.serialize(), + "Copy Project", + user=member_2_user, + ) + project_copy_id = copy_project["id"] + _, response = await sanic_client.get( + f"/api/data/projects/{project_copy_id}/data_connector_links", headers=member_2_headers + ) + assert response.status_code == 200, response.text + data_connector_links = response.json + assert {d["data_connector_id"] for d in data_connector_links} == {dc1["id"], dc2["id"]} + assert data_connector_links[0]["project_id"] == data_connector_links[1]["project_id"] == project_copy_id + # NOTE: Check that new data connector links are created + assert {d["id"] for d in 
data_connector_links} != {link1["id"], link2["id"]} + + @pytest.mark.asyncio async def test_project_get_all_copies( sanic_client, admin_user, regular_user, admin_headers, user_headers, create_project, create_project_copy @@ -1195,10 +1486,10 @@ async def test_project_get_all_copies( await create_project("Project 3") project_id = project["id"] - copy_1 = await create_project_copy(project_id, regular_user.namespace.slug, "Copy 1") - copy_2 = await create_project_copy(project_id, admin_user.namespace.slug, "Copy 2", user=admin_user) + copy_1 = await create_project_copy(project_id, regular_user.namespace.path.serialize(), "Copy 1") + copy_2 = await create_project_copy(project_id, admin_user.namespace.path.serialize(), "Copy 2", user=admin_user) copy_3 = await create_project_copy( - project_id, admin_user.namespace.slug, "Copy 3", user=admin_user, visibility="public" + project_id, admin_user.namespace.path.serialize(), "Copy 3", user=admin_user, visibility="public" ) # NOTE: Admins can see all copies @@ -1223,24 +1514,24 @@ async def test_project_get_all_writable_copies( project = await create_project("Project") project_id = project["id"] - copy_1 = await create_project_copy(project_id, regular_user.namespace.slug, "Copy 1") + copy_1 = await create_project_copy(project_id, regular_user.namespace.path.serialize(), "Copy 1") copy_2 = await create_project_copy( project_id, - admin_user.namespace.slug, + admin_user.namespace.path.serialize(), "Copy 2", user=admin_user, visibility="public", ) copy_3 = await create_project_copy( project_id, - admin_user.namespace.slug, + admin_user.namespace.path.serialize(), "Copy 3", user=admin_user, members=[{"id": regular_user.id, "role": "viewer"}], ) copy_4 = await create_project_copy( project_id, - admin_user.namespace.slug, + admin_user.namespace.path.serialize(), "Copy 4", user=admin_user, members=[{"id": regular_user.id, "role": "editor"}], @@ -1266,8 +1557,8 @@ async def test_project_copies_are_not_deleted_when_template_is_deleted( project = await create_project("Template Project") project_id = project["id"] - copy_1 = await create_project_copy(project_id, regular_user.namespace.slug, "Copy 1") - copy_2 = await create_project_copy(project_id, regular_user.namespace.slug, "Copy 2") + copy_1 = await create_project_copy(project_id, regular_user.namespace.path.serialize(), "Copy 1") + copy_2 = await create_project_copy(project_id, regular_user.namespace.path.serialize(), "Copy 2") _, response = await sanic_client.delete(f"/api/data/projects/{project_id}", headers=user_headers) @@ -1287,8 +1578,12 @@ async def test_project_copy_and_set_visibility( project = await create_project("Template Project") project_id = project["id"] - public_copy = await create_project_copy(project_id, regular_user.namespace.slug, "Copy 1", visibility="public") - private_copy = await create_project_copy(project_id, regular_user.namespace.slug, "Copy 2", visibility="private") + public_copy = await create_project_copy( + project_id, regular_user.namespace.path.serialize(), "Copy 1", visibility="public" + ) + private_copy = await create_project_copy( + project_id, regular_user.namespace.path.serialize(), "Copy 2", visibility="private" + ) _, response = await sanic_client.get("/api/data/projects", headers=user_headers) @@ -1305,7 +1600,7 @@ async def test_project_copy_non_existing_project(sanic_client, user_headers, reg payload = { "name": "Renku Native Project", "slug": "project-slug", - "namespace": regular_user.namespace.slug, + "namespace": regular_user.namespace.path.serialize(), } _, 
response = await sanic_client.post(f"/api/data/projects/{project_id}/copies", headers=user_headers, json=payload) @@ -1321,7 +1616,7 @@ async def test_project_copy_invalid_project_id(sanic_client, user_headers, regul payload = { "name": "Renku Native Project", "slug": "project-slug", - "namespace": regular_user.namespace.slug, + "namespace": regular_user.namespace.path.serialize(), } _, response = await sanic_client.post(f"/api/data/projects/{project_id}/copies", headers=user_headers, json=payload) @@ -1337,7 +1632,7 @@ async def test_project_copy_with_no_access(sanic_client, user_headers, regular_u payload = { "name": "Renku Native Project", "slug": "project-slug", - "namespace": regular_user.namespace.slug, + "namespace": regular_user.namespace.path.serialize(), } _, response = await sanic_client.post(f"/api/data/projects/{project_id}/copies", headers=user_headers, json=payload) @@ -1362,19 +1657,18 @@ async def test_project_copy_succeeds_even_if_data_connector_is_inaccessible( environment = await create_session_environment("Environment") await create_session_launcher("Launcher", project["id"], environment={"id": environment["id"]}) # NOTE: Create a data connector that regular user cannot access - await create_data_connector_and_link_project("Connector", project_id=project_id, admin=True) + await create_data_connector_and_link_project("Admin Connector", project_id=project_id, admin=True) payload = { "name": "Copy Project", "slug": "project-slug", - "namespace": regular_user.namespace.slug, + "namespace": regular_user.namespace.path.serialize(), } _, response = await sanic_client.post(f"/api/data/projects/{project_id}/copies", headers=user_headers, json=payload) - # NOTE: The copy is created, but the status code indicates that one or more data connectors cannot be copied - assert response.status_code == 403, response.text - assert "The project was copied but there is no permission to copy data connectors" in response.text + # TODO: What should happen to DCs and DC links when you copy a project? 
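+    # A sketch of the assumed behavior (the TODO above is still open): the copy
+    # now succeeds outright and the inaccessible data connector is presumably
+    # skipped, rather than the whole request failing with a 403 as before.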
+ assert response.status_code == 201, response.text @pytest.mark.asyncio @@ -1403,7 +1697,7 @@ async def test_project_unlink_from_template_project( project = await create_project("Project") project_id = project["id"] - project = await create_project_copy(project_id, regular_user.namespace.slug, "Copy Project") + project = await create_project_copy(project_id, regular_user.namespace.path.serialize(), "Copy Project") project_id = project["id"] # NOTE: A null value won't change anything @@ -1426,3 +1720,135 @@ async def test_project_unlink_from_template_project( project = await get_project(project_id) assert "template_id" not in project or project["template_id"] is None + + +@pytest.mark.asyncio +async def test_get_project_after_group_moved( + create_project, + create_group, + sanic_client, + user_headers, +) -> None: + await create_project("Project 1") + await create_project("Project 2") + group = await create_group("test-group") + group_slug = group["slug"] + project = await create_project("My project", namespace=group_slug, documentation="Hello, World!") + project_id = project["id"] + await create_project("Project 3") + + new_group_slug = "test-group-updated" + patch = {"slug": new_group_slug} + _, response = await sanic_client.patch(f"/api/data/groups/{group_slug}", headers=user_headers, json=patch) + assert response.status_code == 200, response.text + + # Check that the project's namespace has been updated + _, response = await sanic_client.get(f"/api/data/projects/{project_id}", headers=user_headers) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == project_id + assert response.json.get("namespace") == new_group_slug + assert response.json.get("slug") == "my-project" + + # Check that we can get the project with the new namespace + _, response = await sanic_client.get( + f"/api/data/namespaces/{new_group_slug}/projects/{project['slug']}", headers=user_headers + ) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == project_id + + # Check that we can get the project with the old namespace + _, response = await sanic_client.get( + f"/api/data/namespaces/{group_slug}/projects/{project['slug']}", headers=user_headers + ) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == project_id + _, response = await sanic_client.get( + f"/api/data/namespaces/{group_slug}/projects/{project['slug']}", + params={"with_documentation": True}, + headers=user_headers, + ) + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == project_id + assert response.json.get("documentation") == "Hello, World!" 
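+
+
+# A minimal sketch (hypothetical helper, not used by the tests in this patch) of
+# the lookup pattern exercised above: after a group slug changes, the old and the
+# new namespace path are both expected to resolve to the same project.
+async def _assert_project_reachable(sanic_client, headers, namespace: str, slug: str, project_id: str) -> None:
+    _, response = await sanic_client.get(f"/api/data/namespaces/{namespace}/projects/{slug}", headers=headers)
+    assert response.status_code == 200, response.text
+    assert response.json.get("id") == project_id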
+ + +@pytest.mark.asyncio +async def test_migrate_v1_project( + sanic_client, + app_manager, + user_headers, + regular_user: UserInfo, +) -> None: + v1_id = 1122 + v1_project = { + "project": { + "name": "New Migrated Project", + "slug": "new-project-slug", + "namespace": regular_user.namespace.path.serialize(), + "description": "Old project for migration", + "repositories": ["http://old-repository.com"], + "visibility": "private", + "keywords": ["old", "project"], + }, + "session_launcher": { + "name": "My Renku Session :)", + "container_image": "renku/renkulab-py:3.10-0.18.1", + "default_url": "/lab", + }, + } + + _, response = await sanic_client.post( + f"/api/data/renku_v1_projects/{v1_id}/migrations", headers=user_headers, json=v1_project + ) + + assert response.status_code == 201, response.text + migrated_project = response.json + assert migrated_project["name"] == "New Migrated Project" + assert migrated_project["slug"] == "new-project-slug" + assert migrated_project["created_by"] == "user" + assert migrated_project["namespace"] == regular_user.namespace.path.serialize() + assert migrated_project["description"] == "Old project for migration" + assert migrated_project["visibility"] == "private" + assert migrated_project["keywords"] == ["old", "project"] + assert migrated_project["repositories"] == ["http://old-repository.com"] + + migrated_project_id = migrated_project["id"] + _, response = await sanic_client.get(f"/api/data/projects/{migrated_project_id}", headers=user_headers) + assert response.status_code == 200, response.text + migrated_project = response.json + assert migrated_project["name"] == "New Migrated Project" + assert migrated_project["slug"] == "new-project-slug" + assert migrated_project["created_by"] == "user" + assert migrated_project["namespace"] == regular_user.namespace.path.serialize() + assert migrated_project["description"] == "Old project for migration" + assert migrated_project["visibility"] == "private" + assert migrated_project["keywords"] == ["old", "project"] + assert migrated_project["repositories"] == ["http://old-repository.com"] + + _, response = await sanic_client.get(f"/api/data/renku_v1_projects/{v1_id}/migrations", headers=user_headers) + assert response.status_code == 200, response.text + migrated_project = response.json + assert migrated_project["name"] == "New Migrated Project" + assert migrated_project["slug"] == "new-project-slug" + assert migrated_project["created_by"] == "user" + assert migrated_project["namespace"] == regular_user.namespace.path.serialize() + assert migrated_project["description"] == "Old project for migration" + assert migrated_project["visibility"] == "private" + assert migrated_project["keywords"] == ["old", "project"] + assert migrated_project["repositories"] == ["http://old-repository.com"] + project_id = migrated_project["id"] + + _, response = await sanic_client.get(f"/api/data/projects/{project_id}/migration_info", headers=user_headers) + assert response.status_code == 200, response.text + migrated_project = response.json + assert migrated_project["v1_id"] == v1_id + assert migrated_project["project_id"] == project_id + + _, response = await sanic_client.get("/api/data/renku_v1_projects/migrations", headers=user_headers) + assert response.status_code == 200, response.text + migrated_projects = response.json + assert {project_migration["v1_id"] for project_migration in migrated_projects} == {1122} diff --git a/test/bases/renku_data_services/data_api/test_repositories.py 
b/test/bases/renku_data_services/data_api/test_repositories.py index de4b61f1a..50ec3833b 100644 --- a/test/bases/renku_data_services/data_api/test_repositories.py +++ b/test/bases/renku_data_services/data_api/test_repositories.py @@ -8,19 +8,19 @@ from sanic import Sanic from sanic_testing.testing import SanicASGITestClient -from renku_data_services.app_config import Config from renku_data_services.connected_services.dummy_async_oauth2_client import DummyAsyncOAuth2Client from renku_data_services.data_api.app import register_all_handlers +from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.migrations.core import run_migrations_for_app from test.utils import SanicReusableASGITestClient @pytest_asyncio.fixture(scope="session") -async def oauth2_test_client_setup(app_config: Config) -> SanicASGITestClient: - app_config.async_oauth2_client_class = DummyAsyncOAuth2Client - app_config.connected_services_repo.async_oauth2_client_class = DummyAsyncOAuth2Client - app = Sanic(app_config.app_name) - app = register_all_handlers(app, app_config) +async def oauth2_test_client_setup(app_manager: DependencyManager) -> SanicASGITestClient: + app_manager.async_oauth2_client_class = DummyAsyncOAuth2Client + app_manager.connected_services_repo.async_oauth2_client_class = DummyAsyncOAuth2Client + app = Sanic(app_manager.app_name) + app = register_all_handlers(app, app_manager) async with SanicReusableASGITestClient(app) as client: yield client @@ -88,9 +88,12 @@ async def create_oauth2_connection_helper(provider_id: str, **payload) -> dict[s @pytest.mark.asyncio async def test_get_repository_without_connection( - oauth2_test_client: SanicASGITestClient, user_headers, create_oauth2_provider + mocker, oauth2_test_client: SanicASGITestClient, user_headers, create_oauth2_provider ): """Test getting internal Gitlab repository.""" + http_client = mocker.patch("renku_data_services.repositories.db.HttpClient") + http_client.return_value = DummyAsyncOAuth2Client() + await create_oauth2_provider("provider_1") repository_url = "https://example.org/username/my_repo.git" @@ -139,3 +142,19 @@ async def test_get_one_repository_not_found( assert result.get("connection_id") == connection["id"] assert result.get("provider_id") == "provider_1" assert result.get("repository_metadata") is None + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "repository_url,status_code", + [ + ("https://github.com/SwissDataScienceCenter/renku.git", 200), + ("https://example.org/does-not-exist.git", 404), + ("http://foobar", 404), + ], +) +async def test_get_one_repository_probe(sanic_client: SanicASGITestClient, repository_url, status_code): + repository_url_param = quote_plus(repository_url) + _, response = await sanic_client.get(f"/api/data/repositories/{repository_url_param}/probe") + + assert response.status_code == status_code, response.text diff --git a/test/bases/renku_data_services/data_api/test_resource_pools.py b/test/bases/renku_data_services/data_api/test_resource_pools.py index a7d15ed77..76058a41e 100644 --- a/test/bases/renku_data_services/data_api/test_resource_pools.py +++ b/test/bases/renku_data_services/data_api/test_resource_pools.py @@ -7,55 +7,58 @@ from test.bases.renku_data_services.data_api.utils import create_rp +resource_pool_payload = [ + ( + { + "name": "test-name", + "classes": [ + { + "cpu": 1.0, + "memory": 10, + "gpu": 0, + "name": "test-class-name", + "max_storage": 100, + "default_storage": 1, + "default": True, + "node_affinities": [], + "tolerations": [], + } 
+ ], + "quota": {"cpu": 100, "memory": 100, "gpu": 0}, + "default": False, + "public": True, + "idle_threshold": 86400, + "hibernation_threshold": 99999, + "cluster_id": "change_me", + }, + 201, + ), + ( + { + "name": "test-name", + "classes": [ + { + "cpu": 1.0, + "memory": 10, + "gpu": 0, + "name": "test-class-name", + "max_storage": 100, + "default_storage": 1, + "default": True, + } + ], + "quota": "something", + "default": False, + "public": True, + }, + 422, + ), +] + @pytest.mark.parametrize( "payload,expected_status_code", - [ - ( - { - "name": "test-name", - "classes": [ - { - "cpu": 1.0, - "memory": 10, - "gpu": 0, - "name": "test-class-name", - "max_storage": 100, - "default_storage": 1, - "default": True, - "node_affinities": [], - "tolerations": [], - } - ], - "quota": {"cpu": 100, "memory": 100, "gpu": 0}, - "default": False, - "public": True, - "idle_threshold": 86400, - "hibernation_threshold": 99999, - }, - 201, - ), - ( - { - "name": "test-name", - "classes": [ - { - "cpu": 1.0, - "memory": 10, - "gpu": 0, - "name": "test-class-name", - "max_storage": 100, - "default_storage": 1, - "default": True, - } - ], - "quota": "something", - "default": False, - "public": True, - }, - 422, - ), - ], + resource_pool_payload, ) @pytest.mark.asyncio async def test_resource_pool_creation( @@ -63,10 +66,49 @@ async def test_resource_pool_creation( payload: dict[str, Any], expected_status_code: int, ) -> None: + if "cluster_id" in payload: + payload["cluster_id"] = None + _, res = await create_rp(payload, sanic_client) assert res.status_code == expected_status_code +@pytest.mark.parametrize( + "payload,expected_status_code", + resource_pool_payload, +) +@pytest.mark.asyncio +async def test_resource_pool_creation_with_cluster_ids( + sanic_client: SanicASGITestClient, + payload: dict[str, Any], + expected_status_code: int, +) -> None: + if "cluster_id" in payload: + _, res = await sanic_client.post( + "/api/data/clusters", + json={ + "name": "test-name", + "config_name": "test-class-name.yaml", + "session_protocol": "http", + "session_host": "localhost", + "session_port": 8080, + "session_path": "/renku-sessions", + "session_ingress_annotations": {}, + "session_tls_secret_name": "a-domain-name-tls", + }, + headers={"Authorization": 'Bearer {"is_admin": true}'}, + ) + payload["cluster_id"] = res.json["id"] + + _, res = await create_rp(payload, sanic_client) + assert res.status_code == expected_status_code + + if "cluster_id" in payload: + assert "cluster" in res.json + assert "id" in res.json["cluster"] + assert res.json["cluster"]["id"] == payload["cluster_id"] + + @pytest.mark.asyncio async def test_resource_pool_quotas( sanic_client: SanicASGITestClient, valid_resource_pool_payload: dict[str, Any] @@ -978,3 +1020,165 @@ async def test_delete_all_tolerations( ) assert res.status_code == 200 assert res.json == [] + + +resource_pool_payload = { + "name": "test-name", + "classes": [ + { + "cpu": 1.0, + "memory": 10, + "gpu": 0, + "name": "test-class-name", + "max_storage": 100, + "default_storage": 1, + "default": True, + "node_affinities": [], + "tolerations": [], + } + ], + "quota": {"cpu": 100.0, "memory": 100, "gpu": 0}, + "default": False, + "public": True, + "idle_threshold": 86400, + "hibernation_threshold": 99999, +} + +cluster_payload = { + "config_name": "a-filename.yaml", + "name": "test-cluster-post", +} + + +async def _resource_pools_request( + sanic_client: SanicASGITestClient, + method: str, + admin_headers: dict[str, str], + expected_status_code: int, + auth: bool, + 
resource_pool_id: int | None,
+    payload: dict | None,
+) -> None:
+    base_url = "/api/data/resource_pools"
+
+    input_payload = deepcopy(payload)
+    check_payload = None
+    if resource_pool_id is None:
+        tmp = deepcopy(resource_pool_payload)
+        if "cluster_id" in input_payload and input_payload["cluster_id"] == "replace-me":
+            _, res = await sanic_client.post("/api/data/clusters/", headers=admin_headers, json=cluster_payload)
+            assert res.status_code == 201, res.text
+
+            input_payload["cluster_id"] = res.json["id"]
+            tmp["cluster_id"] = res.json["id"]
+
+        _, res = await sanic_client.post(base_url, headers=admin_headers, json=tmp)
+        assert res.status_code == 201, res.text
+        rp = res.json
+        resource_pool_id = rp["id"]
+
+        if method == "PUT" and "id" not in input_payload["quota"]:
+            input_payload["quota"]["id"] = rp["quota"]["id"]
+
+        for i, c in enumerate(input_payload["classes"]):
+            if "id" not in c:
+                c["id"] = rp["classes"][i]["id"]
+
+        check_payload = deepcopy(input_payload)
+
+        if "id" not in check_payload:
+            check_payload["id"] = resource_pool_id
+        if "id" not in check_payload["quota"]:
+            check_payload["quota"]["id"] = rp["quota"]["id"]
+
+    url = f"{base_url}/{resource_pool_id}"
+
+    if auth:
+        _, res = await sanic_client.request(url=url, method=method, headers=admin_headers, json=input_payload)
+    else:
+        _, res = await sanic_client.request(url=url, method=method, json=input_payload)
+
+    assert res.status_code == expected_status_code, res.text
+    if res.is_success and check_payload is not None:
+        assert res.json == check_payload, res.json
+
+
+put_patch_common_test_inputs = [
+    (401, False, -1, None),
+    (422, True, -1, None),
+    (401, False, 0, None),
+    (422, True, 0, None),
+    (401, False, -1, resource_pool_payload),
+    (422, True, -1, resource_pool_payload),
+    (401, False, 0, resource_pool_payload),
+    (422, True, 0, resource_pool_payload),
+    (401, False, 100, resource_pool_payload),
+    (422, True, 100, resource_pool_payload),
+    (401, False, None, resource_pool_payload),
+    (200, True, None, resource_pool_payload),
+]
+
+
+@pytest.mark.parametrize("expected_status_code,auth,resource_pool_id,payload", put_patch_common_test_inputs)
+@pytest.mark.asyncio
+async def test_resource_pools_put(
+    sanic_client: SanicASGITestClient,
+    admin_headers: dict[str, str],
+    expected_status_code: int,
+    auth: bool,
+    resource_pool_id: int | None,
+    payload: dict | None,
+) -> None:
+    await _resource_pools_request(
+        sanic_client, "PUT", admin_headers, expected_status_code, auth, resource_pool_id, payload
+    )
+
+
+@pytest.mark.parametrize("expected_status_code,auth,resource_pool_id,payload", put_patch_common_test_inputs)
+@pytest.mark.asyncio
+async def test_resource_pools_patch(
+    sanic_client: SanicASGITestClient,
+    admin_headers: dict[str, str],
+    expected_status_code: int,
+    auth: bool,
+    resource_pool_id: int | None,
+    payload: dict | None,
+) -> None:
+    await _resource_pools_request(
+        sanic_client, "PATCH", admin_headers, expected_status_code, auth, resource_pool_id, payload
+    )
+
+
+@pytest.mark.parametrize(
+    "expected_status_code,auth,resource_pool_id",
+    [
+        (401, False, -1),
+        (422, True, -1),
+        (204, True, 0),
+        (204, True, 10),
+        (401, False, None),
+        (204, True, None),
+    ],
+)
+@pytest.mark.asyncio
+async def test_resource_pools_delete(
+    sanic_client: SanicASGITestClient,
+    admin_headers: dict[str, str],
+    expected_status_code: int,
+    auth: bool,
+    resource_pool_id: int | None,
+) -> None:
+    base_url = "/api/data/resource_pools"
+
+    if resource_pool_id is None:
+        _, res = await sanic_client.post(base_url, headers=admin_headers, json=resource_pool_payload)
+        assert res.status_code == 201, res.text
+        resource_pool_id = res.json["id"]
+
+    url = f"{base_url}/{resource_pool_id}"
+
+    if auth:
+        _, res = await sanic_client.delete(url, headers=admin_headers)
+    else:
+        _, res = await sanic_client.delete(url)
+    assert res.status_code == expected_status_code, res.text
diff --git a/test/bases/renku_data_services/data_api/test_schemathesis.py b/test/bases/renku_data_services/data_api/test_schemathesis.py
index a98d7d05a..f53faaa79 100644
--- a/test/bases/renku_data_services/data_api/test_schemathesis.py
+++ b/test/bases/renku_data_services/data_api/test_schemathesis.py
@@ -1,11 +1,14 @@
 import math
+import urllib.parse
 from datetime import timedelta
 
+import httpx
 import pytest
 import pytest_asyncio
 import schemathesis
 from hypothesis import HealthCheck, settings
 from sanic_testing.testing import SanicASGITestClient
+from schemathesis.checks import ALL_CHECKS
 from schemathesis.hooks import HookContext
 from schemathesis.specs.openapi.schemas import BaseOpenAPISchema
 
@@ -49,13 +52,10 @@ async def apispec(sanic_client: SanicASGITestClient) -> BaseOpenAPISchema:
 def filter_headers(context: HookContext, headers: dict[str, str] | None) -> bool:
     op = context.operation
     if headers is not None and op.method.upper() == "PATCH":
-        if_match = headers.get("If-Match")
-        if if_match and isinstance(if_match, str):
-            try:
-                if_match.encode("ascii")
-                return True
-            except UnicodeEncodeError:
-                return False
+        try:
+            [h.encode("ascii") for h in headers.values()]
+        except UnicodeEncodeError:
+            return False
     return True
 
 
@@ -64,7 +64,19 @@ def filter_headers(context: HookContext, headers: dict[str, str] | None) -> bool
 # and this crashes the server when it tries to validate the query.
 @schemathesis.hook
 def filter_query(context: HookContext, query: dict[str, str] | None) -> bool:
-    return query is None or "" not in query
+    op = context.operation
+    if op is None:
+        return True
+    if query:
+        client = httpx.Client()
+        req = client.build_request(op.method, op.full_path, params=query)
+        parsed_query = urllib.parse.parse_qs(req.url.query)
+        original_keys = set(query.keys())
+        parsed_keys = set(k.decode() for k in parsed_query)
+        if original_keys != parsed_keys:
+            # urllib.parse would drop parts of the query (e.g. parse_qs("a=") == {}), so the data sent would not match the test case
+            return False
+    return query is None or ("" not in query and "" not in query.values())
 
 
 schema = schemathesis.from_pytest_fixture(
@@ -81,10 +93,7 @@ def filter_query(context: HookContext, query: dict[str, str] | None) -> bool:
 ]
 
 # TODO: RE-enable schemathesis when CI setup for notebooks / sessions is ready
-EXCLUDE_PATH_PREFIXES = [
-    "/sessions",
-    "/notebooks",
-]
+EXCLUDE_PATH_PREFIXES = ["/sessions", "/notebooks"]
 
 
 @pytest.mark.schemathesis
@@ -93,7 +102,7 @@ def filter_query(context: HookContext, query: dict[str, str] | None) -> bool:
 @settings(max_examples=5, suppress_health_check=[HealthCheck.filter_too_much, HealthCheck.data_too_large])
 async def test_api_schemathesis(
     case: schemathesis.Case,
-    sanic_client: SanicASGITestClient,
+    sanic_client_with_solr: SanicASGITestClient,
     admin_headers: dict,
     requests_statistics: list[timedelta],
 ) -> None:
@@ -101,8 +110,15 @@
     if case.path.startswith(exclude_prefix):
         return
     req_kwargs = case.as_requests_kwargs(headers=admin_headers)
-    _, res = await sanic_client.request(**req_kwargs)
+    _, res = await sanic_client_with_solr.request(**req_kwargs)
     res.request.uri = str(res.url)
+    if all(slow[0] != case.path or slow[1] != 
case.method for slow in ALLOWED_SLOW_ENDPOINTS): requests_statistics.append(res.elapsed) - case.validate_response(res) + + checks = ALL_CHECKS + if req_kwargs.get("method") == "DELETE" and res.status_code == 204: + # schemathesis does not currently allow accepting status 204 for negative data, so we ignore that check + checks = tuple(c for c in checks if c.__name__ != "negative_data_rejection") + + case.validate_response(res, checks=checks) diff --git a/test/bases/renku_data_services/data_api/test_search.py b/test/bases/renku_data_services/data_api/test_search.py new file mode 100644 index 000000000..8e7796612 --- /dev/null +++ b/test/bases/renku_data_services/data_api/test_search.py @@ -0,0 +1,265 @@ +import pytest + +from renku_data_services.base_models.core import APIUser +from renku_data_services.data_connectors.apispec import DataConnector as ApiDataConnector +from renku_data_services.namespace.apispec import GroupResponse as ApiGroup +from renku_data_services.project.apispec import Project as ApiProject +from renku_data_services.search.apispec import ( + Group as SearchGroup, +) +from renku_data_services.search.apispec import ( + SearchDataConnector, + SearchEntity, + SearchProject, + SearchResult, +) +from renku_data_services.search.apispec import ( + User as SearchUser, +) +from renku_data_services.solr.entity_documents import EntityType +from renku_data_services.users.models import UserInfo +from test.bases.renku_data_services.data_api.conftest import ( + CreateDataConnectorCall, + CreateGroupCall, + CreateProjectCall, + CreateUserCall, + SearchQueryCall, + SearchReprovisionCall, +) + + +@pytest.mark.asyncio +async def test_direct_member_search( + create_user: CreateUserCall, + regular_user: UserInfo, + search_reprovision: SearchReprovisionCall, + create_project_model: CreateProjectCall, + create_group_model: CreateGroupCall, + search_query: SearchQueryCall, +) -> None: + # - users: mads, wout, reg, florian + # - group-lidl (owner=reg, editor=mads, viewer=wout) + # - project za (owner=mads, public, editor=florian) + # - project zb (owner=mads, private) + # - project zc (owner=mads, public, editor=wout) + # + # - group-visma (owner=reg, editor=wout, viewer=mads) + # - project ya (owner=wout, public) + # - project yb (owner=wout, private, viewer=florian) + + mads = await create_user(APIUser(id="mads-123", first_name="Mads", last_name="Pedersen")) + wout = await create_user(APIUser(id="wout-567", first_name="Wout", last_name="van Art")) + flor = await create_user(APIUser(id="flor-789", first_name="Florian", last_name="Lipowitz")) + gr_lidl = await create_group_model( + "Lidl-Trek", + members=[{"id": mads.id, "role": "editor"}, {"id": wout.id, "role": "viewer"}], + user=regular_user, + ) + gr_visma = await create_group_model( + "Visma LeaseABike", + members=[{"id": wout.id, "role": "editor"}, {"id": mads.id, "role": "viewer"}], + user=regular_user, + ) + + p1 = await create_project_model( + "project za", mads, visibility="public", members=[{"id": flor.id, "role": "editor"}], namespace=gr_lidl.slug + ) + p2 = await create_project_model("project zb", mads, visibility="private", namespace=gr_lidl.slug) + p3 = await create_project_model( + "project zc", mads, visibility="public", namespace=gr_lidl.slug, members=[{"id": wout.id, "role": "editor"}] + ) + p4 = await create_project_model("project ya", wout, visibility="public", namespace=gr_visma.slug) + p5 = await create_project_model( + "Project yb", wout, visibility="private", members=[{"id": flor.id, "role": "viewer"}], 
namespace=gr_visma.slug
+    )
+    await search_reprovision()
+
+    result = await search_query(f"namespace:{gr_lidl.slug}", user=flor)
+    assert_search_result(result, [p1, p3])
+
+    result = await search_query(f"namespace:{gr_lidl.slug}", user=wout)
+    assert_search_result(result, [p1, p2, p3])
+
+    result = await search_query(f"namespace:{gr_lidl.slug} direct_member:@{flor.namespace.path.first}", user=mads)
+    assert_search_result(result, [p1])
+
+    result = await search_query(f"namespace:{gr_lidl.slug} direct_member:@{wout.namespace.path.first}", user=mads)
+    assert_search_result(result, [p3])
+
+    result = await search_query(f"namespace:{gr_visma.slug} direct_member:@{flor.namespace.path.first}", user=wout)
+    assert_search_result(result, [p5])
+
+    result = await search_query(f"namespace:{gr_visma.slug} direct_member:@{flor.namespace.path.first}", user=mads)
+    assert_search_result(result, [p5])
+
+    result = await search_query(f"direct_member:@{wout.namespace.path.first}", user=regular_user)
+    assert_search_result(result, [gr_lidl, gr_visma, p3, p4, p5])
+
+    result = await search_query(f"direct_member:@{wout.namespace.path.first},@{mads.namespace.path.first}", user=mads)
+    assert_search_result(result, [p3, gr_visma, gr_lidl])
+
+    result = await search_query(f"inherited_member:@{wout.namespace.path.first}", user=regular_user)
+    assert_search_result(result, [gr_lidl, gr_visma, p1, p2, p3, p4, p5])
+
+
+@pytest.mark.asyncio
+async def test_inherited_member_search(
+    create_user: CreateUserCall,
+    regular_user: UserInfo,
+    search_reprovision: SearchReprovisionCall,
+    create_project_model: CreateProjectCall,
+    create_group_model: CreateGroupCall,
+    search_query: SearchQueryCall,
+) -> None:
+    mads = await create_user(APIUser(id="id-123", first_name="Mads", last_name="Pedersen"))
+    wout = await create_user(APIUser(id="id-567", first_name="Wout", last_name="van Art"))
+
+    gr_visma = await create_group_model("Visma", members=[{"id": wout.id, "role": "editor"}])
+    gr_lidl = await create_group_model("Lidl-Trek", members=[{"id": mads.id, "role": "viewer"}])
+
+    p1 = await create_project_model(name="private bike clean course 1 of 54", namespace=gr_visma.slug)
+    p2 = await create_project_model(
+        name="public bike clean course 42 of 54", namespace=gr_visma.slug, visibility="public"
+    )
+    p3 = await create_project_model(name="private get the bike dirty course 1/2", namespace=gr_lidl.slug)
+    p4 = await create_project_model(
+        name="public get the bike dirty course 2/2", namespace=gr_lidl.slug, visibility="public"
+    )
+    p5 = await create_project_model(name="public get the bike dirty course 2/2", visibility="private")
+
+    await search_reprovision()
+
+    ## Searching as 'regular_user' returns all entities, since this is the user implicitly used to create everything
+    result = await search_query(f"inherited_member:@{regular_user.namespace.path.first}", regular_user)
+
+    # 5 projects, 2 groups; users are excluded since there is no "membership" relation for them
+    assert_search_result(result, [p1, p2, p3, p4, p5, gr_visma, gr_lidl], check_order=False)
+
+    ## Searching as 'regular_user' peeking into another user's entities
+    result = await search_query(f"inherited_member:@{mads.namespace.path.first}", regular_user)
+    assert_search_result(result, [p3, p4, gr_lidl], check_order=False)
+
+    ## searching as mads shows own entities
+    result = await search_query(f"inherited_member:@{mads.namespace.path.first}", mads)
+    assert_search_result(result, [p3, p4, gr_lidl], check_order=False)
+
+    ## searching as wout shows own entities
+    result = await search_query(f"inherited_member:@{wout.namespace.path.first}", wout)
+    assert_search_result(result, [p1, p2, gr_visma], check_order=False)
+
+    ## mads inspecting wout's entities sees only wout's public ones
+    result = await search_query(f"inherited_member:@{wout.namespace.path.first}", mads)
+    assert_search_result(result, [p2, gr_visma], check_order=False)
+
+    ## searching as anonymous
+    result = await search_query(f"inherited_member:@{wout.namespace.path.first}")
+    assert_search_result(result, [p2, gr_visma], check_order=False)
+
+    ## with the username, anonymous can find every entity the user is "member of"
+    result = await search_query(f"inherited_member:@{regular_user.namespace.path.first}")
+    assert_search_result(result, [p2, p4, gr_visma, gr_lidl], check_order=False)
+
+
+@pytest.mark.asyncio
+async def test_projects(
+    search_reprovision: SearchReprovisionCall, create_project_model: CreateProjectCall, search_query: SearchQueryCall
+) -> None:
+    """More occurrences of a word should push results up."""
+    p1 = await create_project_model("Project Bike Z", visibility="public", description="a bike with a bike")
+
+    p2 = await create_project_model("Project Bike A", visibility="public")
+    p3 = await create_project_model("Project Bike R", visibility="public", description="a bike")
+    await search_reprovision()
+
+    result = await search_query("bike")
+    assert_search_result(result, [p1, p3, p2], check_order=True)
+
+
+@pytest.mark.asyncio
+async def test_distance(
+    search_reprovision: SearchReprovisionCall, create_project_model: CreateProjectCall, search_query: SearchQueryCall
+) -> None:
+    """Search should be lenient to simple typos, distance=2."""
+    p1 = await create_project_model("Project Bike Z", visibility="public", description="a bike with a bike")
+    await search_reprovision()
+
+    result = await search_query("mikin type:project")
+    assert result.items == []
+
+    result = await search_query("mike type:project")
+    assert result.items is not None
+    assert len(result.items) == 1
+    assert __entity_id(result.items[0]) == p1.id
+
+
+@pytest.mark.asyncio
+async def test_search_by_entity_type(
+    create_project_model: CreateProjectCall,
+    create_group_model: CreateGroupCall,
+    create_data_connector_model: CreateDataConnectorCall,
+    regular_user: UserInfo,
+    search_query: SearchQueryCall,
+    search_reprovision: SearchReprovisionCall,
+) -> None:
+    p1 = await create_project_model("Project Mine")
+    g1 = await create_group_model("Group Wine")
+    d1 = await create_data_connector_model("Data Zine", visibility="public")
+    await search_reprovision()
+
+    result = await search_query("type:dataconnector,project,group", regular_user)
+    assert_search_result(result, [p1, g1, d1], check_order=False)
+
+    for field in EntityType._member_map_.values():
+        qstr = [f"type:{field.value}", f"type:{field.value.upper()}", f"type:{field.value.lower()}"]
+        for q in qstr:
+            result = await 
            items = result.items or []
            assert len(items) >= 1, f"Invalid results for query '{q}': {items}"
            for item in items:
                assert item.root.type == field.value


def __entity_id(e: SearchEntity) -> str:
    match e.root:
        case SearchProject() as p:
            return p.id

        case SearchGroup() as g:
            return g.id

        case SearchDataConnector() as d:
            return d.id

        case SearchUser() as u:
            return u.id


def __api_entity_id(e: ApiProject | ApiGroup | ApiDataConnector) -> str:
    match e:
        case ApiProject() as p:
            return p.id
        case ApiGroup() as g:
            return g.id
        case ApiDataConnector() as d:
            return d.id


def assert_search_result(
    result: SearchResult, entities: list[ApiProject | ApiGroup | ApiDataConnector], check_order: bool = False
) -> None:
    items = [__entity_id(e) for e in result.items or []]
    expected = [__api_entity_id(e) for e in entities]
    if not check_order:
        items.sort()
        expected.sort()

    if len(items) < len(expected):
        missing = set(expected).difference(set(items))
        raise Exception(f"Some entities are missing in the search result: {missing}")

    if len(items) > len(expected):
        extra = set(items).difference(set(expected))
        raise Exception(f"There are more results than expected: {extra}")

    for r, e in zip(items, expected, strict=True):
        assert r == e, f"Unexpected element (result={r}, expected={e}) in {items} vs {expected}"
diff --git a/test/bases/renku_data_services/data_api/test_secret.py b/test/bases/renku_data_services/data_api/test_secret.py
index e47304247..41beb2ebf 100644
--- a/test/bases/renku_data_services/data_api/test_secret.py
+++ b/test/bases/renku_data_services/data_api/test_secret.py
@@ -2,7 +2,7 @@
 
 import time
 from base64 import b64decode
-from datetime import datetime, timedelta
+from datetime import UTC, datetime, timedelta
 from typing import Any
 
 import pytest
@@ -13,6 +13,7 @@
 from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId
 from renku_data_services.secrets.core import rotate_encryption_keys, rotate_single_encryption_key
 from renku_data_services.secrets.models import Secret, SecretKind
+from renku_data_services.secrets_storage_api.dependencies import DependencyManager
 from renku_data_services.users import apispec
 from renku_data_services.utils.cryptography import (
     decrypt_rsa,
@@ -26,9 +27,14 @@
 @pytest.fixture
 def create_secret(sanic_client: SanicASGITestClient, user_headers):
     async def create_secret_helper(
-        name: str, value: str, kind: str = "general", expiration_timestamp: str = None
+        name: str, value: str, kind: str = "general", default_filename: str | None = None, expiration_timestamp: str | None = None
     ) -> dict[str, Any]:
-        payload = {"name": name, "value": value, "kind": kind, "expiration_timestamp": expiration_timestamp}
+        payload = {"name": name, "value": value, "kind": kind}
+        if default_filename:
+            payload["default_filename"] = default_filename
+        if expiration_timestamp:
+            payload["expiration_timestamp"] = expiration_timestamp
+
         _, response = await sanic_client.post("/api/data/user/secrets", headers=user_headers, json=payload)
@@ -50,9 +56,19 @@ async def test_create_secrets(sanic_client: SanicASGITestClient, user_headers, k
     assert response.status_code == 201, response.text
     assert response.json is not None
-    assert response.json.keys() == {"id", "name", "kind", "expiration_timestamp", "modification_date"}
-    assert response.json["name"] == "my-secret"
+    assert response.json.keys() == {
+        "id",
+        "name",
+        "default_filename",
+        "modification_date",
"kind", + "session_secret_slot_ids", + "data_connector_ids", + "expiration_timestamp", + } assert response.json["id"] is not None + assert response.json["name"] == "my-secret" + assert response.json["default_filename"] is not None + assert response.json["modification_date"] is not None assert response.json["kind"] == kind assert response.json["expiration_timestamp"] is None assert response.json["modification_date"] is not None @@ -263,14 +280,14 @@ async def test_anonymous_users_cannot_create_secrets(sanic_client: SanicASGITest async def test_secret_encryption_decryption( sanic_client: SanicASGITestClient, secrets_sanic_client: SanicASGITestClient, - secrets_storage_app_config, + secrets_storage_app_manager, user_headers, create_secret, ) -> None: """Test adding a secret and decrypting it in the secret service.""" - secret1 = await create_secret("secret-1", "value-1") + secret1 = await create_secret("secret-1", "value-1", default_filename="secret-1") secret1_id = secret1["id"] - secret2 = await create_secret("secret-2", "value-2") + secret2 = await create_secret("secret-2", "value-2", default_filename="secret-2") secret2_id = secret2["id"] payload = { @@ -289,8 +306,8 @@ async def test_secret_encryption_decryption( _, response = await secrets_sanic_client.post("/api/secrets/kubernetes", headers=user_headers, json=payload) assert response.status_code == 201 - assert "test-secret" in secrets_storage_app_config.core_client.secrets - k8s_secret = secrets_storage_app_config.core_client.secrets["test-secret"].data + assert "test-secret" in secrets_storage_app_manager.core_client.secrets + k8s_secret = secrets_storage_app_manager.core_client.secrets["test-secret"].data assert k8s_secret.keys() == {"secret-1", "secret-2"} _, response = await sanic_client.get("/api/data/user/secret_key", headers=user_headers) @@ -306,7 +323,7 @@ async def test_secret_encryption_decryption( async def test_secret_encryption_decryption_with_key_mapping( sanic_client: SanicASGITestClient, secrets_sanic_client: SanicASGITestClient, - secrets_storage_app_config, + secrets_storage_app_manager, user_headers, create_secret, ) -> None: @@ -315,11 +332,13 @@ async def test_secret_encryption_decryption_with_key_mapping( secret1_id = secret1["id"] secret2 = await create_secret("secret-2", "value-2") secret2_id = secret2["id"] + secret3 = await create_secret("secret-3", "value-3") + secret3_id = secret3["id"] payload = { "name": "test-secret", "namespace": "test-namespace", - "secret_ids": [secret1_id, secret2_id], + "secret_ids": [secret1_id, secret2_id, secret3_id], "owner_references": [ { "apiVersion": "amalthea.dev/v1alpha1", @@ -331,14 +350,15 @@ async def test_secret_encryption_decryption_with_key_mapping( "key_mapping": { secret1_id: "access_key_id", secret2_id: "secret_access_key", + secret3_id: ["secret-3-one", "secret-3-two"], }, } _, response = await secrets_sanic_client.post("/api/secrets/kubernetes", headers=user_headers, json=payload) assert response.status_code == 201 - assert "test-secret" in secrets_storage_app_config.core_client.secrets - k8s_secret = secrets_storage_app_config.core_client.secrets["test-secret"].data - assert k8s_secret.keys() == {"access_key_id", "secret_access_key"} + assert "test-secret" in secrets_storage_app_manager.core_client.secrets + k8s_secret = secrets_storage_app_manager.core_client.secrets["test-secret"].data + assert k8s_secret.keys() == {"access_key_id", "secret_access_key", "secret-3-one", "secret-3-two"} _, response = await sanic_client.get("/api/data/user/secret_key", 
headers=user_headers) assert response.status_code == 200 @@ -347,6 +367,8 @@ async def test_secret_encryption_decryption_with_key_mapping( assert decrypt_string(secret_key.encode(), "user", b64decode(k8s_secret["access_key_id"])) == "value-1" assert decrypt_string(secret_key.encode(), "user", b64decode(k8s_secret["secret_access_key"])) == "value-2" + assert decrypt_string(secret_key.encode(), "user", b64decode(k8s_secret["secret-3-one"])) == "value-3" + assert decrypt_string(secret_key.encode(), "user", b64decode(k8s_secret["secret-3-two"])) == "value-3" # NOTE: Test missing secret_id in key mapping payload["key_mapping"] = {secret1_id: "access_key_id"} @@ -357,7 +379,7 @@ async def test_secret_encryption_decryption_with_key_mapping( assert response.json["error"]["message"] == "Key mapping must include all requested secret IDs" # NOTE: Test duplicated key mapping - payload["key_mapping"] = {secret1_id: "access_key_id", secret2_id: "access_key_id"} + payload["key_mapping"] = {secret1_id: "access_key_id", secret2_id: "access_key_id", secret3_id: "secret-3"} _, response = await secrets_sanic_client.post("/api/secrets/kubernetes", headers=user_headers, json=payload) @@ -380,10 +402,14 @@ async def test_single_secret_rotation(): secret = Secret( id=ULID(), - name="test_secret", + name="My secret", + default_filename="test_secret", encrypted_value=encrypted_value, encrypted_key=encrypted_key, kind=SecretKind.general, + modification_date=datetime.now(tz=UTC), + session_secret_slot_ids=[], + data_connector_ids=[], ) rotated_secret = await rotate_single_encryption_key(secret, user_id, new_key, old_key) @@ -404,7 +430,9 @@ async def test_single_secret_rotation(): @pytest.mark.asyncio -async def test_secret_rotation(sanic_client, secrets_storage_app_config, create_secret, user_headers, users): +async def test_secret_rotation( + sanic_client, secrets_storage_app_manager: DependencyManager, create_secret, user_headers, users +): """Test rotating multiple secrets.""" for i in range(10): @@ -415,12 +443,12 @@ async def test_secret_rotation(sanic_client, secrets_storage_app_config, create_ await rotate_encryption_keys( admin, new_key, - secrets_storage_app_config.secrets_service_private_key, - secrets_storage_app_config.user_secrets_repo, + secrets_storage_app_manager.config.secrets.private_key, + secrets_storage_app_manager.user_secrets_repo, batch_size=5, ) - secrets = [s async for s in secrets_storage_app_config.user_secrets_repo.get_all_secrets_batched(admin, 100)] + secrets = [s async for s in secrets_storage_app_manager.user_secrets_repo.get_all_secrets_batched(admin, 100)] batch = secrets[0] assert len(batch) == 10 @@ -434,3 +462,24 @@ async def test_secret_rotation(sanic_client, secrets_storage_app_config, create_ decrypted_value = decrypt_string(new_encryption_key, users[1].id, secret.encrypted_value).encode() # type: ignore decrypted_value = decrypt_string(secret_key.encode(), users[1].id, decrypted_value) assert f"secret-{decrypted_value}" == secret.name + + +@pytest.mark.asyncio +async def test_patch_user_secret(sanic_client: SanicASGITestClient, user_headers, create_secret) -> None: + secret = await create_secret("a-secret", "value-2") + secret_id = secret["id"] + + payload = {"name": "A very important secret", "default_filename": "my-secret.txt"} + + _, response = await sanic_client.patch(f"/api/data/user/secrets/{secret_id}", headers=user_headers, json=payload) + + assert response.status_code == 200, response.text + + _, response = await 
sanic_client.get(f"/api/data/user/secrets/{secret_id}", headers=user_headers) + + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json["id"] == secret_id + assert "value" not in response.json + assert response.json.get("name") == "A very important secret" + assert response.json.get("default_filename") == "my-secret.txt" diff --git a/test/bases/renku_data_services/data_api/test_session_secrets.py b/test/bases/renku_data_services/data_api/test_session_secrets.py new file mode 100644 index 000000000..c5d2a48f1 --- /dev/null +++ b/test/bases/renku_data_services/data_api/test_session_secrets.py @@ -0,0 +1,628 @@ +"""Tests for session secrets blueprint.""" + +from typing import Any + +import pytest +import pytest_asyncio +from sanic_testing.testing import SanicASGITestClient +from ulid import ULID + +from renku_data_services.users.models import UserInfo +from test.bases.renku_data_services.data_api.utils import merge_headers + + +@pytest_asyncio.fixture +async def create_session_secret_slot(sanic_client: SanicASGITestClient, regular_user: UserInfo, user_headers): + async def create_session_secret_slot_helper( + project_id: str, filename: str, user: UserInfo | None = None, headers: dict[str, str] | None = None, **payload + ) -> dict[str, Any]: + user = user or regular_user + headers = headers or user_headers + secret_slot_payload = {"project_id": project_id, "filename": filename, "description": "A secret slot"} + secret_slot_payload.update(payload) + + _, response = await sanic_client.post( + "/api/data/session_secret_slots", headers=headers, json=secret_slot_payload + ) + + assert response.status_code == 201, response.text + return response.json + + return create_session_secret_slot_helper + + +@pytest.mark.asyncio +async def test_post_session_secret_slot(sanic_client: SanicASGITestClient, create_project, user_headers) -> None: + project = await create_project("My project") + project_id = project["id"] + + payload = { + "project_id": project_id, + "filename": "test_secret", + "name": "My secret", + "description": "This is a secret slot.", + } + _, response = await sanic_client.post("/api/data/session_secret_slots", headers=user_headers, json=payload) + + assert response.status_code == 201, response.text + assert response.json is not None + secret_slot = response.json + assert secret_slot.get("filename") == "test_secret" + assert secret_slot.get("name") == "My secret" + assert secret_slot.get("description") == "This is a secret slot." 
+ + +@pytest.mark.asyncio +async def test_post_session_secret_slot_with_minimal_payload( + sanic_client: SanicASGITestClient, create_project, user_headers +) -> None: + project = await create_project("My project") + project_id = project["id"] + + payload = { + "project_id": project_id, + "filename": "test_secret", + } + _, response = await sanic_client.post("/api/data/session_secret_slots", headers=user_headers, json=payload) + + assert response.status_code == 201, response.text + assert response.json is not None + secret_slot = response.json + assert secret_slot.get("filename") == "test_secret" + assert secret_slot.get("name") == "test_secret" + assert secret_slot.get("description") is None + + +@pytest.mark.asyncio +async def test_post_session_secret_slot_with_invalid_project_id( + sanic_client: SanicASGITestClient, create_project, user_headers +) -> None: + project_id = str(ULID()) + + payload = { + "project_id": project_id, + "filename": "test_secret", + "name": "My secret", + "description": "This is a secret slot.", + } + _, response = await sanic_client.post("/api/data/session_secret_slots", headers=user_headers, json=payload) + + assert response.status_code == 404, response.text + + +@pytest.mark.asyncio +async def test_post_session_secret_slot_with_unauthorized_project( + sanic_client: SanicASGITestClient, create_project, user_headers +) -> None: + project = await create_project("My project", admin=True) + project_id = project["id"] + + payload = { + "project_id": project_id, + "filename": "test_secret", + "name": "My secret", + "description": "This is a secret slot.", + } + _, response = await sanic_client.post("/api/data/session_secret_slots", headers=user_headers, json=payload) + + assert response.status_code == 404, response.text + + +@pytest.mark.asyncio +async def test_post_session_secret_slot_with_invalid_filename( + sanic_client: SanicASGITestClient, create_project, user_headers +) -> None: + project = await create_project("My project") + project_id = project["id"] + + payload = { + "project_id": project_id, + "filename": "test/secret", + } + _, response = await sanic_client.post("/api/data/session_secret_slots", headers=user_headers, json=payload) + + assert response.status_code == 422, response.text + assert "filename: String should match pattern" in response.json["error"]["message"] + + +@pytest.mark.asyncio +async def test_post_session_secret_slot_with_conflicting_filename( + sanic_client: SanicASGITestClient, create_project, user_headers +) -> None: + project = await create_project("My project") + project_id = project["id"] + payload = { + "project_id": project_id, + "name": "Existing secret", + "filename": "test_secret", + } + _, response = await sanic_client.post("/api/data/session_secret_slots", headers=user_headers, json=payload) + assert response.status_code == 201, response.text + + payload = { + "project_id": project_id, + "filename": "test_secret", + } + _, response = await sanic_client.post("/api/data/session_secret_slots", headers=user_headers, json=payload) + + assert response.status_code == 409, response.text + + +@pytest.mark.asyncio +async def test_get_project_session_secret_slots( + sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers +) -> None: + project = await create_project("My project") + project_id = project["id"] + + for i in range(1, 10): + await create_session_secret_slot(project_id, f"secret_slot_{i}") + + _, response = await sanic_client.get(f"/api/data/projects/{project_id}/session_secret_slots", 
headers=user_headers) + + assert response.status_code == 200, response.text + assert response.json is not None + secret_slots = response.json + assert {secret_slot["filename"] for secret_slot in secret_slots} == {f"secret_slot_{i}" for i in range(1, 10)} + + +@pytest.mark.asyncio +async def test_get_one_session_secret_slot( + sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers +) -> None: + project = await create_project("My project") + project_id = project["id"] + secret_slot = await create_session_secret_slot(project_id, "test_secret") + secret_slot_id = secret_slot["id"] + + _, response = await sanic_client.get(f"/api/data/session_secret_slots/{secret_slot_id}", headers=user_headers) + + assert response.status_code == 200, response.text + assert response.json is not None + secret_slot = response.json + assert secret_slot.keys() == {"id", "project_id", "name", "description", "filename", "etag"} + assert secret_slot.get("id") == secret_slot_id + assert secret_slot.get("project_id") == project_id + assert secret_slot.get("filename") == "test_secret" + assert secret_slot.get("name") == "test_secret" + assert secret_slot.get("description") == "A secret slot" + assert secret_slot.get("etag") is not None + + +@pytest.mark.asyncio +@pytest.mark.parametrize("headers_name", ["unauthorized_headers", "member_1_headers"]) +async def test_get_one_session_secret_slot_unauthorized( + sanic_client: SanicASGITestClient, + create_project, + create_session_secret_slot, + headers_name, + request, + unauthorized_headers, + member_1_headers, +) -> None: + project = await create_project("My project") + project_id = project["id"] + secret_slot = await create_session_secret_slot(project_id, "test_secret") + secret_slot_id = secret_slot["id"] + + headers = request.getfixturevalue(headers_name) + _, response = await sanic_client.get(f"/api/data/session_secret_slots/{secret_slot_id}", headers=headers) + + assert response.status_code == 404, response.text + + +@pytest.mark.asyncio +async def test_patch_session_secret_slot( + sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers +) -> None: + project = await create_project("My project") + project_id = project["id"] + secret_slot = await create_session_secret_slot(project_id, "test_secret") + secret_slot_id = secret_slot["id"] + + headers = merge_headers(user_headers, {"If-Match": secret_slot["etag"]}) + patch = { + "name": "New Name", + "description": "Updated session secret slot", + "filename": "new_filename", + } + + _, response = await sanic_client.patch( + f"/api/data/session_secret_slots/{secret_slot_id}", headers=headers, json=patch + ) + + assert response.status_code == 200, response.text + assert response.json is not None + secret_slot = response.json + assert secret_slot.get("id") == secret_slot_id + assert secret_slot.get("project_id") == project_id + assert secret_slot.get("filename") == "new_filename" + assert secret_slot.get("name") == "New Name" + assert secret_slot.get("description") == "Updated session secret slot" + + +@pytest.mark.asyncio +@pytest.mark.parametrize("field", ["id", "project_id"]) +async def test_patch_session_secret_slot_reserved_fields_are_forbidden( + sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers, field +) -> None: + project = await create_project("My project") + project_id = project["id"] + secret_slot = await create_session_secret_slot(project_id, "test_secret") + secret_slot_id = secret_slot["id"] + 
original_value = secret_slot[field]
+
+    headers = merge_headers(user_headers, {"If-Match": secret_slot["etag"]})
+    patch = {
+        field: "new-value",
+    }
+    _, response = await sanic_client.patch(
+        f"/api/data/session_secret_slots/{secret_slot_id}", headers=headers, json=patch
+    )
+
+    assert response.status_code == 422, response.text
+    assert f"{field}: Extra inputs are not permitted" in response.text
+
+    # Check that the field's value didn't change
+    _, response = await sanic_client.get(f"/api/data/session_secret_slots/{secret_slot_id}", headers=user_headers)
+    assert response.status_code == 200, response.text
+    secret_slot = response.json
+    assert secret_slot[field] == original_value
+
+
+@pytest.mark.asyncio
+async def test_patch_session_secret_slot_without_if_match_header(
+    sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers
+) -> None:
+    project = await create_project("My project")
+    project_id = project["id"]
+    secret_slot = await create_session_secret_slot(project_id, "test_secret")
+    secret_slot_id = secret_slot["id"]
+    original_value = secret_slot["name"]
+
+    patch = {
+        "name": "New Name",
+    }
+    _, response = await sanic_client.patch(
+        f"/api/data/session_secret_slots/{secret_slot_id}", headers=user_headers, json=patch
+    )
+
+    assert response.status_code == 428, response.text
+    assert "If-Match header not provided" in response.text
+
+    # Check that the field's value didn't change
+    _, response = await sanic_client.get(f"/api/data/session_secret_slots/{secret_slot_id}", headers=user_headers)
+    assert response.status_code == 200, response.text
+    secret_slot = response.json
+    assert secret_slot["name"] == original_value
+
+
+@pytest.mark.asyncio
+async def test_patch_session_secret_slot_with_invalid_filename(
+    sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers
+) -> None:
+    project = await create_project("My project")
+    project_id = project["id"]
+    secret_slot = await create_session_secret_slot(project_id, "test_secret")
+    secret_slot_id = secret_slot["id"]
+    original_value = secret_slot["filename"]
+
+    headers = merge_headers(user_headers, {"If-Match": secret_slot["etag"]})
+    patch = {
+        "filename": "test/secret",
+    }
+    _, response = await sanic_client.patch(
+        f"/api/data/session_secret_slots/{secret_slot_id}", headers=headers, json=patch
+    )
+
+    assert response.status_code == 422, response.text
+    assert "filename: String should match pattern" in response.json["error"]["message"]
+
+    # Check that the field's value didn't change
+    _, response = await sanic_client.get(f"/api/data/session_secret_slots/{secret_slot_id}", headers=user_headers)
+    assert response.status_code == 200, response.text
+    secret_slot = response.json
+    assert secret_slot["filename"] == original_value
+
+
+@pytest.mark.asyncio
+async def test_patch_session_secret_slot_with_conflicting_filename(
+    sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers
+) -> None:
+    project = await create_project("My project")
+    project_id = project["id"]
+    await create_session_secret_slot(project_id, "existing_filename")
+    secret_slot = await create_session_secret_slot(project_id, "test_secret")
+    secret_slot_id = secret_slot["id"]
+    original_value = secret_slot["filename"]
+
+    headers = merge_headers(user_headers, {"If-Match": secret_slot["etag"]})
+    patch = {
+        "filename": "existing_filename",
+    }
+    _, response = await sanic_client.patch(
+        f"/api/data/session_secret_slots/{secret_slot_id}", headers=headers,
json=patch + ) + + assert response.status_code == 409, response.text + + # Check that the field's value didn't change + _, response = await sanic_client.get(f"/api/data/session_secret_slots/{secret_slot_id}", headers=user_headers) + assert response.status_code == 200, response.text + secret_slot = response.json + assert secret_slot["filename"] == original_value + + +@pytest.mark.asyncio +async def test_delete_session_secret_slot( + sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers +) -> None: + project = await create_project("My project") + project_id = project["id"] + await create_session_secret_slot(project_id, "test_secret_1") + secret_slot = await create_session_secret_slot(project_id, "test_secret_2") + await create_session_secret_slot(project_id, "test_secret_3") + secret_slot_id = secret_slot["id"] + + _, response = await sanic_client.delete(f"/api/data/session_secret_slots/{secret_slot_id}", headers=user_headers) + + assert response.status_code == 204, response.text + + _, response = await sanic_client.get(f"/api/data/projects/{project_id}/session_secret_slots", headers=user_headers) + + assert response.status_code == 200, response.text + assert {secret_slot["filename"] for secret_slot in response.json} == {"test_secret_1", "test_secret_3"} + + +@pytest.mark.asyncio +async def test_patch_session_secrets_with_existing_user_secret( + sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers +) -> None: + project = await create_project("My project") + project_id = project["id"] + secret_slot = await create_session_secret_slot(project_id, "test_secret") + secret_slot_id = secret_slot["id"] + payload = {"name": "my-user-secret", "value": "a secret value", "kind": "general"} + _, response = await sanic_client.post("/api/data/user/secrets", headers=user_headers, json=payload) + assert response.status_code == 201, response.text + user_secret = response.json + user_secret_id = user_secret["id"] + + patch = [{"secret_slot_id": secret_slot_id, "secret_id": user_secret_id}] + _, response = await sanic_client.patch( + f"/api/data/projects/{project_id}/session_secrets", headers=user_headers, json=patch + ) + + assert response.status_code == 200, response.text + assert response.json is not None + session_secrets = response.json + assert len(session_secrets) == 1 + assert session_secrets[0].get("secret_slot") is not None + assert session_secrets[0]["secret_slot"].get("id") == secret_slot_id + assert session_secrets[0]["secret_slot"].get("name") == secret_slot["name"] + assert session_secrets[0]["secret_slot"].get("filename") == secret_slot["filename"] + assert session_secrets[0].get("secret_id") == user_secret_id + + # Check that the secrets are returned from a GET request + _, response = await sanic_client.get(f"/api/data/projects/{project_id}/session_secrets", headers=user_headers) + assert response.status_code == 200, response.json + assert response.json is not None + session_secrets = response.json + assert len(session_secrets) == 1 + assert session_secrets[0].get("secret_slot") is not None + assert session_secrets[0]["secret_slot"].get("id") == secret_slot_id + assert session_secrets[0].get("secret_id") == user_secret_id + + # Check that the secret slot is referenced from the user secret + _, response = await sanic_client.get(f"/api/data/user/secrets/{user_secret_id}", headers=user_headers) + assert response.status_code == 200, response.json + assert response.json is not None + assert 
response.json.get("session_secret_slot_ids") is not None
+    assert set(response.json.get("session_secret_slot_ids")) == {secret_slot_id}
+
+
+@pytest.mark.asyncio
+async def test_patch_session_secrets_with_new_secret_value(
+    sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers
+) -> None:
+    project = await create_project("My project")
+    project_id = project["id"]
+    secret_slot = await create_session_secret_slot(project_id, "test_secret")
+    secret_slot_id = secret_slot["id"]
+
+    patch = [{"secret_slot_id": secret_slot_id, "value": "a new secret value"}]
+    _, response = await sanic_client.patch(
+        f"/api/data/projects/{project_id}/session_secrets", headers=user_headers, json=patch
+    )
+
+    assert response.status_code == 200, response.text
+    assert response.json is not None
+    session_secrets = response.json
+    assert len(session_secrets) == 1
+    assert session_secrets[0].get("secret_slot") is not None
+    assert session_secrets[0]["secret_slot"].get("id") == secret_slot_id
+    assert session_secrets[0]["secret_slot"].get("name") == secret_slot["name"]
+    assert session_secrets[0]["secret_slot"].get("filename") == secret_slot["filename"]
+    assert session_secrets[0].get("secret_id") is not None
+
+    # Check that the secrets are returned from a GET request
+    _, response = await sanic_client.get("/api/data/user/secrets", headers=user_headers)
+    assert response.status_code == 200, response.text
+    user_secrets = response.json
+    new_user_secret = user_secrets[0]
+    _, response = await sanic_client.get(f"/api/data/projects/{project_id}/session_secrets", headers=user_headers)
+    assert response.status_code == 200, response.json
+    assert response.json is not None
+    session_secrets = response.json
+    assert len(session_secrets) == 1
+    assert session_secrets[0].get("secret_slot") is not None
+    assert session_secrets[0]["secret_slot"].get("id") == secret_slot_id
+    assert session_secrets[0].get("secret_id") == new_user_secret["id"]
+
+    # Check that the secret slot is referenced from the user secret
+    _, response = await sanic_client.get(f"/api/data/user/secrets/{new_user_secret['id']}", headers=user_headers)
+    assert response.status_code == 200, response.json
+    assert response.json is not None
+    assert response.json.get("session_secret_slot_ids") is not None
+    assert set(response.json.get("session_secret_slot_ids")) == {secret_slot_id}
+
+
+@pytest.mark.asyncio
+async def test_patch_session_secrets_update_with_another_user_secret(
+    sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers
+) -> None:
+    project = await create_project("My project")
+    project_id = project["id"]
+    secret_slot = await create_session_secret_slot(project_id, "test_secret")
+    secret_slot_id = secret_slot["id"]
+    patch = [{"secret_slot_id": secret_slot_id, "value": "a new secret value"}]
+    _, response = await sanic_client.patch(
+        f"/api/data/projects/{project_id}/session_secrets", headers=user_headers, json=patch
+    )
+    assert response.status_code == 200, response.json
+    session_secrets = response.json
+
+    payload = {"name": "my-user-secret", "value": "another secret value", "kind": "general"}
+    _, response = await sanic_client.post("/api/data/user/secrets", headers=user_headers, json=payload)
+    assert response.status_code == 201, response.text
+    replacement_user_secret = response.json
+    replacement_user_secret_id = replacement_user_secret["id"]
+
+    patch = [{"secret_slot_id": secret_slot_id, "secret_id": replacement_user_secret_id}]
+    _, response =
await sanic_client.patch( + f"/api/data/projects/{project_id}/session_secrets", headers=user_headers, json=patch + ) + + assert response.status_code == 200, response.text + assert response.json is not None + session_secrets = response.json + assert len(session_secrets) == 1 + assert session_secrets[0].get("secret_slot") is not None + assert session_secrets[0]["secret_slot"].get("id") == secret_slot_id + assert session_secrets[0]["secret_slot"].get("name") == secret_slot["name"] + assert session_secrets[0]["secret_slot"].get("filename") == secret_slot["filename"] + assert session_secrets[0].get("secret_id") == replacement_user_secret_id + + # Check that the secrets are returned from a GET request + _, response = await sanic_client.get(f"/api/data/projects/{project_id}/session_secrets", headers=user_headers) + assert response.status_code == 200, response.json + assert response.json is not None + session_secrets = response.json + assert len(session_secrets) == 1 + assert session_secrets[0].get("secret_slot") is not None + assert session_secrets[0]["secret_slot"].get("id") == secret_slot_id + assert session_secrets[0].get("secret_id") == replacement_user_secret_id + + +@pytest.mark.asyncio +async def test_patch_session_secrets_update_with_a_new_secret_value( + sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers +) -> None: + project = await create_project("My project") + project_id = project["id"] + secret_slot = await create_session_secret_slot(project_id, "test_secret") + secret_slot_id = secret_slot["id"] + patch = [{"secret_slot_id": secret_slot_id, "value": "a new secret value"}] + _, response = await sanic_client.patch( + f"/api/data/projects/{project_id}/session_secrets", headers=user_headers, json=patch + ) + assert response.status_code == 200, response.json + session_secrets = response.json + _, response = await sanic_client.get("/api/data/user/secrets", headers=user_headers) + assert response.status_code == 200, response.text + user_secrets = response.json + existing_user_secret = user_secrets[0] + + patch = [{"secret_slot_id": secret_slot_id, "value": "an updated secret value"}] + _, response = await sanic_client.patch( + f"/api/data/projects/{project_id}/session_secrets", headers=user_headers, json=patch + ) + + assert response.status_code == 200, response.text + assert response.json is not None + session_secrets = response.json + assert len(session_secrets) == 1 + assert session_secrets[0].get("secret_slot") is not None + assert session_secrets[0]["secret_slot"].get("id") == secret_slot_id + assert session_secrets[0]["secret_slot"].get("name") == secret_slot["name"] + assert session_secrets[0]["secret_slot"].get("filename") == secret_slot["filename"] + assert session_secrets[0].get("secret_id") == existing_user_secret["id"] + + # Check that the secrets are returned from a GET request + _, response = await sanic_client.get(f"/api/data/projects/{project_id}/session_secrets", headers=user_headers) + assert response.status_code == 200, response.json + assert response.json is not None + session_secrets = response.json + assert len(session_secrets) == 1 + assert session_secrets[0].get("secret_slot") is not None + assert session_secrets[0]["secret_slot"].get("id") == secret_slot_id + assert session_secrets[0].get("secret_id") == existing_user_secret["id"] + + +@pytest.mark.asyncio +async def test_patch_session_secrets_unlink_secret_with_null( + sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers +) -> None: + 
project = await create_project("My project")
+    project_id = project["id"]
+    secret_slot = await create_session_secret_slot(project_id, "test_secret")
+    secret_slot_id = secret_slot["id"]
+    patch = [{"secret_slot_id": secret_slot_id, "value": "a new secret value"}]
+    _, response = await sanic_client.patch(
+        f"/api/data/projects/{project_id}/session_secrets", headers=user_headers, json=patch
+    )
+    assert response.status_code == 200, response.json
+    session_secrets = response.json
+    _, response = await sanic_client.get("/api/data/user/secrets", headers=user_headers)
+    assert response.status_code == 200, response.text
+    user_secrets = response.json
+    existing_user_secret = user_secrets[0]
+
+    patch = [{"secret_slot_id": secret_slot_id, "value": None}]
+    _, response = await sanic_client.patch(
+        f"/api/data/projects/{project_id}/session_secrets", headers=user_headers, json=patch
+    )
+
+    assert response.status_code == 200, response.text
+    assert response.json is not None
+    session_secrets = response.json
+    assert len(session_secrets) == 0
+
+    # Check that no session secrets are returned from a GET request
+    _, response = await sanic_client.get(f"/api/data/projects/{project_id}/session_secrets", headers=user_headers)
+    assert response.status_code == 200, response.json
+    assert response.json is not None
+    session_secrets = response.json
+    assert len(session_secrets) == 0
+    # Check that the user secret has been preserved
+    _, response = await sanic_client.get("/api/data/user/secrets", headers=user_headers)
+    assert response.status_code == 200, response.text
+    user_secrets = response.json
+    assert len(user_secrets) == 1
+    assert {s["id"] for s in user_secrets} == {existing_user_secret["id"]}
+
+
+@pytest.mark.asyncio
+async def test_delete_session_secrets(
+    sanic_client: SanicASGITestClient, create_project, create_session_secret_slot, user_headers
+) -> None:
+    project = await create_project("My project")
+    project_id = project["id"]
+    secret_slot = await create_session_secret_slot(project_id, "test_secret")
+    secret_slot_id = secret_slot["id"]
+    patch = [{"secret_slot_id": secret_slot_id, "value": "a new secret value"}]
+    _, response = await sanic_client.patch(
+        f"/api/data/projects/{project_id}/session_secrets", headers=user_headers, json=patch
+    )
+    assert response.status_code == 200, response.json
+
+    _, response = await sanic_client.delete(f"/api/data/projects/{project_id}/session_secrets", headers=user_headers)
+
+    assert response.status_code == 204, response.text
+
+    # Check that no session secrets are returned from a GET request
+    _, response = await sanic_client.get(f"/api/data/projects/{project_id}/session_secrets", headers=user_headers)
+    assert response.status_code == 200, response.json
+    assert response.json is not None
+    session_secrets = response.json
+    assert len(session_secrets) == 0
diff --git a/test/bases/renku_data_services/data_api/test_sessions.py b/test/bases/renku_data_services/data_api/test_sessions.py
index bd2dfc0e1..7ef585363 100644
--- a/test/bases/renku_data_services/data_api/test_sessions.py
+++ b/test/bases/renku_data_services/data_api/test_sessions.py
@@ -6,9 +6,12 @@
 
 import pytest
 from pytest import FixtureRequest
 from sanic_testing.testing import SanicASGITestClient, TestingResponse
+from syrupy.filters import props
 
-from renku_data_services.app_config.config import Config
+from renku_data_services import errors
 from renku_data_services.crc.apispec import ResourcePool
+from renku_data_services.data_api.dependencies import DependencyManager
+from renku_data_services.session.models import EnvVar
 from renku_data_services.users.models import UserInfo
 
 
@@ -17,13 +20,14 @@ def launch_session(
     sanic_client: SanicASGITestClient,
     user_headers: dict,
     regular_user: UserInfo,
-    app_config: Config,
+    app_manager: DependencyManager,
     request: FixtureRequest,
     event_loop: AbstractEventLoop,
 ):
     async def launch_session_helper(
-        payload: dict, headers: dict = user_headers, user: UserInfo = regular_user
+        payload: dict, headers: dict | None = None, user: UserInfo = regular_user
     ) -> TestingResponse:
+        headers = headers or user_headers
         _, res = await sanic_client.post("/api/data/sessions", headers=headers, json=payload)
         assert res.status_code == 201, res.text
         assert res.json is not None
@@ -31,7 +35,9 @@ async def launch_session_helper(
         session_id: str = res.json.get("name", "unknown")
 
         def cleanup():
-            event_loop.run_until_complete(app_config.nb_config.k8s_v2_client.delete_server(session_id, user.id))
+            event_loop.run_until_complete(
+                app_manager.config.nb_config.k8s_v2_client.delete_session(session_id, user.id)
+            )
 
         # request.addfinalizer(cleanup)
         return res
@@ -41,11 +47,12 @@ def cleanup():
 
 @pytest.mark.asyncio
 async def test_get_all_session_environments(
-    sanic_client: SanicASGITestClient, unauthorized_headers, create_session_environment
+    sanic_client: SanicASGITestClient, unauthorized_headers, create_session_environment, snapshot
 ) -> None:
     await create_session_environment("Environment 1")
     await create_session_environment("Environment 2")
     await create_session_environment("Environment 3")
+    await create_session_environment("Environment 4", is_archived=True)
 
     _, res = await sanic_client.get("/api/data/environments", headers=unauthorized_headers)
 
@@ -56,7 +63,23 @@ async def test_get_all_session_environments(
         "Environment 1",
         "Environment 2",
         "Environment 3",
+        "Python/Jupyter",  # environments added by bootstrap migration
+        "Rstudio",
+    }
+    _, res = await sanic_client.get("/api/data/environments?include_archived=true", headers=unauthorized_headers)
+
+    assert res.status_code == 200, res.text
+    assert res.json is not None
+    environments = res.json
+    assert {env["name"] for env in environments} == {
+        "Environment 1",
+        "Environment 2",
+        "Environment 3",
+        "Environment 4",
+        "Python/Jupyter",  # environments added by bootstrap migration
+        "Rstudio",
     }
+    assert environments == snapshot(exclude=props("id", "creation_date"))
 
 
 @pytest.mark.asyncio
@@ -80,11 +103,22 @@ async def test_get_session_environment(
 
 
 @pytest.mark.asyncio
-async def test_post_session_environment(sanic_client: SanicASGITestClient, admin_headers) -> None:
+@pytest.mark.parametrize(
+    "image_name",
+    [
+        "renku/renku",
+        "u/renku/renku:latest",
+        "docker.io/renku/renku:latest",
+        "renku/renku@sha256:eceed25752d7544db159e4144a41ed6e96e667f39ff9fa18322d79c33729a18c",
+        "registry.renkulab.io/john.doe/test-34:38d8b3d",
+    ],
+)
+async def test_post_session_environment(sanic_client: SanicASGITestClient, admin_headers, image_name: str) -> None:
     payload = {
         "name": "Environment 1",
         "description": "A session environment.",
-        "container_image": "some_image:some_tag",
+        "container_image": image_name,
+        "environment_image_source": "image",
     }
 
     _, res = await sanic_client.post("/api/data/environments", headers=admin_headers, json=payload)
@@ -93,7 +127,33 @@ async def test_post_session_environment(sanic_client: SanicASGITestClient, admin
     assert res.json is not None
     assert res.json.get("name") == "Environment 1"
     assert res.json.get("description") == "A session environment."
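+    # image_name is parametrized above with Docker Hub short names, explicit
+    # registries, tags and sha256 digests; each one should be accepted as-is.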
- assert res.json.get("container_image") == "some_image:some_tag" + assert res.json.get("container_image") == image_name + assert not res.json.get("is_archived") + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "image_name", + [ + "https://example.com/r/test:latest", + "renku/_bla", + "renku/test:töst", + "renku/test@sha254:abcd", + " renku/test:latest", + ], +) +async def test_post_session_environment_invalid_image( + sanic_client: SanicASGITestClient, admin_headers, image_name: str +) -> None: + payload = { + "name": "Environment 1", + "description": "A session environment.", + "container_image": image_name, + } + + _, res = await sanic_client.post("/api/data/environments", headers=admin_headers, json=payload) + + assert res.status_code == 422, res.text @pytest.mark.asyncio @@ -102,6 +162,7 @@ async def test_post_session_environment_unauthorized(sanic_client: SanicASGITest "name": "Environment 1", "description": "A session environment.", "container_image": "some_image:some_tag", + "environment_image_source": "image", } _, res = await sanic_client.post("/api/data/environments", headers=user_headers, json=payload) @@ -124,6 +185,8 @@ async def test_patch_session_environment( "container_image": "new_image:new_tag", "command": command, "args": args, + "working_directory": "/home/user", + "mount_directory": "/home/user/work", } _, res = await sanic_client.patch(f"/api/data/environments/{environment_id}", headers=admin_headers, json=payload) @@ -135,12 +198,75 @@ async def test_patch_session_environment( assert res.json.get("container_image") == "new_image:new_tag" assert res.json.get("args") == args assert res.json.get("command") == command + assert res.json.get("working_directory") == "/home/user" + assert res.json.get("mount_directory") == "/home/user/work" - # Test that patching with None will reset the command and args - payload = {"args": None, "command": None} + # Test that patching with None will reset the command and args, + # and also that we can reset the working and mounting directories + payload = { + "args": None, + "command": None, + "working_directory": "", + "mount_directory": "", + } _, res = await sanic_client.patch(f"/api/data/environments/{environment_id}", headers=admin_headers, json=payload) + assert res.status_code == 200, res.text + assert res.json is not None assert res.json.get("args") is None assert res.json.get("command") is None + assert res.json.get("working_directory") is None + assert res.json.get("mount_directory") is None + + +@pytest.mark.asyncio +async def test_patch_session_environment_archived( + sanic_client: SanicASGITestClient, + admin_headers, + create_session_environment, + create_project, + valid_resource_pool_payload, + create_resource_pool, +) -> None: + env = await create_session_environment("Environment 1") + environment_id = env["id"] + + payload = {"is_archived": True} + + _, res = await sanic_client.patch(f"/api/data/environments/{environment_id}", headers=admin_headers, json=payload) + + assert res.status_code == 200, res.text + assert res.json is not None + assert res.json.get("is_archived") + + # Test that you can't create a launcher with an archived environment + project = await create_project("Some project") + resource_pool_data = valid_resource_pool_payload + resource_pool_data["public"] = False + + resource_pool = await create_resource_pool(admin=True, **resource_pool_data) + + session_payload = { + "name": "Launcher 1", + "project_id": project["id"], + "description": "A session launcher.", + "resource_class_id": 
resource_pool["classes"][0]["id"], + "environment": {"id": environment_id}, + } + + _, res = await sanic_client.post("/api/data/session_launchers", headers=admin_headers, json=session_payload) + + assert res.status_code == 422, res.text + + # test unarchiving allows launcher creation again + payload = {"is_archived": False} + + _, res = await sanic_client.patch(f"/api/data/environments/{environment_id}", headers=admin_headers, json=payload) + assert res.status_code == 200, res.text + assert not res.json.get("is_archived") + + _, res = await sanic_client.post("/api/data/session_launchers", headers=admin_headers, json=session_payload) + + assert res.status_code == 201, res.text @pytest.mark.asyncio @@ -191,6 +317,7 @@ async def test_get_all_session_launchers( user_headers, create_project, create_session_launcher, + snapshot, ) -> None: project_1 = await create_project("Project 1") project_2 = await create_project("Project 2") @@ -204,11 +331,13 @@ async def test_get_all_session_launchers( assert res.status_code == 200, res.text assert res.json is not None launchers = res.json + launchers = sorted(launchers, key=lambda e: e["name"]) assert {launcher["name"] for launcher in launchers} == { "Launcher 1", "Launcher 2", "Launcher 3", } + assert launchers == snapshot(exclude=props("id", "creation_date", "project_id")) @pytest.mark.asyncio @@ -265,30 +394,44 @@ async def test_get_project_launchers( assert {launcher["name"] for launcher in launchers} == {"Launcher 2", "Launcher 3"} +def test_env_variable_validation(): + renku_name_env_variables = { + "RENKU_KEY_NUMBER_1": "a value", + "RENKULAB_THING": "another value", + } + with pytest.raises(errors.ValidationError) as excinfo: + EnvVar.from_dict(renku_name_env_variables) + assert excinfo.value.message == "Env variable name 'RENKU_KEY_NUMBER_1' should not start with 'RENKU'." + + non_posix_name_env_variables = { + "1foo": "a value", + "thing=bar": "another value", + } + with pytest.raises(errors.ValidationError) as excinfo: + EnvVar.from_dict(non_posix_name_env_variables) + assert excinfo.value.message == "Env variable name '1foo' must match the regex '^[a-zA-Z_][a-zA-Z0-9_]*$'." + + @pytest.mark.asyncio async def test_post_session_launcher( - sanic_client: SanicASGITestClient, - valid_resource_pool_payload: dict[str, Any], - user_headers, - admin_headers, - member_1_headers, - create_project, - create_resource_pool, + sanic_client, admin_headers, create_project, create_resource_pool, app_manager ) -> None: project = await create_project("Some project") - resource_pool_data = valid_resource_pool_payload - resource_pool = await create_resource_pool(admin=True, **resource_pool_data) + resource_pool = await create_resource_pool(admin=True) payload = { "name": "Launcher 1", "project_id": project["id"], "description": "A session launcher.", "resource_class_id": resource_pool["classes"][0]["id"], + "disk_storage": 2, + "env_variables": [{"name": "KEY_NUMBER_1", "value": "a value"}], "environment": { "container_image": "some_image:some_tag", "name": "custom_name", "environment_kind": "CUSTOM", + "environment_image_source": "image", }, } @@ -300,10 +443,100 @@ async def test_post_session_launcher( assert res.json.get("project_id") == project["id"] assert res.json.get("description") == "A session launcher." 
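+    # The response embeds the environment record inline; the new disk_storage
+    # and env_variables fields are asserted further below.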
environment = res.json.get("environment", {}) + assert environment.get("name") == "custom_name" assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("environment_image_source") == "image" assert environment.get("container_image") == "some_image:some_tag" assert environment.get("id") is not None assert res.json.get("resource_class_id") == resource_pool["classes"][0]["id"] + assert res.json.get("disk_storage") == 2 + assert res.json.get("env_variables") == [{"name": "KEY_NUMBER_1", "value": "a value"}] + app_manager.metrics.session_launcher_created.assert_called_once() + + +@pytest.mark.asyncio +async def test_post_session_launcher_with_environment_build( + sanic_client, + admin_headers, + create_project, + create_resource_pool, +) -> None: + project = await create_project("Some project") + + payload = { + "name": "Launcher 1", + "project_id": project["id"], + "description": "A session launcher.", + "environment": { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "environment_image_source": "build", + }, + } + + _, response = await sanic_client.post("/api/data/session_launchers", headers=admin_headers, json=payload) + + assert response.status_code == 201, response.text + assert response.json is not None + assert response.json.get("name") == "Launcher 1" + assert response.json.get("project_id") == project["id"] + assert response.json.get("description") == "A session launcher." + environment = response.json.get("environment", {}) + assert environment.get("id") is not None + assert environment.get("name") == "Launcher 1" + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("build_parameters") == { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + } + assert environment.get("environment_image_source") == "build" + assert environment.get("container_image") == "image:unknown-at-the-moment" + + +@pytest.mark.asyncio +async def test_post_session_launcher_with_advanced_environment_build( + sanic_client: SanicASGITestClient, + user_headers: dict[str, str], + create_project, +) -> None: + project = await create_project("Some project") + + payload = { + "name": "Launcher 1", + "project_id": project["id"], + "description": "A session launcher.", + "environment": { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "repository_revision": "some-branch", + "context_dir": "path/to/context", + "environment_image_source": "build", + }, + } + + _, response = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload) + + assert response.status_code == 201, response.text + assert response.json is not None + assert response.json.get("name") == "Launcher 1" + assert response.json.get("project_id") == project["id"] + assert response.json.get("description") == "A session launcher." 
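+    # The optional build fields (repository_revision and context_dir) should be
+    # echoed back inside build_parameters, as asserted below.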
+ environment = response.json.get("environment", {}) + assert environment.get("id") is not None + assert environment.get("name") == "Launcher 1" + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("build_parameters") == { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "repository_revision": "some-branch", + "context_dir": "path/to/context", + } + assert environment.get("environment_image_source") == "build" + assert environment.get("container_image") == "image:unknown-at-the-moment" @pytest.mark.asyncio @@ -374,6 +607,7 @@ async def test_patch_session_launcher( "container_image": "some_image:some_tag", "name": "custom_name", "environment_kind": "CUSTOM", + "environment_image_source": "image", }, } @@ -388,11 +622,15 @@ async def test_patch_session_launcher( assert environment.get("container_image") == "some_image:some_tag" assert environment.get("id") is not None assert res.json.get("resource_class_id") == resource_pool["classes"][0]["id"] + assert res.json.get("disk_storage") is None + assert res.json.get("env_variables") is None patch_payload = { "name": "New Name", "description": "An updated session launcher.", "resource_class_id": resource_pool["classes"][1]["id"], + "disk_storage": 3, + "env_variables": [{"name": "KEY_NUMBER_2", "value": "another value"}], } _, res = await sanic_client.patch( f"/api/data/session_launchers/{res.json['id']}", headers=user_headers, json=patch_payload @@ -402,6 +640,8 @@ async def test_patch_session_launcher( assert res.json.get("name") == patch_payload["name"] assert res.json.get("description") == patch_payload["description"] assert res.json.get("resource_class_id") == patch_payload["resource_class_id"] + assert res.json.get("disk_storage") == 3 + assert res.json.get("env_variables") == [{"name": "KEY_NUMBER_2", "value": "another value"}] @pytest.mark.asyncio @@ -428,6 +668,7 @@ async def test_patch_session_launcher_environment( "container_image": "some_image:some_tag", "name": "custom_name", "environment_kind": "CUSTOM", + "environment_image_source": "image", }, } _, res = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload) @@ -438,19 +679,31 @@ async def test_patch_session_launcher_environment( assert environment.get("container_image") == "some_image:some_tag" assert environment.get("id") is not None + launcher_id = res.json["id"] + # Patch in a global environment patch_payload = { "environment": {"id": global_env["id"]}, } _, res = await sanic_client.patch( - f"/api/data/session_launchers/{res.json['id']}", headers=user_headers, json=patch_payload + f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload ) assert res.status_code == 200, res.text assert res.json is not None - launcher_id = res.json["id"] global_env["environment_kind"] = "GLOBAL" + global_env["environment_image_source"] = "image" assert res.json["environment"] == global_env + # Trying to patch with some random fields should fail + patch_payload = { + "environment": {"random_field": "random_value"}, + } + _, res = await sanic_client.patch( + f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload + ) + assert res.status_code == 422, res.text + assert "There are errors in the following fields, id: Input should be a valid string" in res.text + # Trying to patch a field of the global environment should fail patch_payload = { "environment": {"container_image": "new_image"}, @@ -462,13 +715,20 @@ 
async def test_patch_session_launcher_environment(
 
     # Patching in a wholly new custom environment over the global is allowed
     patch_payload = {
-        "environment": {"container_image": "new_image", "name": "new_custom", "environment_kind": "CUSTOM"},
+        "environment": {
+            "container_image": "new_image",
+            "name": "new_custom",
+            "environment_kind": "CUSTOM",
+            "environment_image_source": "image",
+        },
     }
     _, res = await sanic_client.patch(
         f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload
     )
     assert res.status_code == 200, res.text
 
+    environment_id = res.json["environment"]["id"]
+
     # Should be able to patch some fields of the custom environment
     patch_payload = {
         "environment": {"container_image": "nginx:latest", "args": ["a", "b", "c"]},
@@ -477,6 +737,7 @@ async def test_patch_session_launcher_environment(
         f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload
     )
     assert res.status_code == 200, res.text
+    assert res.json["environment"]["id"] == environment_id
     assert res.json["environment"]["container_image"] == "nginx:latest"
     assert res.json["environment"]["args"] == ["a", "b", "c"]
 
@@ -488,59 +749,747 @@ async def test_patch_session_launcher_environment(
         f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload
     )
     assert res.status_code == 200, res.text
+    assert res.json["environment"]["id"] == environment_id
     assert res.json["environment"].get("args") is None
     assert res.json["environment"].get("command") is None
 
+    # Should not be able to patch build parameters on an image-based environment
+    patch_payload = {
+        "environment": {"build_parameters": {"repository": "https://github.com/repo.get"}},
+    }
+    _, res = await sanic_client.patch(
+        f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload
+    )
+    assert res.status_code == 422, res.text
 
-@pytest.fixture
-def anonymous_user_headers() -> dict[str, str]:
-    return {"Renku-Auth-Anon-Id": "some-random-value-1234"}
 
+    # Should be able to change the custom environment to be built from a repository
+    patch_payload = {
+        "environment": {
+            "environment_image_source": "build",
+            "build_parameters": {
+                "repository": "https://github.com/some/repo",
+                "builder_variant": "python",
+                "frontend_variant": "vscodium",
+            },
+        },
+    }
+
+    _, res = await sanic_client.patch(
+        f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload
+    )
+
+    assert res.status_code == 200, res.text
+    assert res.json is not None
+    assert res.json.get("name") == "Launcher 1"
+    assert res.json.get("project_id") == project["id"]
+    assert res.json.get("description") == "A session launcher."
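+    # The environment keeps its id but now reports build_parameters and the
+    # placeholder container image "image:unknown-at-the-moment".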
+    environment = res.json.get("environment", {})
+    assert environment.get("id") == environment_id
+    assert environment.get("name") == "new_custom"
+    assert environment.get("environment_kind") == "CUSTOM"
+    assert environment.get("build_parameters") == {
+        "repository": "https://github.com/some/repo",
+        "builder_variant": "python",
+        "frontend_variant": "vscodium",
+    }
+    assert environment.get("environment_image_source") == "build"
+    assert environment.get("container_image") == "image:unknown-at-the-moment"
 
 
 @pytest.mark.asyncio
-@pytest.mark.skip(reason="Setup for testing sessions is not done yet.")  # TODO: enable in follwup PR
-async def test_starting_session_anonymous(
+async def test_patch_session_launcher_environment_with_build_parameters(
     sanic_client: SanicASGITestClient,
-    create_project,
-    create_session_launcher,
     user_headers,
-    app_config: Config,
-    admin_headers,
-    launch_session,
-    anonymous_user_headers,
-    cluster,
+    create_project,
+    create_resource_pool,
+    create_session_environment,
 ) -> None:
-    _, res = await sanic_client.post(
-        "/api/data/resource_pools",
-        json=ResourcePool.model_validate(app_config.default_resource_pool, from_attributes=True).model_dump(
-            mode="json", exclude_none=True
-        ),
-        headers=admin_headers,
-    )
+    project = await create_project("Some project 1")
+    resource_pool = await create_resource_pool(admin=True)
+    global_env = await create_session_environment("Some environment")
+
+    # Create a launcher that uses the global environment
+    payload = {
+        "name": "Launcher 1",
+        "project_id": project["id"],
+        "description": "A session launcher.",
+        "resource_class_id": resource_pool["classes"][0]["id"],
+        "environment": {"id": global_env["id"]},
+    }
+    _, res = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload)
     assert res.status_code == 201, res.text
-    project: dict[str, Any] = await create_project(
-        "Some project",
-        visibility="public",
-        repositories=["https://github.com/SwissDataScienceCenter/renku-data-services"],
-    )
-    launcher: dict[str, Any] = await create_session_launcher(
-        "Launcher 1",
-        project_id=project["id"],
-        environment={
-            "container_image": "renku/renkulab-py:3.10-0.23.0-amalthea-sessions-3",
+    assert res.json is not None
+    global_env["environment_kind"] = "GLOBAL"
+    global_env["environment_image_source"] = "image"
+    assert res.json["environment"] == global_env
+
+    launcher_id = res.json["id"]
+
+    patch_payload = {
+        "environment": {
             "environment_kind": "CUSTOM",
-            "name": "test",
-            "port": 8888,
+            "environment_image_source": "build",
+            "build_parameters": {
+                "repository": "https://github.com/some/repo",
+                "builder_variant": "python",
+                "frontend_variant": "vscodium",
+            },
         },
+    }
+
+    _, res = await sanic_client.patch(
+        f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload
     )
-    launcher_id = launcher["id"]
-    project_id = project["id"]
-    payload = {"project_id": project_id, "launcher_id": launcher_id}
-    session_res = await launch_session(payload, headers=anonymous_user_headers)
-    _, res = await sanic_client.get(f"/api/data/sessions/{session_res.json['name']}", headers=anonymous_user_headers)
+
     assert res.status_code == 200, res.text
-    assert res.json["name"] == session_res.json["name"]
-    _, res = await sanic_client.get("/api/data/sessions", headers=anonymous_user_headers)
+
+    _, res = await sanic_client.get(f"/api/data/session_launchers/{launcher_id}", headers=user_headers)
+
     assert res.status_code == 200, res.text
-    assert len(res.json) > 0
-    assert session_res.json["name"]
in [i["name"] for i in res.json] + assert res.json is not None + assert res.json.get("name") == "Launcher 1" + assert res.json.get("project_id") == project["id"] + assert res.json.get("description") == "A session launcher." + environment = res.json.get("environment", {}) + assert environment.get("id") is not None + assert environment.get("id") != global_env["id"] + assert environment.get("name") == "Launcher 1" + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("build_parameters") == { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + } + assert environment.get("environment_image_source") == "build" + assert environment.get("container_image") == "image:unknown-at-the-moment" + + environment_id = environment["id"] + + # Patch the build parameters + patch_payload = { + "environment": { + "build_parameters": { + "repository": "new_repo", + "builder_variant": "python", + }, + }, + } + + _, res = await sanic_client.patch( + f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload + ) + + assert res.status_code == 200, res.text + assert res.json is not None + assert res.json.get("name") == "Launcher 1" + assert res.json.get("project_id") == project["id"] + assert res.json.get("description") == "A session launcher." + environment = res.json.get("environment", {}) + assert environment.get("id") == environment_id + assert environment.get("name") == "Launcher 1" + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("build_parameters") == { + "repository": "new_repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + } + assert environment.get("environment_image_source") == "build" + assert environment.get("container_image") == "image:unknown-at-the-moment" + + # Back to a custom environment with image + patch_payload = { + "environment": { + "container_image": "new_image", + "name": "new_custom", + "environment_kind": "CUSTOM", + "environment_image_source": "image", + }, + } + _, res = await sanic_client.patch( + f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload + ) + assert res.status_code == 200, res.text + assert res.json.get("name") == "Launcher 1" + assert res.json.get("project_id") == project["id"] + assert res.json.get("description") == "A session launcher." 
+ environment = res.json.get("environment", {}) + assert environment.get("id") == environment_id + assert environment.get("name") == "new_custom" + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("build_parameters") is None + assert environment.get("environment_image_source") == "image" + assert environment.get("container_image") == "new_image" + + +@pytest.mark.asyncio +@pytest.mark.parametrize("builder_variant, frontend_variant", [("conda", "vscodium"), ("python", "jupyter")]) +async def test_post_session_launcher_environment_with_invalid_build_parameters( + sanic_client, user_headers, create_project, builder_variant, frontend_variant +) -> None: + project = await create_project("Project") + + payload = { + "name": "Launcher 1", + "project_id": project["id"], + "environment": { + "repository": "https://github.com/some/repo", + "builder_variant": builder_variant, + "frontend_variant": frontend_variant, + "environment_image_source": "build", + }, + } + + _, res = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload) + assert res.status_code == 422, res.text + assert "Invalid value for the field" in res.text + assert "Valid values are" in res.text + + +@pytest.mark.asyncio +@pytest.mark.parametrize("builder_variant, frontend_variant", [("conda", "vscodium"), ("python", "jupyter")]) +async def test_patch_session_launcher_environment_with_invalid_build_parameters( + sanic_client, user_headers, create_project, create_session_launcher, builder_variant, frontend_variant +) -> None: + project = await create_project("Project") + + session_launcher = await create_session_launcher( + name="Launcher", + project_id=project["id"], + environment={ + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "environment_image_source": "build", + }, + ) + launcher_id = session_launcher["id"] + + patch_payload = { + "environment": { + "build_parameters": { + "builder_variant": builder_variant, + "frontend_variant": frontend_variant, + }, + } + } + + _, res = await sanic_client.patch( + f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload + ) + assert res.status_code == 422, res.text + assert "Invalid value for the field" in res.text + assert "Valid values are" in res.text + + +@pytest.mark.asyncio +async def test_patch_session_launcher_invalid_env_variables( + sanic_client: SanicASGITestClient, + valid_resource_pool_payload: dict[str, Any], + user_headers, + create_project, + create_resource_pool, + create_session_environment, +) -> None: + project = await create_project("Some project 1") + resource_pool_data = valid_resource_pool_payload + resource_pool = await create_resource_pool(admin=True, **resource_pool_data) + + # Create a new custom environment with the launcher + payload = { + "name": "Launcher 1", + "project_id": project["id"], + "description": "A session launcher.", + "resource_class_id": resource_pool["classes"][0]["id"], + "environment": { + "container_image": "some_image:some_tag", + "name": "custom_name", + "environment_kind": "CUSTOM", + "environment_image_source": "image", + }, + } + _, res = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload) + assert res.status_code == 201, res.text + assert res.json is not None + environment = res.json.get("environment", {}) + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("container_image") == "some_image:some_tag" + assert 
environment.get("id") is not None + + launcher_id = res.json["id"] + # Should not be able use env variables that start with 'renku' + patch_payload = {"env_variables": [{"name": "renkustuff_1", "value": "a value"}]} + + _, res = await sanic_client.patch( + f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload + ) + assert res.status_code == 422, res.text + assert "Env variable name 'renkustuff_1'" in res.text + + +@pytest.mark.asyncio +async def test_patch_session_launcher_reset_fields( + sanic_client: SanicASGITestClient, + valid_resource_pool_payload: dict[str, Any], + user_headers, + create_project, + create_resource_pool, +) -> None: + project = await create_project("Some project 1") + resource_pool_data = valid_resource_pool_payload + resource_pool = await create_resource_pool(admin=True, **resource_pool_data) + + payload = { + "name": "Launcher 1", + "project_id": project["id"], + "description": "A session launcher.", + "resource_class_id": resource_pool["classes"][0]["id"], + "disk_storage": 2, + "env_variables": [{"name": "KEY_NUMBER_1", "value": "a value"}], + "environment": { + "container_image": "some_image:some_tag", + "name": "custom_name", + "environment_kind": "CUSTOM", + "environment_image_source": "image", + }, + } + + _, res = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload) + + assert res.status_code == 201, res.text + assert res.json is not None + assert res.json.get("name") == "Launcher 1" + assert res.json.get("description") == "A session launcher." + environment = res.json.get("environment", {}) + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("container_image") == "some_image:some_tag" + assert environment.get("id") is not None + assert res.json.get("resource_class_id") == resource_pool["classes"][0]["id"] + assert res.json.get("disk_storage") == 2 + assert res.json.get("env_variables") == [{"name": "KEY_NUMBER_1", "value": "a value"}] + + patch_payload = {"resource_class_id": None, "disk_storage": None, "env_variables": None} + _, res = await sanic_client.patch( + f"/api/data/session_launchers/{res.json['id']}", headers=user_headers, json=patch_payload + ) + assert res.status_code == 200, res.text + assert res.json is not None + assert res.json.get("resource_class_id") is None + assert res.json.get("disk_storage") is None + assert res.json.get("env_variables") is None + + +@pytest.mark.asyncio +async def test_patch_session_launcher_keeps_unset_values( + sanic_client, user_headers, create_project, create_resource_pool, create_session_launcher +) -> None: + project = await create_project("Some project") + resource_pool = await create_resource_pool(admin=True) + session_launcher = await create_session_launcher( + name="Session Launcher", + project_id=project["id"], + description="A session launcher.", + resource_class_id=resource_pool["classes"][0]["id"], + disk_storage=42, + env_variables=[{"name": "KEY_NUMBER_1", "value": "a value"}], + environment={ + "container_image": "some_image:some_tag", + "environment_kind": "CUSTOM", + "name": "custom_name", + "environment_image_source": "image", + }, + ) + + _, response = await sanic_client.patch( + f"/api/data/session_launchers/{session_launcher['id']}", headers=user_headers, json={} + ) + + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("name") == "Session Launcher" + assert response.json.get("project_id") == project["id"] + assert 
response.json.get("description") == "A session launcher." + assert response.json.get("resource_class_id") == resource_pool["classes"][0]["id"] + assert response.json.get("disk_storage") == 42 + assert response.json.get("env_variables") == [{"name": "KEY_NUMBER_1", "value": "a value"}] + environment = response.json.get("environment", {}) + assert environment.get("container_image") == "some_image:some_tag" + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("name") == "custom_name" + assert environment.get("id") is not None + + +@pytest.mark.asyncio +async def test_patch_session_launcher_with_advanced_environment_build( + sanic_client: SanicASGITestClient, + user_headers: dict[str, str], + create_project, + create_resource_pool, +) -> None: + project = await create_project("Some project") + + payload = { + "name": "Launcher 1", + "project_id": project["id"], + "description": "A session launcher.", + "environment": { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "environment_image_source": "build", + }, + } + + _, res = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload) + + assert res.status_code == 201, res.text + assert res.json is not None + assert res.json.get("id") is not None + launcher_id = res.json["id"] + assert res.json.get("name") == "Launcher 1" + assert res.json.get("project_id") == project["id"] + assert res.json.get("description") == "A session launcher." + environment = res.json.get("environment", {}) + assert environment["id"] is not None + environment_id = environment.get("id") + assert environment.get("name") == "Launcher 1" + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("build_parameters") == { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + } + assert environment.get("environment_image_source") == "build" + assert environment.get("container_image") == "image:unknown-at-the-moment" + + patch_payload = { + "environment": {"build_parameters": {"context_dir": "some/path", "repository_revision": "some-branch"}} + } + _, res = await sanic_client.patch( + f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload + ) + assert res.status_code == 200, res.text + assert res.json is not None + assert res.json.get("id") == launcher_id + assert res.json.get("name") == "Launcher 1" + assert res.json.get("project_id") == project["id"] + assert res.json.get("description") == "A session launcher." 
+ environment = res.json.get("environment", {}) + assert environment["id"] == environment_id + assert environment.get("name") == "Launcher 1" + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("build_parameters") == { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "context_dir": "some/path", + "repository_revision": "some-branch", + } + assert environment.get("environment_image_source") == "build" + assert environment.get("container_image") == "image:unknown-at-the-moment" + + # Check that we can reset the advanced parameters + patch_payload = { + "environment": { + "build_parameters": { + "context_dir": "", + } + } + } + _, res = await sanic_client.patch( + f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload + ) + assert res.status_code == 200, res.text + assert res.json is not None + assert res.json.get("id") == launcher_id + assert res.json.get("name") == "Launcher 1" + assert res.json.get("project_id") == project["id"] + assert res.json.get("description") == "A session launcher." + environment = res.json.get("environment", {}) + assert environment["id"] == environment_id + assert environment.get("name") == "Launcher 1" + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("build_parameters") == { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "repository_revision": "some-branch", + } + assert environment.get("environment_image_source") == "build" + assert environment.get("container_image") == "image:unknown-at-the-moment" + + patch_payload = { + "environment": { + "build_parameters": {"frontend_variant": "jupyterlab", "context_dir": "", "repository_revision": ""} + } + } + _, res = await sanic_client.patch( + f"/api/data/session_launchers/{launcher_id}", headers=user_headers, json=patch_payload + ) + assert res.status_code == 200, res.text + assert res.json is not None + assert res.json.get("id") == launcher_id + assert res.json.get("name") == "Launcher 1" + assert res.json.get("project_id") == project["id"] + assert res.json.get("description") == "A session launcher." 
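+    # Empty strings clear both advanced parameters while frontend_variant is updated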
+ environment = res.json.get("environment", {}) + assert environment["id"] == environment_id + assert environment.get("name") == "Launcher 1" + assert environment.get("environment_kind") == "CUSTOM" + assert environment.get("build_parameters") == { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "jupyterlab", + } + assert environment.get("environment_image_source") == "build" + assert environment.get("container_image") == "image:unknown-at-the-moment" + + +@pytest.fixture +def anonymous_user_headers() -> dict[str, str]: + return {"Renku-Auth-Anon-Id": "some-random-value-1234"} + + +@pytest.mark.asyncio +@pytest.mark.skip(reason="Setup for testing sessions is not done yet.") # TODO: enable in followup PR +async def test_starting_session_anonymous( + sanic_client: SanicASGITestClient, + create_project, + create_session_launcher, + user_headers, + app_manager: DependencyManager, + admin_headers, + launch_session, + anonymous_user_headers, +) -> None: + _, res = await sanic_client.post( + "/api/data/resource_pools", + json=ResourcePool.model_validate(app_manager.default_resource_pool, from_attributes=True).model_dump( + mode="json", exclude_none=True + ), + headers=admin_headers, + ) + assert res.status_code == 201, res.text + project: dict[str, Any] = await create_project( + "Some project", + visibility="public", + repositories=["https://github.com/SwissDataScienceCenter/renku-data-services"], + ) + launcher: dict[str, Any] = await create_session_launcher( + "Launcher 1", + project_id=project["id"], + environment={ + "container_image": "renku/renkulab-py:3.10-0.23.0-amalthea-sessions-3", + "environment_kind": "CUSTOM", + "name": "test", + "port": 8888, + }, + env_variables=[ + {"name": "TEST_ENV_VAR", "value": "some-random-value-1234"}, + ], + ) + launcher_id = launcher["id"] + project_id = project["id"] + payload = {"project_id": project_id, "launcher_id": launcher_id} + session_res = await launch_session(payload, headers=anonymous_user_headers) + _, res = await sanic_client.get(f"/api/data/sessions/{session_res.json['name']}", headers=anonymous_user_headers) + assert res.status_code == 200, res.text + assert res.json["name"] == session_res.json["name"] + _, res = await sanic_client.get("/api/data/sessions", headers=anonymous_user_headers) + assert res.status_code == 200, res.text + assert len(res.json) > 0 + assert session_res.json["name"] in [i["name"] for i in res.json] + + +@pytest.mark.asyncio +async def test_rebuild(sanic_client: SanicASGITestClient, user_headers, create_project) -> None: + project = await create_project("Some project") + payload = { + "name": "Launcher 1", + "project_id": project["id"], + "description": "A session launcher.", + "environment": { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "environment_image_source": "build", + }, + } + _, response = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload) + assert response.status_code == 201, response.text + launcher = response.json + environment_id = launcher["environment"]["id"] + + # Trying to rebuild fails since a build is already in progress when session launcher is created + _, response = await sanic_client.post(f"/api/data/environments/{environment_id}/builds", headers=user_headers) + + assert response.status_code == 409, response.text + assert "already has a build in progress." 
in response.text + + # Cancel the build + _, response = await sanic_client.get(f"/api/data/environments/{environment_id}/builds", headers=user_headers) + assert response.status_code == 200, response.text + build = response.json[0] + + _, response = await sanic_client.patch( + f"/api/data/builds/{build['id']}", json={"status": "cancelled"}, headers=user_headers + ) + assert response.status_code == 200, response.text + + # Rebuild + _, response = await sanic_client.post(f"/api/data/environments/{environment_id}/builds", headers=user_headers) + + assert response.status_code == 201, response.text + assert response.json is not None + build = response.json + assert build.get("id") is not None + assert build.get("environment_id") == environment_id + assert build.get("created_at") is not None + assert build.get("status") == "in_progress" + assert build.get("result") is None + + +@pytest.mark.asyncio +async def test_get_build(sanic_client: SanicASGITestClient, user_headers, create_project) -> None: + project = await create_project("Some project") + payload = { + "name": "Launcher 1", + "project_id": project["id"], + "description": "A session launcher.", + "environment": { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "environment_image_source": "build", + }, + } + _, response = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload) + assert response.status_code == 201, response.text + launcher = response.json + environment_id = launcher["environment"]["id"] + + _, response = await sanic_client.get( + f"/api/data/environments/{environment_id}/builds", + headers=user_headers, + ) + assert response.status_code == 200, response.text + build = response.json[0] + build_id = build["id"] + + _, response = await sanic_client.get( + f"/api/data/builds/{build_id}", + headers=user_headers, + ) + + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == build_id + assert response.json.get("environment_id") == environment_id + assert response.json.get("created_at") is not None + assert response.json.get("status") == "in_progress" + assert response.json.get("result") is None + + +@pytest.mark.asyncio +async def test_get_environment_builds(sanic_client: SanicASGITestClient, user_headers, create_project) -> None: + project = await create_project("Some project") + payload = { + "name": "Launcher 1", + "project_id": project["id"], + "description": "A session launcher.", + "environment": { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "environment_image_source": "build", + }, + } + _, response = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload) + assert response.status_code == 201, response.text + launcher = response.json + environment_id = launcher["environment"]["id"] + + _, response = await sanic_client.get( + f"/api/data/environments/{environment_id}/builds", + headers=user_headers, + ) + assert response.status_code == 200, response.text + build1 = response.json[0] + # Note: cancel this build so that we can post the next one + _, response = await sanic_client.patch( + f"/api/data/builds/{build1['id']}", + json={"status": "cancelled"}, + headers=user_headers, + ) + assert response.status_code == 200, response.text + + _, response = await sanic_client.post( + f"/api/data/environments/{environment_id}/builds", + headers=user_headers, + ) + 
assert response.status_code == 201, response.text + build2 = response.json + + _, response = await sanic_client.get( + f"/api/data/environments/{environment_id}/builds", + headers=user_headers, + ) + + assert response.status_code == 200, response.text + assert response.json is not None + builds = response.json + assert len(builds) == 2 + assert {build.get("id") for build in builds} == {build1["id"], build2["id"]} + + +@pytest.mark.asyncio +async def test_patch_build(sanic_client: SanicASGITestClient, user_headers, create_project) -> None: + project = await create_project("Some project") + payload = { + "name": "Launcher 1", + "project_id": project["id"], + "description": "A session launcher.", + "environment": { + "repository": "https://github.com/some/repo", + "builder_variant": "python", + "frontend_variant": "vscodium", + "environment_image_source": "build", + }, + } + _, response = await sanic_client.post("/api/data/session_launchers", headers=user_headers, json=payload) + assert response.status_code == 201, response.text + launcher = response.json + environment_id = launcher["environment"]["id"] + + _, response = await sanic_client.get( + f"/api/data/environments/{environment_id}/builds", + headers=user_headers, + ) + assert response.status_code == 200, response.text + build = response.json[0] + build_id = build["id"] + + payload = {"status": "cancelled"} + + _, response = await sanic_client.patch( + f"/api/data/builds/{build_id}", + json=payload, + headers=user_headers, + ) + + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == build_id + assert response.json.get("status") == "cancelled" + + _, response = await sanic_client.get( + f"/api/data/builds/{build_id}", + headers=user_headers, + ) + + assert response.status_code == 200, response.text + assert response.json is not None + assert response.json.get("id") == build_id + assert response.json.get("environment_id") == environment_id + assert response.json.get("created_at") is not None + assert response.json.get("status") == "cancelled" + assert response.json.get("result") is None diff --git a/test/bases/renku_data_services/data_api/test_smoke.py b/test/bases/renku_data_services/data_api/test_smoke.py index 895002405..02cb76fb7 100644 --- a/test/bases/renku_data_services/data_api/test_smoke.py +++ b/test/bases/renku_data_services/data_api/test_smoke.py @@ -65,4 +65,6 @@ def merge_dict_strategy(config: Merger, path: list, base: dict, nxt: dict) -> di try: merger.merge(base_dict, to_merge) except ApispecMergeError as err: - assert False, f"There was an unexpected conflict when merging {input_file} at field {err.path}\n{err.diff}" + raise AssertionError( + f"There was an unexpected conflict when merging {input_file} at field {err.path}\n{err.diff}" + ) from err diff --git a/test/bases/renku_data_services/data_api/test_storage.py b/test/bases/renku_data_services/data_api/test_storage.py index 5cf89ef59..976d46675 100644 --- a/test/bases/renku_data_services/data_api/test_storage.py +++ b/test/bases/renku_data_services/data_api/test_storage.py @@ -5,13 +5,15 @@ import pytest_asyncio from sanic import Sanic from sanic_testing.testing import SanicASGITestClient +from syrupy.filters import props -from renku_data_services.app_config import Config from renku_data_services.authn.dummy import DummyAuthenticator from renku_data_services.data_api.app import register_all_handlers +from renku_data_services.data_api.dependencies import DependencyManager from 
renku_data_services.migrations.core import run_migrations_for_app from renku_data_services.storage.rclone import RCloneValidator from renku_data_services.utils.core import get_openbis_session_token +from renku_data_services.storage.rclone_patches import BANNED_STORAGE, OAUTH_PROVIDERS from test.utils import SanicReusableASGITestClient _valid_storage: dict[str, Any] = { @@ -33,11 +35,11 @@ def valid_storage_payload() -> dict[str, Any]: @pytest_asyncio.fixture(scope="session") -async def storage_test_client_setup(app_config: Config) -> SanicASGITestClient: +async def storage_test_client_setup(app_manager: DependencyManager) -> SanicASGITestClient: gitlab_auth = DummyAuthenticator() - app_config.gitlab_authenticator = gitlab_auth - app = Sanic(app_config.app_name) - app = register_all_handlers(app, app_config) + app_manager.gitlab_authenticator = gitlab_auth + app = Sanic(app_manager.app_name) + app = register_all_handlers(app, app_manager) validator = RCloneValidator() app.ext.dependency(validator) async with SanicReusableASGITestClient(app) as client: @@ -47,7 +49,7 @@ async def storage_test_client_setup(app_config: Config) -> SanicASGITestClient: @pytest_asyncio.fixture async def storage_test_client( storage_test_client_setup, - app_config_instance: Config, + app_manager_instance: DependencyManager, ) -> SanicASGITestClient: run_migrations_for_app("common") yield storage_test_client_setup @@ -257,6 +259,7 @@ async def test_storage_creation( expected_status_code: int, expected_storage_type: str, admin_headers: dict[str, str], + snapshot, ) -> None: storage_test_client, _ = storage_test_client _, res = await storage_test_client.post( @@ -271,6 +274,7 @@ async def test_storage_creation( assert res.json["storage"]["storage_type"] == expected_storage_type assert res.json["storage"]["name"] == payload["name"] assert res.json["storage"]["target_path"] == payload["target_path"] + assert res.json == snapshot(exclude=props("storage_id")) @pytest.mark.asyncio @@ -624,13 +628,53 @@ async def test_storage_validate_error_sensitive(storage_test_client) -> None: @pytest.mark.asyncio -async def test_storage_schema(storage_test_client) -> None: +async def test_storage_schema_patches(storage_test_client, snapshot) -> None: storage_test_client, _ = storage_test_client _, res = await storage_test_client.get("/api/data/storage_schema") - assert res.status_code == 200 - assert not next((e for e in res.json if e["prefix"] == "alias"), None) # prohibited storage - s3 = next(e for e in res.json if e["prefix"] == "s3") + assert res.status_code == 200, res.text + schema = res.json + assert not next((e for e in schema if e["prefix"] == "alias"), None) # prohibited storage + s3 = next(e for e in schema if e["prefix"] == "s3") assert s3 providers = next(p for p in s3["options"] if p["name"] == "provider") assert providers assert providers.get("examples") + + # check that switch provider is added to s3 + assert any(e["value"] == "Switch" for e in providers.get("examples")) + + # assert banned storage is not in schema + assert all(s["prefix"] not in BANNED_STORAGE for s in schema) + + # assert webdav password is sensitive + webdav = next((e for e in schema if e["prefix"] == "webdav"), None) + assert webdav + pwd = next((o for o in webdav["options"] if o["name"] == "pass"), None) + assert pwd + assert pwd.get("sensitive") + + # ensure that the endpoint is required for custom s3 storage + endpoints = [ + o + for o in s3["options"] + if o["name"] == "endpoint" and 
o["provider"].startswith("!AWS,ArvanCloud,IBMCOS,IDrive,IONOS,") + ] + assert endpoints + assert all(e.get("required") for e in endpoints) + + # assert oauth is disabled for all providers + oauth_providers = [s for s in schema if s["prefix"] in OAUTH_PROVIDERS] + assert all(o["name"] != "client_id" and o["name"] != "client_secret" for p in oauth_providers for o in p["options"]) + + # check custom webdav storage is added + assert any(s["prefix"] == "polybox" for s in schema) + assert any(s["prefix"] == "switchDrive" for s in schema) + assert schema == snapshot + + +@pytest.mark.asyncio +async def test_storage_validate_connection_supports_doi(storage_test_client) -> None: + storage_test_client, _ = storage_test_client + payload = {"configuration": {"type": "doi", "doi": "10.5281/zenodo.15174623"}, "source_path": ""} + _, res = await storage_test_client.post("/api/data/storage_schema/test_connection", json=payload) + assert res.status_code == 204, res.text diff --git a/test/bases/renku_data_services/data_api/test_user_preferences.py b/test/bases/renku_data_services/data_api/test_user_preferences.py index 8ab292ea5..4cc85a970 100644 --- a/test/bases/renku_data_services/data_api/test_user_preferences.py +++ b/test/bases/renku_data_services/data_api/test_user_preferences.py @@ -4,16 +4,19 @@ import pytest from sanic_testing.testing import SanicASGITestClient +from syrupy.filters import props from renku_data_services.base_models import APIUser -from test.bases.renku_data_services.data_api.utils import create_user_preferences +from test.bases.renku_data_services.data_api.utils import ( + create_user_preferences, + create_user_preferences_dismiss_banner, +) -_valid_add_pinned_project: dict[str, Any] = {"project_slug": "user.1/first-project"} - - -@pytest.fixture -def valid_add_pinned_project_payload() -> dict[str, Any]: - return _valid_add_pinned_project +_valid_add_pinned_project: list[dict[str, Any]] = [ + {"project_slug": "user.1/first-project"}, + {"project_slug": "john-doe-1/my-project-1"}, + {"project_slug": "a-1-2-3/b-1-2-3"}, +] @pytest.fixture @@ -44,8 +47,9 @@ def api_user() -> APIUser: @pytest.mark.asyncio +@pytest.mark.parametrize("valid_add_pinned_project_payload", _valid_add_pinned_project) async def test_get_user_preferences( - sanic_client: SanicASGITestClient, valid_add_pinned_project_payload: dict[str, Any], api_user: APIUser + sanic_client: SanicASGITestClient, valid_add_pinned_project_payload: dict[str, Any], api_user: APIUser, snapshot ) -> None: _, res = await create_user_preferences(sanic_client, valid_add_pinned_project_payload, api_user) assert res.status_code == 200 @@ -61,16 +65,24 @@ async def test_get_user_preferences( assert res.json.get("pinned_projects") is not None assert len(res.json["pinned_projects"].get("project_slugs")) == 1 project_slugs = res.json["pinned_projects"]["project_slugs"] - assert project_slugs[0] == "user.1/first-project" + assert project_slugs[0] == valid_add_pinned_project_payload["project_slug"] + assert res.json == snapshot(exclude=props("user_id")) @pytest.mark.asyncio +@pytest.mark.parametrize("valid_add_pinned_project_payload", _valid_add_pinned_project) async def test_post_user_preferences_pinned_projects( sanic_client: SanicASGITestClient, valid_add_pinned_project_payload: dict[str, Any], api_user: APIUser ) -> None: _, res = await create_user_preferences(sanic_client, valid_add_pinned_project_payload, api_user) assert res.status_code == 200 + _, res = await sanic_client.post( + "/api/data/user/preferences/pinned_projects", + 
headers={"Authorization": f"bearer {api_user.access_token}"}, + data=json.dumps(dict(project_slug="/user.2/second-project///")), + ) + assert res.status_code == 422 _, res = await sanic_client.post( "/api/data/user/preferences/pinned_projects", headers={"Authorization": f"bearer {api_user.access_token}"}, @@ -83,11 +95,12 @@ async def test_post_user_preferences_pinned_projects( assert res.json.get("pinned_projects") is not None assert len(res.json["pinned_projects"].get("project_slugs")) == 2 project_slugs = res.json["pinned_projects"]["project_slugs"] - assert project_slugs[0] == "user.1/first-project" + assert project_slugs[0] == valid_add_pinned_project_payload["project_slug"] assert project_slugs[1] == "user.2/second-project" @pytest.mark.asyncio +@pytest.mark.parametrize("valid_add_pinned_project_payload", _valid_add_pinned_project) async def test_post_user_preferences_pinned_projects_existing( sanic_client: SanicASGITestClient, valid_add_pinned_project_payload: dict[str, Any], api_user: APIUser ) -> None: @@ -106,10 +119,11 @@ async def test_post_user_preferences_pinned_projects_existing( assert res.json.get("pinned_projects") is not None assert len(res.json["pinned_projects"].get("project_slugs")) == 1 project_slugs = res.json["pinned_projects"]["project_slugs"] - assert project_slugs[0] == "user.1/first-project" + assert project_slugs[0] == valid_add_pinned_project_payload["project_slug"] @pytest.mark.asyncio +@pytest.mark.parametrize("valid_add_pinned_project_payload", _valid_add_pinned_project) async def test_delete_user_preferences_pinned_projects( sanic_client: SanicASGITestClient, valid_add_pinned_project_payload: dict[str, Any], api_user: APIUser ) -> None: @@ -118,7 +132,7 @@ async def test_delete_user_preferences_pinned_projects( _, res = await sanic_client.delete( "/api/data/user/preferences/pinned_projects", - params=dict(project_slug="user.1/first-project"), + params=dict(project_slug=valid_add_pinned_project_payload["project_slug"]), headers={"Authorization": f"bearer {api_user.access_token}"}, ) @@ -130,6 +144,7 @@ async def test_delete_user_preferences_pinned_projects( @pytest.mark.asyncio +@pytest.mark.parametrize("valid_add_pinned_project_payload", _valid_add_pinned_project) async def test_delete_user_preferences_pinned_projects_unknown( sanic_client: SanicASGITestClient, valid_add_pinned_project_payload: dict[str, Any], api_user: APIUser ) -> None: @@ -148,4 +163,27 @@ async def test_delete_user_preferences_pinned_projects_unknown( assert res.json.get("pinned_projects") is not None assert len(res.json["pinned_projects"].get("project_slugs")) == 1 project_slugs = res.json["pinned_projects"]["project_slugs"] - assert project_slugs[0] == "user.1/first-project" + assert project_slugs[0] == valid_add_pinned_project_payload["project_slug"] + + +@pytest.mark.asyncio +async def test_add_user_preferences_dismiss_projects_migration_banner( + sanic_client: SanicASGITestClient, api_user: APIUser +) -> None: + _, res = await create_user_preferences_dismiss_banner(sanic_client, api_user) + assert res.status_code == 200 + + assert res.json is not None + assert res.json.get("user_id") == api_user.id + assert res.json.get("pinned_projects") == {"project_slugs": []} + assert not res.json.get("show_project_migration_banner") + assert res.json.get("show_project_migration_banner") is not None + + _, res = await sanic_client.delete( + "/api/data/user/preferences/dismiss_project_migration_banner", + headers={"Authorization": f"bearer {api_user.access_token}"}, + ) + + assert 
res.status_code == 200
+    assert res.json.get("show_project_migration_banner")
+    assert res.json.get("show_project_migration_banner") is not None
diff --git a/test/bases/renku_data_services/data_api/test_users.py b/test/bases/renku_data_services/data_api/test_users.py
index e3134ec7b..a77fcd584 100644
--- a/test/bases/renku_data_services/data_api/test_users.py
+++ b/test/bases/renku_data_services/data_api/test_users.py
@@ -4,7 +4,8 @@
 import pytest
 from ulid import ULID
 
-from renku_data_services.namespace.models import Namespace, NamespaceKind
+from renku_data_services.base_models.core import NamespacePath
+from renku_data_services.namespace.models import UserNamespace
 from renku_data_services.users.models import UserInfo
 
 
@@ -15,12 +16,11 @@ async def test_get_all_users_as_admin(sanic_client, users) -> None:
         first_name="Admin",
         last_name="Adminson",
         email="admin@gmail.com",
-        namespace=Namespace(
+        namespace=UserNamespace(
             id=ULID(),
-            slug="admin.adminson",
-            kind=NamespaceKind.user,
             underlying_resource_id="admin-id",
             created_by="admin-id",
+            path=NamespacePath.from_strings("admin.adminson"),
         ),
     )
     admin_token = {
@@ -174,7 +174,7 @@ async def test_logged_in_user_check_adds_user_if_missing(sanic_client, users, ad
         "first_name": user["first_name"],
         "last_name": user["last_name"],
         "email": user["email"],
-        "name": f"{user["first_name"]} {user["last_name"]}",
+        "name": f"{user['first_name']} {user['last_name']}",
     }
     # Just by hitting the users endpoint with valid credentials the user will be added to the database
     _, res = await sanic_client.get(
@@ -223,7 +223,7 @@ async def test_delete_user(sanic_client, admin_headers) -> None:
         "first_name": user["first_name"],
         "last_name": user["last_name"],
         "email": user["email"],
-        "name": f"{user["first_name"]} {user["last_name"]}",
+        "name": f"{user['first_name']} {user['last_name']}",
     }
     # Just by hitting the users endpoint with valid credentials the user will be added to the database
     _, res = await sanic_client.get(
@@ -282,7 +282,7 @@ async def test_get_self_user(sanic_client, user_headers, regular_user) -> None:
     assert response.json is not None
     user_info = response.json
     assert user_info.get("id") == regular_user.id
-    assert user_info.get("username") == regular_user.namespace.slug
+    assert user_info.get("username") == regular_user.namespace.path.serialize()
     assert user_info.get("email") == regular_user.email
     assert user_info.get("first_name") == regular_user.first_name
     assert user_info.get("last_name") == regular_user.last_name
@@ -297,7 +297,7 @@ async def test_get_self_user_as_admin(sanic_client, admin_headers, admin_user) -
     assert response.json is not None
     user_info = response.json
     assert user_info.get("id") == admin_user.id
-    assert user_info.get("username") == admin_user.namespace.slug
+    assert user_info.get("username") == admin_user.namespace.path.serialize()
     assert user_info.get("email") == admin_user.email
     assert user_info.get("first_name") == admin_user.first_name
     assert user_info.get("last_name") == admin_user.last_name
diff --git a/test/bases/renku_data_services/data_api/utils.py b/test/bases/renku_data_services/data_api/utils.py
index ada2a3156..af4af4f6d 100644
--- a/test/bases/renku_data_services/data_api/utils.py
+++ b/test/bases/renku_data_services/data_api/utils.py
@@ -2,13 +2,11 @@
 import os
 import shutil
 import subprocess
-from base64 import b64decode
 from contextlib import AbstractContextManager
 from typing import Any
 
 import pytest
 import yaml
-from dataclasses_avroschema import AvroModel
 from kubernetes import client as k8s_client
from kubernetes import config as k8s_config
 from kubernetes import watch
@@ -16,9 +14,6 @@
 from sanic_testing.testing import SanicASGITestClient, TestingResponse
 
 from renku_data_services.base_models import APIUser
-from renku_data_services.message_queue.avro_models.io.renku.events import v2
-from renku_data_services.message_queue.models import deserialize_binary
-from renku_data_services.message_queue.orm import EventORM
 
 
 async def create_rp(payload: dict[str, Any], test_client: SanicASGITestClient) -> tuple[Request, TestingResponse]:
@@ -40,6 +35,16 @@ async def create_user_preferences(
     )
 
 
+async def create_user_preferences_dismiss_banner(
+    test_client: SanicASGITestClient, api_user: APIUser
+) -> tuple[Request, TestingResponse]:
+    """Create user preferences by dismissing the project migration banner."""
+    return await test_client.post(
+        "/api/data/user/preferences/dismiss_project_migration_banner",
+        headers={"Authorization": f"bearer {api_user.access_token}"},
+    )
+
+
 def merge_headers(*headers: dict[str, str]) -> dict[str, str]:
     """Merge multiple headers."""
     all_headers = dict()
@@ -48,35 +53,6 @@ def merge_headers(*headers: dict[str, str]) -> dict[str, str]:
     return all_headers
 
 
-def deserialize_event(event: EventORM) -> AvroModel:
-    """Deserialize an EventORM object."""
-    event_type_mapping = {
-        "group.added": v2.GroupAdded,
-        "group.removed": v2.GroupRemoved,
-        "group.updated": v2.GroupUpdated,
-        "memberGroup.added": v2.GroupMemberAdded,
-        "memberGroup.removed": v2.GroupMemberRemoved,
-        "memberGroup.updated": v2.GroupMemberUpdated,
-        "projectAuth.added": v2.ProjectMemberAdded,
-        "projectAuth.removed": v2.ProjectMemberRemoved,
-        "projectAuth.updated": v2.ProjectMemberUpdated,
-        "project.created": v2.ProjectCreated,
-        "project.removed": v2.ProjectRemoved,
-        "project.updated": v2.ProjectUpdated,
-        "user.added": v2.UserAdded,
-        "user.removed": v2.UserRemoved,
-        "user.updated": v2.UserUpdated,
-        "reprovisioning.started": v2.ReprovisioningStarted,
-        "reprovisioning.finished": v2.ReprovisioningFinished,
-    }
-
-    event_type = event_type_mapping.get(event.get_message_type())
-    if not event_type:
-        raise ValueError(f"Unsupported message type: {event.get_message_type()}")
-
-    return deserialize_binary(b64decode(event.payload["payload"]), event_type)
-
-
 def dataclass_to_str(object) -> str:
     """Convert a dataclass to str to make them hashable."""
     data = object.asdict()
@@ -91,10 +67,12 @@ def __init__(
         cluster_name: str,
         k3s_image="latest",
         kubeconfig=".k3d-config.yaml",
-        extra_images=[],
+        extra_images: list[str] | None = None,
     ):
         self.cluster_name = cluster_name
         self.k3s_image = k3s_image
+        if extra_images is None:
+            extra_images = []
         self.extra_images = extra_images
         self.kubeconfig = kubeconfig
         self.env = os.environ.copy()
@@ -200,7 +178,7 @@ def setup_amalthea(install_name: str, app_name: str, version: str, cluster: K3DC
                 watcher.stop()
                 break
     else:
-        assert False, "Timeout waiting on amalthea to run"
+        raise AssertionError("Timeout waiting on amalthea to run") from None
 
 
 class ClusterRequired:
diff --git a/test/bases/renku_data_services/background_jobs/test_sync.py b/test/bases/renku_data_services/data_tasks/test_sync.py
similarity index 60%
rename from test/bases/renku_data_services/background_jobs/test_sync.py
rename to test/bases/renku_data_services/data_tasks/test_sync.py
index 8af8f5654..e0853ae39 100644
--- a/test/bases/renku_data_services/background_jobs/test_sync.py
+++ b/test/bases/renku_data_services/data_tasks/test_sync.py
@@ -1,6 +1,5 @@
 import json
 import re
-import secrets
 from
collections.abc import Callable from dataclasses import asdict from datetime import datetime @@ -9,6 +8,7 @@ import pytest from authzed.api.v1 import ( + Consistency, DeleteRelationshipsRequest, ReadRelationshipsRequest, Relationship, @@ -19,119 +19,51 @@ ) from ulid import ULID -from bases.renku_data_services.background_jobs.config import SyncConfig from renku_data_services.authz.admin_sync import sync_admins_from_keycloak from renku_data_services.authz.authz import Authz, ResourceType, _AuthzConverter, _Relation from renku_data_services.authz.config import AuthzConfig from renku_data_services.authz.models import Role, UnsavedMember -from renku_data_services.background_jobs.core import ( +from renku_data_services.base_api.pagination import PaginationRequest +from renku_data_services.base_models import APIUser +from renku_data_services.base_models.core import NamespacePath, Slug +from renku_data_services.data_tasks.config import Config +from renku_data_services.data_tasks.dependencies import DependencyManager +from renku_data_services.data_tasks.task_defs import ( bootstrap_user_namespaces, fix_mismatched_project_namespace_ids, migrate_groups_make_all_public, - migrate_storages_v2_to_data_connectors, migrate_user_namespaces_make_all_public, ) -from renku_data_services.base_api.pagination import PaginationRequest -from renku_data_services.base_models import APIUser -from renku_data_services.data_connectors.db import DataConnectorProjectLinkRepository, DataConnectorRepository -from renku_data_services.data_connectors.migration_utils import DataConnectorMigrationTool from renku_data_services.db_config import DBConfig from renku_data_services.errors import errors -from renku_data_services.message_queue.config import RedisConfig -from renku_data_services.message_queue.db import EventRepository -from renku_data_services.message_queue.redis_queue import RedisQueue from renku_data_services.migrations.core import run_migrations_for_app from renku_data_services.namespace.apispec import ( GroupPostRequest, ) -from renku_data_services.namespace.db import GroupRepository -from renku_data_services.namespace.models import Namespace, NamespaceKind +from renku_data_services.namespace.models import UserNamespace from renku_data_services.namespace.orm import NamespaceORM -from renku_data_services.project.db import ProjectRepository from renku_data_services.project.models import UnsavedProject -from renku_data_services.storage.models import UnsavedCloudStorage -from renku_data_services.storage.orm import CloudStorageORM -from renku_data_services.users.db import UserRepo, UsersSync from renku_data_services.users.dummy_kc_api import DummyKeycloakAPI from renku_data_services.users.models import KeycloakAdminEvent, UnsavedUserInfo, UserInfo, UserInfoFieldUpdate from renku_data_services.users.orm import UserORM @pytest.fixture -def get_app_configs(db_instance: DBConfig, authz_instance: AuthzConfig): - def _get_app_configs( - kc_api: DummyKeycloakAPI, total_user_sync: bool = False - ) -> tuple[ - SyncConfig, - UserRepo, - ]: - redis = RedisConfig.fake() - message_queue = RedisQueue(redis) - event_repo = EventRepository(db_instance.async_session_maker, message_queue=message_queue) - group_repo = GroupRepository( - session_maker=db_instance.async_session_maker, - event_repo=event_repo, - group_authz=Authz(authz_instance), - message_queue=message_queue, - ) - project_repo = ProjectRepository( - session_maker=db_instance.async_session_maker, - message_queue=message_queue, - event_repo=event_repo, - 
group_repo=group_repo, - authz=Authz(authz_instance), - ) - data_connector_repo = DataConnectorRepository( - session_maker=db_instance.async_session_maker, - authz=Authz(authz_instance), - ) - data_connector_project_link_repo = DataConnectorProjectLinkRepository( - session_maker=db_instance.async_session_maker, - authz=Authz(authz_instance), - ) - data_connector_migration_tool = DataConnectorMigrationTool( - session_maker=db_instance.async_session_maker, - data_connector_repo=data_connector_repo, - data_connector_project_link_repo=data_connector_project_link_repo, - project_repo=project_repo, - authz=Authz(authz_instance), - ) - user_repo = UserRepo( - db_instance.async_session_maker, - message_queue=message_queue, - event_repo=event_repo, - group_repo=group_repo, - encryption_key=secrets.token_bytes(32), - authz=Authz(authz_instance), - ) - users_sync = UsersSync( - db_instance.async_session_maker, - message_queue=message_queue, - event_repo=event_repo, - group_repo=group_repo, - user_repo=user_repo, - authz=Authz(authz_instance), - ) - config = SyncConfig( - syncer=users_sync, - kc_api=kc_api, - authz_config=authz_instance, - group_repo=group_repo, - event_repo=event_repo, - project_repo=project_repo, - data_connector_migration_tool=data_connector_migration_tool, - session_maker=db_instance.async_session_maker, - ) +def get_app_manager(db_instance: DBConfig, authz_instance: AuthzConfig): + def _get_dependency_manager(kc_api: DummyKeycloakAPI, total_user_sync: bool = False) -> DependencyManager: + config = Config.from_env() + config.db = db_instance + dm = DependencyManager.from_env(config) run_migrations_for_app("common") - return config, user_repo + return dm - yield _get_app_configs + yield _get_dependency_manager def get_kc_users(updates: list[UserInfo]) -> list[dict[str, Any]]: output: list[dict[str, Any]] = [] for update in updates: - output.append(update._to_keycloak_dict()) + output.append(update.to_keycloak_dict()) return output @@ -222,27 +154,24 @@ def get_kc_roles(role_names: list[str]) -> dict[str, list[dict[str, Union[bool, "composite": False, "clientRole": False, "containerId": str(uuid4()), - }, + } + for role_name in role_names ] - for role_name in role_names } @pytest.mark.asyncio -async def test_total_users_sync( - get_app_configs: Callable[..., tuple[SyncConfig, UserRepo]], admin_user: APIUser -) -> None: +async def test_total_users_sync(get_app_manager: Callable[..., DependencyManager], admin_user: APIUser) -> None: user1 = UserInfo( id="user-1-id", first_name="John", last_name="Doe", email="john.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id="user-1-id", - slug="user-1", - kind=NamespaceKind.user, underlying_resource_id="user-1-id", created_by="user-1-id", + path=NamespacePath.from_strings("user-1"), ), ) user2 = UserInfo( @@ -250,12 +179,11 @@ async def test_total_users_sync( first_name="Jane", last_name="Doe", email="jane.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id="user-2-id", - slug="user-2", - kind=NamespaceKind.user, underlying_resource_id="user-2-id", created_by="user-2-id", + path=NamespacePath.from_strings("user-2"), ), ) assert admin_user.id @@ -264,21 +192,19 @@ async def test_total_users_sync( first_name=admin_user.first_name, last_name=admin_user.last_name, email=admin_user.email, - namespace=Namespace( + namespace=UserNamespace( id=admin_user.id, - slug="admin", - kind=NamespaceKind.user, underlying_resource_id=admin_user.id, created_by=admin_user.id, + path=NamespacePath.from_strings("admin"), ), ) 
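     # Grant the admin user the "renku-admin" realm role in the dummy Keycloak API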
user_roles = {admin_user.id: get_kc_roles(["renku-admin"])} kc_api = DummyKeycloakAPI(users=get_kc_users([user1, user2, admin_user_info]), user_roles=user_roles) - sync_config: SyncConfig - user_repo: UserRepo - sync_config, user_repo = get_app_configs(kc_api) - db_users = await user_repo.get_users(admin_user) - kc_users = [UserInfo.from_kc_user_payload(user) for user in sync_config.kc_api.get_users()] + dm = get_app_manager(kc_api) + dm.kc_api = kc_api + db_users = await dm.syncer.user_repo.get_users(admin_user) + kc_users = [UserInfo.from_kc_user_payload(user) for user in dm.kc_api.get_users()] kc_users.append( UnsavedUserInfo( id=admin_user.id, @@ -289,42 +215,37 @@ async def test_total_users_sync( ) assert set(u.id for u in kc_users) == set([user1.id, user2.id, admin_user_info.id]) assert len(db_users) == 1 # listing users add the requesting user if not present - await sync_config.syncer.users_sync(kc_api) - db_users = await user_repo.get_users(admin_user) + await dm.syncer.users_sync(kc_api) + db_users = await dm.syncer.user_repo.get_users(admin_user) assert set(u.id for u in kc_users) == set(u.id for u in db_users) # Make sure doing users sync again does not change anything and works - await sync_config.syncer.users_sync(kc_api) - db_users = await user_repo.get_users(admin_user) + await dm.syncer.users_sync(kc_api) + db_users = await dm.syncer.user_repo.get_users(admin_user) assert set(u.id for u in kc_users) == set(u.id for u in db_users) # Make sure that the addition of the users resulted in the creation of namespaces - nss, _ = await sync_config.syncer.group_repo.get_namespaces( - user=APIUser(id=user1.id), pagination=PaginationRequest(1, 100) - ) + nss, _ = await dm.syncer.group_repo.get_namespaces(user=APIUser(id=user1.id), pagination=PaginationRequest(1, 100)) assert len(nss) == 1 assert user1.email - assert nss[0].slug == user1.email.split("@")[0] - nss, _ = await sync_config.syncer.group_repo.get_namespaces( - user=APIUser(id=user2.id), pagination=PaginationRequest(1, 100) - ) + assert nss[0].path.serialize() == user1.email.split("@")[0] + nss, _ = await dm.syncer.group_repo.get_namespaces(user=APIUser(id=user2.id), pagination=PaginationRequest(1, 100)) assert len(nss) == 1 assert user2.email - assert nss[0].slug == user2.email.split("@")[0] + assert nss[0].path.serialize() == user2.email.split("@")[0] @pytest.mark.asyncio -async def test_user_events_update(get_app_configs, admin_user: APIUser) -> None: +async def test_user_events_update(get_app_manager, admin_user: APIUser) -> None: kc_api = DummyKeycloakAPI() user1 = UserInfo( id="user-1-id", first_name="John", last_name="Doe", email="john.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="john.doe", created_by="user-1-id", - kind=NamespaceKind.user, underlying_resource_id="user-1-id", + path=NamespacePath.from_strings("john.doe"), ), ) assert admin_user.id @@ -333,24 +254,22 @@ async def test_user_events_update(get_app_configs, admin_user: APIUser) -> None: first_name=admin_user.first_name, last_name=admin_user.last_name, email=admin_user.email, - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="admin-user", created_by=admin_user.id, - kind=NamespaceKind.user, underlying_resource_id=admin_user.id, + path=NamespacePath.from_strings("admin-user"), ), ) kc_api.users = get_kc_users([user1]) - sync_config: SyncConfig - user_repo: UserRepo - sync_config, user_repo = get_app_configs(kc_api) - db_users = await user_repo.get_users(admin_user) - kc_users = 
[UserInfo.from_kc_user_payload(user) for user in sync_config.kc_api.get_users()] + dm = get_app_manager(kc_api) + dm.kc_api = kc_api + db_users = await dm.syncer.user_repo.get_users(admin_user) + kc_users = [UserInfo.from_kc_user_payload(user) for user in dm.kc_api.get_users()] assert set(u.id for u in kc_users) == {user1.id} assert len(db_users) == 1 # listing users add the requesting user if not present - await sync_config.syncer.users_sync(kc_api) - db_users = await user_repo.get_users(admin_user) + await dm.syncer.users_sync(kc_api) + db_users = await dm.syncer.user_repo.get_users(admin_user) kc_users.append(admin_user_info) assert set(u.id for u in kc_users) == set(u.id for u in db_users) # Add update and create events @@ -359,49 +278,45 @@ async def test_user_events_update(get_app_configs, admin_user: APIUser) -> None: first_name="Jane", last_name="Doe", email="jane.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="jane.doe", created_by="user-2-id", - kind=NamespaceKind.user, underlying_resource_id="user-2-id", + path=NamespacePath.from_strings("jane.doe"), ), ) user1_update = UserInfoFieldUpdate("user-1-id", datetime.utcnow(), "first_name", "Johnathan") user1_updated = UserInfo(**{**asdict(user1), "first_name": "Johnathan"}) kc_api.user_events = get_kc_user_create_events([user2]) + get_kc_user_update_events([user1_update]) # Process events and check if updates show up - await sync_config.syncer.events_sync(kc_api) - db_users = await user_repo.get_users(admin_user) + await dm.syncer.events_sync(kc_api) + db_users = await dm.syncer.user_repo.get_users(admin_user) assert set(u.id for u in db_users) == set(u.id for u in [user1_updated, user2, admin_user_info]) # Ensure re-processing events does not break anything kc_api.user_events = get_kc_user_create_events([user2]) + get_kc_user_update_events([user1_update]) - await sync_config.syncer.events_sync(kc_api) - db_users = await user_repo.get_users(admin_user) + await dm.syncer.events_sync(kc_api) + db_users = await dm.syncer.user_repo.get_users(admin_user) assert set(u.id for u in db_users) == set(u.id for u in [user1_updated, user2, admin_user_info]) # Make sure that the addition of the user resulted in the creation of namespaces - nss, _ = await sync_config.syncer.group_repo.get_namespaces( - user=APIUser(id=user2.id), pagination=PaginationRequest(1, 100) - ) + nss, _ = await dm.syncer.group_repo.get_namespaces(user=APIUser(id=user2.id), pagination=PaginationRequest(1, 100)) assert len(nss) == 1 assert user2.email - assert nss[0].slug == user2.email.split("@")[0] + assert nss[0].path.serialize() == user2.email.split("@")[0] @pytest.mark.asyncio -async def test_admin_events(get_app_configs, admin_user: APIUser) -> None: +async def test_admin_events(get_app_manager, admin_user: APIUser) -> None: kc_api = DummyKeycloakAPI() user1 = UserInfo( id="user-1-id", first_name="John", last_name="Doe", email="john.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="john.doe", created_by="user-1-id", - kind=NamespaceKind.user, underlying_resource_id="user-1-id", + path=NamespacePath.from_strings("john.doe"), ), ) user2 = UserInfo( @@ -409,12 +324,11 @@ async def test_admin_events(get_app_configs, admin_user: APIUser) -> None: first_name="Jane", last_name="Doe", email="jane.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="jane.doe", created_by="user-2-id", - kind=NamespaceKind.user, underlying_resource_id="user-2-id", + 
path=NamespacePath.from_strings("jane.doe"), ), ) assert admin_user.id @@ -423,31 +337,27 @@ async def test_admin_events(get_app_configs, admin_user: APIUser) -> None: first_name=admin_user.first_name, last_name=admin_user.last_name, email=admin_user.email, - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="admin-user", created_by=admin_user.id, - kind=NamespaceKind.user, underlying_resource_id=admin_user.id, + path=NamespacePath.from_strings("admin-user"), ), ) kc_api.users = get_kc_users([user1, user2, admin_user_info]) - sync_config: SyncConfig - user_repo: UserRepo - sync_config, user_repo = get_app_configs(kc_api) - db_users = await user_repo.get_users(admin_user) - kc_users = [UserInfo.from_kc_user_payload(user) for user in sync_config.kc_api.get_users()] + dm = get_app_manager(kc_api) + dm.kc_api = kc_api + db_users = await dm.syncer.user_repo.get_users(admin_user) + kc_users = [UserInfo.from_kc_user_payload(user) for user in dm.kc_api.get_users()] assert set(u.id for u in kc_users) == set(u.id for u in [user1, user2, admin_user_info]) assert len(db_users) == 1 # listing users add the requesting user if not present - await sync_config.syncer.users_sync(kc_api) + await dm.syncer.users_sync(kc_api) # Make sure that the addition of the users resulted in the creation of namespaces - nss, _ = await sync_config.syncer.group_repo.get_namespaces( - user=APIUser(id=user2.id), pagination=PaginationRequest(1, 100) - ) + nss, _ = await dm.syncer.group_repo.get_namespaces(user=APIUser(id=user2.id), pagination=PaginationRequest(1, 100)) assert len(nss) == 1 assert user2.email - assert nss[0].slug == user2.email.split("@")[0] - db_users = await user_repo.get_users(admin_user) + assert nss[0].path.serialize() == user2.email.split("@")[0] + db_users = await dm.syncer.user_repo.get_users(admin_user) assert set(u.id for u in kc_users) == set(u.id for u in db_users) # Add admin events user1_updated = UserInfo(**{**asdict(user1), "last_name": "Renku"}) @@ -455,30 +365,27 @@ async def test_admin_events(get_app_configs, admin_user: APIUser) -> None: [(user2, KeycloakAdminEvent.DELETE), (user1_updated, KeycloakAdminEvent.UPDATE)] ) # Process admin events - await sync_config.syncer.events_sync(kc_api) - db_users = await user_repo.get_users(admin_user) + await dm.syncer.events_sync(kc_api) + db_users = await dm.syncer.user_repo.get_users(admin_user) assert set(u.id for u in [user1_updated, admin_user_info]) == set(u.id for u in db_users) # Make sure that the removal of a user removes the namespace - nss, _ = await sync_config.syncer.group_repo.get_namespaces( - user=APIUser(id=user2.id), pagination=PaginationRequest(1, 100) - ) + nss, _ = await dm.syncer.group_repo.get_namespaces(user=APIUser(id=user2.id), pagination=PaginationRequest(1, 100)) assert len(nss) == 0 @pytest.mark.asyncio -async def test_events_update_error(get_app_configs, admin_user: APIUser) -> None: +async def test_events_update_error(get_app_manager, admin_user: APIUser) -> None: kc_api = DummyKeycloakAPI() user1 = UserInfo( id="user-1-id", first_name="John", last_name="Doe", email="john.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="john.doe", created_by="user-1-id", - kind=NamespaceKind.user, underlying_resource_id="user-1-id", + path=NamespacePath.from_strings("john.doe"), ), ) user2 = UserInfo( @@ -486,12 +393,11 @@ async def test_events_update_error(get_app_configs, admin_user: APIUser) -> None first_name="Jane", last_name="Doe", email="jane.doe@gmail.com", - namespace=Namespace( 
+ namespace=UserNamespace( id=ULID(), - slug="jane.doe", created_by="user-2-id", - kind=NamespaceKind.user, underlying_resource_id="user-2-id", + path=NamespacePath.from_strings("jane.doe"), ), ) assert admin_user.id @@ -500,20 +406,18 @@ async def test_events_update_error(get_app_configs, admin_user: APIUser) -> None first_name=admin_user.first_name, last_name=admin_user.last_name, email=admin_user.email, - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="admin-user", created_by=admin_user.id, - kind=NamespaceKind.user, underlying_resource_id=admin_user.id, + path=NamespacePath.from_strings("admin-user"), ), ) kc_api.users = get_kc_users([user1, user2]) - sync_config: SyncConfig - user_repo: UserRepo - sync_config, user_repo = get_app_configs(kc_api) - db_users = await user_repo.get_users(admin_user) - kc_users = [UserInfo.from_kc_user_payload(user) for user in sync_config.kc_api.get_users()] + dm = get_app_manager(kc_api) + dm.kc_api = kc_api + db_users = await dm.syncer.user_repo.get_users(admin_user) + kc_users = [UserInfo.from_kc_user_payload(user) for user in dm.kc_api.get_users()] kc_users.append(admin_user_info) assert set(u.id for u in kc_users) == set(u.id for u in [user1, user2, admin_user_info]) assert len(db_users) == 1 # listing users add the requesting user if not present @@ -521,8 +425,8 @@ async def test_events_update_error(get_app_configs, admin_user: APIUser) -> None assert db_users[0].first_name == admin_user_info.first_name assert db_users[0].last_name == admin_user_info.last_name assert db_users[0].email == admin_user_info.email - await sync_config.syncer.users_sync(kc_api) - db_users = await user_repo.get_users(admin_user) + await dm.syncer.users_sync(kc_api) + db_users = await dm.syncer.user_repo.get_users(admin_user) assert set(u.id for u in kc_users) == set(u.id for u in db_users) # Add admin events user1_updated = UserInfo(**{**asdict(user1), "last_name": "Renku"}) @@ -534,33 +438,32 @@ async def test_events_update_error(get_app_configs, admin_user: APIUser) -> None ) # Process admin events with pytest.raises(ValueError): - await sync_config.syncer.events_sync(kc_api) - db_users = await user_repo.get_users(admin_user) + await dm.syncer.events_sync(kc_api) + db_users = await dm.syncer.user_repo.get_users(admin_user) # An error occurs in processing an event or between events and none of the events are processed assert set(u.id for u in [user1, user2, admin_user_info]) == set(u.id for u in db_users) # Add admin events without error kc_api.admin_events = get_kc_admin_events([(user1_updated, KeycloakAdminEvent.UPDATE)]) + get_kc_admin_events( [(user2_updated, KeycloakAdminEvent.UPDATE)] ) - await sync_config.syncer.events_sync(kc_api) - db_users = await user_repo.get_users(admin_user) + await dm.syncer.events_sync(kc_api) + db_users = await dm.syncer.user_repo.get_users(admin_user) assert set(u.id for u in [user1_updated, user2_updated, admin_user_info]) == set(u.id for u in db_users) @pytest.mark.asyncio -async def test_removing_non_existent_user(get_app_configs, admin_user: APIUser) -> None: +async def test_removing_non_existent_user(get_app_manager, admin_user: APIUser) -> None: kc_api = DummyKeycloakAPI() user1 = UserInfo( id="user-1-id", first_name="John", last_name="Doe", email="john.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="john.doe", created_by="user-1-id", - kind=NamespaceKind.user, underlying_resource_id="user-1-id", + path=NamespacePath.from_strings("john.doe"), ), ) non_existent_user = UserInfo( 
@@ -568,12 +471,11 @@ async def test_removing_non_existent_user(get_app_configs, admin_user: APIUser) first_name="Not", last_name="Exist", email="not.exist@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="not.exist", created_by="noone", - kind=NamespaceKind.user, underlying_resource_id="non-existent-id", + path=NamespacePath.from_strings("not.exist"), ), ) assert admin_user.id @@ -582,36 +484,34 @@ async def test_removing_non_existent_user(get_app_configs, admin_user: APIUser) first_name=admin_user.first_name, last_name=admin_user.last_name, email=admin_user.email, - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="admin-user", created_by=admin_user.id, - kind=NamespaceKind.user, underlying_resource_id=admin_user.id, + path=NamespacePath.from_strings("admin-user"), ), ) kc_api.users = get_kc_users([user1, admin_user_info]) - sync_config: SyncConfig - user_repo: UserRepo - sync_config, user_repo = get_app_configs(kc_api) - db_users = await user_repo.get_users(admin_user) - kc_users = [UserInfo.from_kc_user_payload(user) for user in sync_config.kc_api.get_users()] + dm = get_app_manager(kc_api) + dm.kc_api = kc_api + db_users = await dm.syncer.user_repo.get_users(admin_user) + kc_users = [UserInfo.from_kc_user_payload(user) for user in dm.kc_api.get_users()] assert set(u.id for u in kc_users) == set(u.id for u in [user1, admin_user_info]) assert len(db_users) == 1 - await sync_config.syncer.users_sync(kc_api) - db_users = await user_repo.get_users(admin_user) + await dm.syncer.users_sync(kc_api) + db_users = await dm.syncer.user_repo.get_users(admin_user) assert set(u.id for u in kc_users) == set(u.id for u in db_users) # Add admin events kc_api.admin_events = get_kc_admin_events([(non_existent_user, KeycloakAdminEvent.DELETE)]) # Process events - await sync_config.syncer.events_sync(kc_api) - db_users = await user_repo.get_users(admin_user) + await dm.syncer.events_sync(kc_api) + db_users = await dm.syncer.user_repo.get_users(admin_user) assert set(u.id for u in db_users) == set(u.id for u in [user1, admin_user_info]) @pytest.mark.asyncio async def test_avoiding_namespace_slug_duplicates( - get_app_configs: Callable[..., tuple[SyncConfig, UserRepo]], admin_user: APIUser + get_app_manager: Callable[..., DependencyManager], admin_user: APIUser ) -> None: kc_api = DummyKeycloakAPI() num_users = 10 @@ -621,12 +521,11 @@ async def test_avoiding_namespace_slug_duplicates( first_name="John", last_name="Doe", email="john.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="john.doe", created_by=f"user-{i}-id", - kind=NamespaceKind.user, underlying_resource_id=f"user-{i}-id", + path=NamespacePath.from_strings("john.doe"), ), ) for i in range(1, num_users + 1) @@ -637,32 +536,32 @@ async def test_avoiding_namespace_slug_duplicates( first_name=admin_user.first_name, last_name=admin_user.last_name, email=admin_user.email, - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="admin", created_by=admin_user.id, - kind=NamespaceKind.user, underlying_resource_id=admin_user.id, + path=NamespacePath.from_strings("admin"), ), ) kc_api.users = get_kc_users(users + [admin_user_info]) - sync_config, _ = get_app_configs(kc_api) + dm = get_app_manager(kc_api) + dm.kc_api = kc_api original_count = 0 enumerated_count = 0 random_count = 0 - await sync_config.syncer.users_sync(kc_api) + await dm.syncer.users_sync(kc_api) for user in users: api_user = APIUser(id=user.id) - nss, _ = await 
sync_config.syncer.group_repo.get_namespaces(api_user, PaginationRequest(1, 100)) + nss, _ = await dm.syncer.group_repo.get_namespaces(api_user, PaginationRequest(1, 100)) assert len(nss) == 1 ns = nss[0] assert user.email prefix = user.email.split("@")[0] - if re.match(rf"^{re.escape(prefix)}-[a-z0-9]{{8}}$", ns.slug): + if re.match(rf"^{re.escape(prefix)}-[a-z0-9]{{8}}$", ns.path.serialize()): random_count += 1 - elif re.match(rf"^{re.escape(prefix)}-[1-5]$", ns.slug): + elif re.match(rf"^{re.escape(prefix)}-[1-5]$", ns.path.serialize()): enumerated_count += 1 - elif ns.slug == prefix: + elif ns.path.serialize() == prefix: original_count += 1 assert original_count == 1 assert enumerated_count == 5 @@ -670,19 +569,18 @@ async def test_avoiding_namespace_slug_duplicates( @pytest.mark.asyncio -async def test_authz_admin_sync(get_app_configs, admin_user: APIUser) -> None: +async def test_authz_admin_sync(get_app_manager, admin_user: APIUser) -> None: kc_api = DummyKeycloakAPI() user1 = UserInfo( id="user-1-id", first_name="John", last_name="Doe", email="john.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="john.doe", created_by="user-1-id", - kind=NamespaceKind.user, underlying_resource_id="user-1-id", + path=NamespacePath.from_strings("john.doe"), ), ) assert admin_user.id @@ -691,37 +589,34 @@ async def test_authz_admin_sync(get_app_configs, admin_user: APIUser) -> None: first_name=admin_user.first_name, last_name=admin_user.last_name, email=admin_user.email, - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="admin-user", created_by=admin_user.id, - kind=NamespaceKind.user, underlying_resource_id=admin_user.id, + path=NamespacePath.from_strings("admin-user"), ), ) kc_api.users = get_kc_users([user1, admin_user_info]) kc_api.user_roles = {admin_user_info.id: ["renku-admin"]} - sync_config: SyncConfig - user_repo: UserRepo - sync_config, user_repo = get_app_configs(kc_api) - authz = Authz(sync_config.authz_config) - db_users = await user_repo.get_users(admin_user) - kc_users = [UserInfo.from_kc_user_payload(user) for user in sync_config.kc_api.get_users()] - await sync_config.syncer.users_sync(kc_api) - await sync_admins_from_keycloak(kc_api, authz) - db_users = await user_repo.get_users(admin_user) + dm = get_app_manager(kc_api) + dm.kc_api = kc_api + db_users = await dm.syncer.user_repo.get_users(admin_user) + kc_users = [UserInfo.from_kc_user_payload(user) for user in dm.kc_api.get_users()] + await dm.syncer.users_sync(kc_api) + await sync_admins_from_keycloak(kc_api, dm.authz) + db_users = await dm.syncer.user_repo.get_users(admin_user) assert set(u.id for u in kc_users) == set(u.id for u in db_users) - authz_admin_ids = await authz._get_admin_user_ids() + authz_admin_ids = await dm.authz._get_admin_user_ids() assert set(authz_admin_ids) == {admin_user_info.id} # Make user1 admin kc_api.user_roles[user1.id] = ["renku-admin"] - await sync_admins_from_keycloak(kc_api, authz) - authz_admin_ids = await authz._get_admin_user_ids() + await sync_admins_from_keycloak(kc_api, dm.authz) + authz_admin_ids = await dm.authz._get_admin_user_ids() assert set(authz_admin_ids) == {admin_user_info.id, user1.id} # Remove original admin kc_api.user_roles.pop(admin_user_info.id) - await sync_admins_from_keycloak(kc_api, authz) - authz_admin_ids = await authz._get_admin_user_ids() + await sync_admins_from_keycloak(kc_api, dm.authz) + authz_admin_ids = await dm.authz._get_admin_user_ids() assert set(authz_admin_ids) == {user1.id} @@ -729,9 +624,10 @@ async 
def get_user_namespace_ids_in_authz(authz: Authz) -> set[str]: """Returns the user""" res = authz.client.ReadRelationships( ReadRelationshipsRequest( + consistency=Consistency(fully_consistent=True), relationship_filter=RelationshipFilter( resource_type=ResourceType.user_namespace.value, optional_relation=_Relation.owner.value - ) + ), ) ) ids = [i.relationship.resource.object_id async for i in res] @@ -739,19 +635,18 @@ async def get_user_namespace_ids_in_authz(authz: Authz) -> set[str]: @pytest.mark.asyncio -async def test_bootstraping_user_namespaces(get_app_configs, admin_user: APIUser): +async def test_bootstraping_user_namespaces(get_app_manager, admin_user: APIUser): kc_api = DummyKeycloakAPI() user1 = UserInfo( id="user-1-id", first_name="John", last_name="Doe", email="john.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="john.doe", created_by="user-1-id", - kind=NamespaceKind.user, underlying_resource_id="user-1-id", + path=NamespacePath.from_strings("john.doe"), ), ) user2 = UserInfo( @@ -759,21 +654,19 @@ async def test_bootstraping_user_namespaces(get_app_configs, admin_user: APIUser first_name="Jane", last_name="Doe", email="jane.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="jane.doe", created_by="user-2-id", - kind=NamespaceKind.user, underlying_resource_id="user-2-id", + path=NamespacePath.from_strings("jane.doe"), ), ) assert admin_user.id kc_api.users = get_kc_users([user1, user2]) - sync_config: SyncConfig - sync_config, _ = get_app_configs(kc_api) - authz = Authz(sync_config.authz_config) + dm = get_app_manager(kc_api) + dm.kc_api = kc_api db_user_namespace_ids: set[ULID] = set() - async with sync_config.session_maker() as session, session.begin(): + async with dm.config.db.async_session_maker() as session, session.begin(): for user in [user1, user2]: user_orm = UserORM( user.id, @@ -785,28 +678,27 @@ async def test_bootstraping_user_namespaces(get_app_configs, admin_user: APIUser session.add(user_orm) await session.flush() db_user_namespace_ids.add(user_orm.namespace.id) - authz_user_namespace_ids = await get_user_namespace_ids_in_authz(authz) + authz_user_namespace_ids = await get_user_namespace_ids_in_authz(dm.authz) assert len(authz_user_namespace_ids) == 0 - await bootstrap_user_namespaces(sync_config) - authz_user_namespace_ids = await get_user_namespace_ids_in_authz(authz) + await bootstrap_user_namespaces(dm) + authz_user_namespace_ids = await get_user_namespace_ids_in_authz(dm.authz) assert db_user_namespace_ids == authz_user_namespace_ids @pytest.mark.asyncio async def test_fixing_project_group_namespace_relations( - get_app_configs: Callable[..., tuple[SyncConfig, UserRepo]], admin_user: APIUser + get_app_manager: Callable[..., DependencyManager], admin_user: APIUser ): admin_user_info = UserInfo( id=admin_user.id, first_name=admin_user.first_name, last_name=admin_user.last_name, email=admin_user.email, - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="admin-user", created_by=admin_user.id, - kind=NamespaceKind.user, underlying_resource_id=admin_user.id, + path=NamespacePath.from_strings("admin-user"), ), ) user1 = UserInfo( @@ -814,12 +706,11 @@ async def test_fixing_project_group_namespace_relations( first_name="John", last_name="Doe", email="john.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="john.doe", created_by="user-1-id", - kind=NamespaceKind.user, underlying_resource_id="user-1-id", + 
path=NamespacePath.from_strings("john.doe"), ), ) user2 = UserInfo( @@ -827,32 +718,31 @@ async def test_fixing_project_group_namespace_relations( first_name="Jane", last_name="Doe", email="jane.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="jane.doe", created_by="user-2-id", - kind=NamespaceKind.user, underlying_resource_id="user-2-id", + path=NamespacePath.from_strings("jane.doe"), ), ) user1_api = APIUser(is_admin=False, id=user1.id, access_token="access_token") user2_api = APIUser(is_admin=False, id=user2.id, access_token="access_token") user_roles = {admin_user.id: get_kc_roles(["renku-admin"])} kc_api = DummyKeycloakAPI(users=get_kc_users([admin_user_info, user1, user2]), user_roles=user_roles) - sync_config, user_repo = get_app_configs(kc_api) + dm = get_app_manager(kc_api) + dm.kc_api = kc_api # Sync users - await sync_config.syncer.users_sync(kc_api) - authz = Authz(sync_config.authz_config) + await dm.syncer.users_sync(kc_api) # Create group group_payload = GroupPostRequest(name="group1", slug="group1", description=None) - group = await sync_config.group_repo.insert_group(user1_api, group_payload) + group = await dm.group_repo.insert_group(user1_api, group_payload) # Create project project_payload = UnsavedProject( name="project1", slug="project1", namespace="group1", created_by=user1.id, visibility="private" ) - project = await sync_config.project_repo.insert_project(user1_api, project_payload) + project = await dm.project_repo.insert_project(user1_api, project_payload) # Write the wrong group ID - await authz.client.WriteRelationships( + await dm.authz.client.WriteRelationships( WriteRelationshipsRequest( updates=[ RelationshipUpdate( @@ -875,29 +765,26 @@ async def test_fixing_project_group_namespace_relations( ) ) # Add group member - await sync_config.group_repo.update_group_members(user1_api, "group1", [UnsavedMember(Role.VIEWER, user2.id)]) + await dm.group_repo.update_group_members(user1_api, Slug("group1"), [UnsavedMember(Role.VIEWER, user2.id)]) with pytest.raises(errors.MissingResourceError): - await sync_config.project_repo.get_project(user2_api, project.id) - await fix_mismatched_project_namespace_ids(sync_config) + await dm.project_repo.get_project(user2_api, project.id) + await fix_mismatched_project_namespace_ids(dm) # After the fix you can read the project - await sync_config.project_repo.get_project(user2_api, project.id) + await dm.project_repo.get_project(user2_api, project.id) @pytest.mark.asyncio -async def test_migrate_groups_make_all_public( - get_app_configs: Callable[..., tuple[SyncConfig, UserRepo]], admin_user: APIUser -): +async def test_migrate_groups_make_all_public(get_app_manager: Callable[..., DependencyManager], admin_user: APIUser): admin_user_info = UserInfo( id=admin_user.id, first_name=admin_user.first_name, last_name=admin_user.last_name, email=admin_user.email, - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="admin-user", created_by=admin_user.id, - kind=NamespaceKind.user, underlying_resource_id=admin_user.id, + path=NamespacePath.from_strings("admin-user"), ), ) user = UserInfo( @@ -905,27 +792,26 @@ async def test_migrate_groups_make_all_public( first_name="John", last_name="Doe", email="john.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="john.doe", created_by="user-1-id", - kind=NamespaceKind.user, underlying_resource_id="user-1-id", + path=NamespacePath.from_strings("john.doe"), ), ) user_api = APIUser(is_admin=False, id=user.id, 
access_token="access_token") anon_user_api = APIUser(is_admin=False) user_roles = {admin_user.id: get_kc_roles(["renku-admin"])} kc_api = DummyKeycloakAPI(users=get_kc_users([admin_user_info, user]), user_roles=user_roles) - sync_config, _ = get_app_configs(kc_api) + dm = get_app_manager(kc_api) + dm.kc_api = kc_api # Sync users - await sync_config.syncer.users_sync(kc_api) - authz = Authz(sync_config.authz_config) + await dm.syncer.users_sync(kc_api) # Create group group_payload = GroupPostRequest(name="group1", slug="group1", description=None) - group = await sync_config.group_repo.insert_group(user_api, group_payload) + group = await dm.group_repo.insert_group(user_api, group_payload) # Remove the public viewer relations - await authz.client.DeleteRelationships( + await dm.authz.client.DeleteRelationships( DeleteRelationshipsRequest( relationship_filter=RelationshipFilter( resource_type=ResourceType.group.value, optional_relation=_Relation.public_viewer.value @@ -934,12 +820,12 @@ async def test_migrate_groups_make_all_public( ) with pytest.raises(errors.MissingResourceError): - group_members = await sync_config.group_repo.get_group_members(user=anon_user_api, slug=group.slug) + group_members = await dm.group_repo.get_group_members(user=anon_user_api, slug=Slug(group.slug)) - await migrate_groups_make_all_public(sync_config) + await migrate_groups_make_all_public(dm) # After the migration, the group is public - group_members = await sync_config.group_repo.get_group_members(user=anon_user_api, slug=group.slug) + group_members = await dm.group_repo.get_group_members(user=anon_user_api, slug=Slug(group.slug)) assert len(group_members) == 1 assert group_members[0].id == "user-1-id" assert group_members[0].role.value == "owner" @@ -947,19 +833,18 @@ async def test_migrate_groups_make_all_public( @pytest.mark.asyncio async def test_migrate_user_namespaces_make_all_public( - get_app_configs: Callable[..., tuple[SyncConfig, UserRepo]], admin_user: APIUser + get_app_manager: Callable[..., DependencyManager], admin_user: APIUser ): admin_user_info = UserInfo( id=admin_user.id, first_name=admin_user.first_name, last_name=admin_user.last_name, email=admin_user.email, - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="admin-user", created_by=admin_user.id, - kind=NamespaceKind.user, underlying_resource_id=admin_user.id, + path=NamespacePath.from_strings("admin-user"), ), ) user = UserInfo( @@ -967,23 +852,22 @@ async def test_migrate_user_namespaces_make_all_public( first_name="John", last_name="Doe", email="john.doe@gmail.com", - namespace=Namespace( + namespace=UserNamespace( id=ULID(), - slug="john.doe", created_by="user-1-id", - kind=NamespaceKind.user, underlying_resource_id="user-1-id", + path=NamespacePath.from_strings("john.doe"), ), ) anon_user_api = APIUser(is_admin=False) user_roles = {admin_user.id: get_kc_roles(["renku-admin"])} kc_api = DummyKeycloakAPI(users=get_kc_users([admin_user_info, user]), user_roles=user_roles) - sync_config, _ = get_app_configs(kc_api) + dm = get_app_manager(kc_api) + dm.kc_api = kc_api # Sync users - await sync_config.syncer.users_sync(kc_api) - authz = Authz(sync_config.authz_config) + await dm.syncer.users_sync(kc_api) # Remove the public viewer relations - await authz.client.DeleteRelationships( + await dm.authz.client.DeleteRelationships( DeleteRelationshipsRequest( relationship_filter=RelationshipFilter( resource_type=ResourceType.user_namespace.value, optional_relation=_Relation.public_viewer.value @@ -992,92 +876,12 @@ async def 
test_migrate_user_namespaces_make_all_public( ) with pytest.raises(errors.MissingResourceError): - await sync_config.group_repo.get_namespace_by_slug(user=anon_user_api, slug="john.doe") + await dm.group_repo.get_namespace_by_slug(user=anon_user_api, slug=Slug("john.doe")) - await migrate_user_namespaces_make_all_public(sync_config) + await migrate_user_namespaces_make_all_public(dm) # After the migration, the user namespace is public - ns = await sync_config.group_repo.get_namespace_by_slug(user=anon_user_api, slug="john.doe") - assert ns.slug == "john.doe" + ns = await dm.group_repo.get_namespace_by_slug(user=anon_user_api, slug=Slug("john.doe")) + assert ns.path.serialize() == "john.doe" assert ns.kind.value == "user" assert ns.created_by == user.id - - -@pytest.mark.asyncio -async def test_migrate_storages_v2(get_app_configs: Callable[..., tuple[SyncConfig, UserRepo]], admin_user: APIUser): - admin_user_info = UserInfo( - id=admin_user.id, - first_name=admin_user.first_name, - last_name=admin_user.last_name, - email=admin_user.email, - namespace=Namespace( - id=ULID(), - slug="admin-user", - created_by=admin_user.id, - kind=NamespaceKind.user, - underlying_resource_id=admin_user.id, - ), - ) - user = UserInfo( - id="user-1-id", - first_name="Jane", - last_name="Doe", - email="jane.doe@gmail.com", - namespace=Namespace( - id=ULID(), - slug="jane.doe", - created_by="user-1-id", - kind=NamespaceKind.user, - underlying_resource_id="user-1-id", - ), - ) - user_api = APIUser(is_admin=False, id=user.id, access_token="access_token") - user_roles = {admin_user.id: get_kc_roles(["renku-admin"])} - kc_api = DummyKeycloakAPI(users=get_kc_users([admin_user_info, user]), user_roles=user_roles) - sync_config, _ = get_app_configs(kc_api) - # Sync users - await sync_config.syncer.users_sync(kc_api) - - # Create a project and a storage_v2 attached to it - project_payload = UnsavedProject( - name="project-1", slug="project-1", namespace=user.namespace.slug, created_by=user.id, visibility="private" - ) - project = await sync_config.project_repo.insert_project(user_api, project_payload) - unsaved_storage = UnsavedCloudStorage.from_url( - storage_url="s3://my-bucket", - name="storage-1", - readonly=True, - project_id=str(project.id), - target_path="my_data", - ) - storage_orm = CloudStorageORM.load(unsaved_storage) - async with sync_config.session_maker() as session, session.begin(): - session.add(storage_orm) - storage_v2 = storage_orm.dump() - - await migrate_storages_v2_to_data_connectors(sync_config) - - # After the migration, there is a new data connector - data_connector_repo = sync_config.data_connector_migration_tool.data_connector_repo - data_connectors, data_connectors_count = await data_connector_repo.get_data_connectors( - user=user_api, - pagination=PaginationRequest(1, 100), - ) - assert data_connectors is not None - assert data_connectors_count == 1 - data_connector = data_connectors[0] - assert data_connector.name == storage_v2.name - assert data_connector.storage.storage_type == storage_v2.storage_type - assert data_connector.storage.readonly == storage_v2.readonly - assert data_connector.storage.source_path == storage_v2.source_path - assert data_connector.storage.target_path == storage_v2.target_path - assert data_connector.created_by == user.id - - data_connector_project_link_repo = sync_config.data_connector_migration_tool.data_connector_project_link_repo - links = await data_connector_project_link_repo.get_links_to(user=user_api, project_id=project.id) - assert links is not None - 
assert len(links) == 1 - link = links[0] - assert link.project_id == project.id - assert link.data_connector_id == data_connector.id - assert link.created_by == user.id diff --git a/test/bases/renku_data_services/data_tasks/test_taskman.py b/test/bases/renku_data_services/data_tasks/test_taskman.py new file mode 100644 index 000000000..7080a1b13 --- /dev/null +++ b/test/bases/renku_data_services/data_tasks/test_taskman.py @@ -0,0 +1,123 @@ +"""Tests for taskman module.""" + +import asyncio +from collections.abc import Callable, Coroutine +from dataclasses import dataclass +from datetime import datetime, timedelta +from typing import Any + +import pytest + +from renku_data_services.app_config import logging +from renku_data_services.data_tasks.taskman import TaskDefininions, TaskManager, _TaskContext + +logger = logging.getLogger(__name__) + + +async def task1(wait_time: float, body: Callable[[], Any] = lambda: logger.info("hello world")) -> None: + await asyncio.sleep(wait_time) + body() + + +async def task2(wait_time: float, body: Callable[[], Any] = lambda: logger.info("hello world")) -> None: + while True: + body() + await asyncio.sleep(wait_time) + + +def test_task_definition() -> None: + td = TaskDefininions({"test1": lambda: task1(1)}) + tds = list(td.tasks) + assert len(tds) == 1 + for name, tf in td.tasks: + assert name == "test1" + assert isinstance(tf(), Coroutine) + + +def test_task_context() -> None: + started = datetime(2025, 4, 21, 15, 30, 0) + delta = timedelta(seconds=154) + tc = _TaskContext(name="test1", task=..., started=started, restarts=0) + + assert tc.restarts == 0 + tc.inc_restarts() + assert tc.restarts == 1 + + td = tc.running_time(ref=started + delta) + assert td == delta + + +@dataclass +class State: + counter: int = 0 + throw_on: int = -1 + + def set(self, n: int) -> None: + self.counter = n + if n == self.throw_on: + raise Exception(f"Error on {n}") + + def inc(self) -> None: + self.set(self.counter + 1) + + +async def cancel(tm: TaskManager, name: str, max_wait: float) -> None: + tj = tm.cancel(name) + if tj is not None: + await tj.join(max_wait) + else: + raise Exception(f"Task {name} not found") + + +@pytest.mark.asyncio +async def test_simple_task_run() -> None: + tm = TaskManager(max_retry_wait_seconds=10) + state = State() + td = TaskDefininions.single("task1", lambda: task1(0.5, state.inc)) + tm.start_all(td) + assert tm.get_task_view("task1") is not None + await tm.get_task_join("task1").join(max_wait=1) + assert tm.get_task_view("task1") is None + assert state.counter == 1 + + +@pytest.mark.asyncio +async def test_infinite_task() -> None: + tm = TaskManager(max_retry_wait_seconds=10) + state = State() + td = TaskDefininions.single("task", lambda: task2(0.1, state.inc)) + tm.start_all(td) + assert tm.get_task_view("task") is not None + await asyncio.sleep(0.5) + assert tm.get_task_view("task") is not None + assert state.counter > 3 + await cancel(tm, "task", 1) + + +@pytest.mark.asyncio +async def test_retry_on_error() -> None: + tm = TaskManager(max_retry_wait_seconds=1) + state = State(throw_on=2) + td = TaskDefininions.single("task", lambda: task2(0.1, state.inc)) + tm.start_all(td) + await asyncio.sleep(2) + assert tm.get_task_view("task") is not None + assert state.counter > 2 + tv = tm.get_task_view("task") + assert tv is not None + assert tv.restarts > 0 + await cancel(tm, "task", 1) + + +@pytest.mark.asyncio +async def test_task_cancel() -> None: + tm = TaskManager(max_retry_wait_seconds=1) + state = State() + td = 
TaskDefininions.single("task", lambda: task2(0.1, state.inc)) + tm.start_all(td) + await asyncio.sleep(0.5) + assert tm.get_task_view("task") is not None + assert tm.cancel("task") is not None + assert tm.cancel("bla") is None + await tm.get_task_join("task").join(1) + assert tm.get_task_view("task") is None diff --git a/test/components/renku_data_services/app_config/test_logging.py b/test/components/renku_data_services/app_config/test_logging.py new file mode 100644 index 000000000..7937b9e2e --- /dev/null +++ b/test/components/renku_data_services/app_config/test_logging.py @@ -0,0 +1,119 @@ +"""Tests for the app_config.logging module.""" + +import json +import logging as ll +from logging import LogRecord + +from renku_data_services.app_config.logging import ( + Config, + LogFormatStyle, + _RenkuJsonFormatter, + _RenkuLogFormatter, + _RequestIdFilter, + set_request_id, +) + +logger = ll.getLogger(__name__) + +sample_record = LogRecord( + name="a.b.c", + level=ll.INFO, + lineno=23, + msg="this is a msg", + pathname="a/b.py", + args=None, + exc_info=None, +) +_RequestIdFilter().filter(sample_record) + + +class TestHandler(ll.Handler): + def __init__(self) -> None: + ll.Handler.__init__(self) + self.records = [] + + def emit(self, record) -> None: + self.records.append(record) + + def reset(self) -> None: + self.records = [] + + +def make_logger(name: str, level: int) -> tuple[ll.Logger, TestHandler]: + logger = ll.Logger(name, level) + hdl = TestHandler() + logger.addHandler(hdl) + logger.addFilter(_RequestIdFilter()) + return logger, hdl + + +def test_json_formatter_creates_json() -> None: + fmt = _RenkuJsonFormatter() + s = fmt.format(sample_record) + js = json.loads(s) + + assert js["timestamp"] is not None + assert js["level"] == "INFO" + assert js["name"] == "a.b.c" + assert js["pathname"] == "a/b.py" + assert js["module"] == "b" + + +def test_plain_formatter() -> None: + fmt = _RenkuLogFormatter() + s = fmt.format(sample_record) + assert "INFO" in s + assert "a.b.c" in s + assert sample_record.getMessage() in s + + +def test_default_config(monkeysession) -> None: + for level in ll._nameToLevel: + monkeysession.setenv(f"{level}_LOGGING", "") + + cfg = Config.from_env() + assert cfg.app_level == ll.INFO + assert cfg.root_level == ll.WARNING + assert cfg.format_style == LogFormatStyle.plain + assert cfg.override_levels == {} + + +def test_config_from_env(monkeysession) -> None: + for level in ll._nameToLevel: + monkeysession.setenv(f"{level}_LOGGING", "") + + monkeysession.setenv("DEBUG_LOGGING", "renku_data_services.test") + monkeysession.setenv("LOG_APP_LEVEL", "WARN") + monkeysession.setenv("LOG_FORMAT_STYLE", "Json") + + cfg = Config.from_env() + assert cfg.app_level == ll.WARNING + assert cfg.root_level == ll.WARNING + assert cfg.format_style == LogFormatStyle.json + assert cfg.override_levels == {10: set(["renku_data_services.test"])} + + +def test_log_with_request_id() -> None: + logger, hdl = make_logger("test.logger", ll.INFO) + set_request_id("req_id_1") + logger.info("hello world") + assert len(hdl.records) == 1 + record = hdl.records[0] + assert record.request_id == "req_id_1" + assert record.getMessage() == "hello world" + + +def test_log_request_id_json() -> None: + logger, hdl = make_logger("test.logger", ll.INFO) + set_request_id("test_req_2") + logger.info("hello world") + assert len(hdl.records) == 1 + record = hdl.records[0] + js = json.loads(_RenkuJsonFormatter().format(record)) + assert js["request_id"] == "test_req_2" + + +def test_config_update_levels() -> None: + 
cfg1 = Config(override_levels={10: set(["a", "b"]), 20: set(["c"])}) + cfg1.update_override_levels({10: set(["c"]), 20: set(["b"])}) + assert cfg1.override_levels == {10: set(["a", "c"]), 20: set(["b"])} diff --git a/test/components/renku_data_services/authz/test_authorization.py b/test/components/renku_data_services/authz/test_authorization.py index ea7f4d261..861ee18fd 100644 --- a/test/components/renku_data_services/authz/test_authorization.py +++ b/test/components/renku_data_services/authz/test_authorization.py @@ -8,13 +8,15 @@ ) from ulid import ULID -from renku_data_services.app_config import Config -from renku_data_services.authz.authz import ResourceType, _AuthzConverter +from renku_data_services.authz.authz import _AuthzConverter from renku_data_services.authz.models import Member, Role, Scope, Visibility from renku_data_services.base_models import APIUser +from renku_data_services.base_models.core import NamespacePath, ResourceType +from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.errors import errors from renku_data_services.migrations.core import run_migrations_for_app -from renku_data_services.namespace.models import Namespace, NamespaceKind +from renku_data_services.namespace.models import UserNamespace +from renku_data_services.project import constants as project_constants from renku_data_services.project.models import Project admin_user = APIUser(is_admin=True, id="admin-id", access_token="some-token", full_name="admin") # nosec B106 @@ -24,9 +26,9 @@ @pytest_asyncio.fixture -async def bootstrap_admins(app_config_instance: Config, event_loop) -> None: +async def bootstrap_admins(app_manager_instance: DependencyManager, event_loop) -> None: run_migrations_for_app("common") - authz = app_config_instance.authz + authz = app_manager_instance.authz admins = [admin_user] rels: list[RelationshipUpdate] = [] for admin in admins: @@ -43,24 +45,26 @@ async def bootstrap_admins(app_config_instance: Config, event_loop) -> None: @pytest.mark.asyncio @pytest.mark.parametrize("public_project", [True, False]) -async def test_adding_deleting_project(app_config_instance: Config, bootstrap_admins, public_project: bool) -> None: +async def test_adding_deleting_project( + app_manager_instance: DependencyManager, bootstrap_admins, public_project: bool +) -> None: project_owner = regular_user1 assert project_owner.id - authz = app_config_instance.authz + authz = app_manager_instance.authz project_id = ULID() project = Project( id=project_id, name=project_id, slug="slug", - namespace=Namespace( - "namespace", - "namespace", - NamespaceKind.user, + namespace=UserNamespace( + id="namespace", created_by=project_owner.id, underlying_resource_id=project_owner.id, + path=NamespacePath.from_strings("namespace"), ), visibility=Visibility.PUBLIC if public_project else Visibility.PRIVATE, created_by=project_owner.id, + secrets_mount_directory=project_constants.DEFAULT_SESSION_SECRETS_MOUNT_DIR, ) authz_changes = authz._add_project(project) await authz.client.WriteRelationships(authz_changes.apply) @@ -93,23 +97,22 @@ async def test_adding_deleting_project(app_config_instance: Config, bootstrap_ad @pytest.mark.parametrize("public_project", [True, False]) @pytest.mark.parametrize("granted_role", [Role.VIEWER, Role.EDITOR, Role.OWNER]) async def test_granting_access( - app_config_instance: Config, bootstrap_admins, public_project: bool, granted_role: Role + app_manager_instance: DependencyManager, bootstrap_admins, public_project: bool, granted_role: Role ) -> 
None: project_owner = regular_user1 assert project_owner.id assert regular_user2.id - authz = app_config_instance.authz + authz = app_manager_instance.authz project_id = ULID() project = Project( id=project_id, name=project_id, slug="slug", - namespace=Namespace( - "namespace", - "namespace", - NamespaceKind.user, + namespace=UserNamespace( + id="namespace", created_by=project_owner.id, underlying_resource_id=project_owner.id, + path=NamespacePath.from_strings("namespace"), ), visibility=Visibility.PUBLIC if public_project else Visibility.PRIVATE, created_by=project_owner.id, @@ -139,22 +142,23 @@ async def test_granting_access( @pytest.mark.asyncio @pytest.mark.parametrize("public_project", [True, False]) -async def test_listing_users_with_access(app_config_instance: Config, public_project: bool, bootstrap_admins) -> None: +async def test_listing_users_with_access( + app_manager_instance: DependencyManager, public_project: bool, bootstrap_admins +) -> None: project_owner = regular_user1 assert project_owner.id assert regular_user2.id - authz = app_config_instance.authz + authz = app_manager_instance.authz project1_id = ULID() project1 = Project( id=project1_id, name=str(project1_id), slug=str(project1_id), - namespace=Namespace( - project_owner.id, - project_owner.id, - NamespaceKind.user, + namespace=UserNamespace( + id=project_owner.id, created_by=project_owner.id, underlying_resource_id=project_owner.id, + path=[project_owner.id], ), visibility=Visibility.PUBLIC if public_project else Visibility.PRIVATE, created_by=project_owner.id, @@ -164,12 +168,11 @@ async def test_listing_users_with_access(app_config_instance: Config, public_pro id=project2_id, name=str(project2_id), slug=str(project2_id), - namespace=Namespace( - regular_user2.id, - regular_user2.id, - NamespaceKind.user, + namespace=UserNamespace( + id=regular_user2.id, created_by=regular_user2.id, underlying_resource_id=regular_user2.id, + path=[regular_user2.id], ), visibility=Visibility.PRIVATE, created_by=regular_user2.id, @@ -187,8 +190,8 @@ async def test_listing_users_with_access(app_config_instance: Config, public_pro @pytest.mark.asyncio -async def test_listing_projects_with_access(app_config_instance: Config, bootstrap_admins) -> None: - authz = app_config_instance.authz +async def test_listing_projects_with_access(app_manager_instance: DependencyManager, bootstrap_admins) -> None: + authz = app_manager_instance.authz public_project_id = ULID() private_project_id1 = ULID() private_project_id2 = ULID() @@ -198,12 +201,11 @@ async def test_listing_projects_with_access(app_config_instance: Config, bootstr private_project_id2_str = str(private_project_id2) project_owner = regular_user1 - namespace = Namespace( - project_owner.id, - project_owner.id, - NamespaceKind.user, + namespace = UserNamespace( + id=project_owner.id, created_by=project_owner.id, underlying_resource_id=project_owner.id, + path=[project_owner.id], ) assert project_owner.id assert regular_user2.id @@ -306,3 +308,65 @@ async def test_listing_projects_with_access(app_config_instance: Config, bootstr assert private_project_id1_str not in set( await authz.resources_with_permission(admin_user, admin_user.id, ResourceType.project, Scope.DELETE) ) + + +@pytest.mark.asyncio +async def test_listing_non_public_projects(app_manager_instance: DependencyManager, bootstrap_admins) -> None: + authz = app_manager_instance.authz + public_project_id = ULID() + private_project_id1 = ULID() + private_project_id2 = ULID() + + public_project_id_str = str(public_project_id) + 
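# authz reports matching resource IDs as plain strings, hence the str() copies compared below
+    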
private_project_id1_str = str(private_project_id1)
+    private_project_id2_str = str(private_project_id2)
+
+    namespace = UserNamespace(
+        id=ULID(),
+        created_by=str(regular_user1.id),
+        underlying_resource_id=str(ULID()),
+        path=NamespacePath.from_strings("ns-121"),
+    )
+    assert regular_user1.id
+    assert regular_user2.id
+    public_project = Project(
+        id=public_project_id,
+        name=public_project_id_str,
+        slug=public_project_id_str,
+        namespace=namespace,
+        visibility=Visibility.PUBLIC,
+        created_by=regular_user1.id,
+    )
+    private_project1 = Project(
+        id=private_project_id1,
+        name=private_project_id1_str,
+        slug=private_project_id1_str,
+        namespace=namespace,
+        visibility=Visibility.PRIVATE,
+        created_by=regular_user1.id,
+    )
+    private_project2 = Project(
+        id=private_project_id2,
+        name=private_project_id2_str,
+        slug=private_project_id2_str,
+        namespace=namespace,
+        visibility=Visibility.PRIVATE,
+        created_by=regular_user2.id,
+    )
+    for p in [public_project, private_project1, private_project2]:
+        changes = authz._add_project(p)
+        await authz.client.WriteRelationships(changes.apply)
+
+    ids_user1 = await authz.resources_with_permission(
+        admin_user, regular_user1.id, ResourceType.project, Scope.NON_PUBLIC_READ
+    )
+    ids_user2 = await authz.resources_with_permission(
+        admin_user, regular_user2.id, ResourceType.project, Scope.NON_PUBLIC_READ
+    )
+    assert private_project_id1_str in set(ids_user1)
+    assert private_project_id2_str not in set(ids_user1)
+    assert public_project_id_str not in set(ids_user1)
+
+    assert private_project_id2_str in set(ids_user2)
+    assert private_project_id1_str not in set(ids_user2)
+    assert public_project_id_str not in set(ids_user2)
diff --git a/test/components/renku_data_services/authz/test_schemas.py b/test/components/renku_data_services/authz/test_schemas.py
index 57a49b0c7..760d1fb37 100644
--- a/test/components/renku_data_services/authz/test_schemas.py
+++ b/test/components/renku_data_services/authz/test_schemas.py
@@ -300,6 +300,219 @@ def v2_schema() -> SpiceDBSchema:
     )
 
 
+@pytest.fixture
+def v5_schema() -> SpiceDBSchema:
+    return SpiceDBSchema(
+        schemas._v5,
+        relationships=[
+            "project:p1#owner@user:u1",
+            "project:p1#public_viewer@user:*",
+            "project:p1#public_viewer@anonymous_user:*",
+            "project:p2#owner@user:u1",
+            "project:p3#editor@user:u2",
+            "project:p4#project_namespace@group:g1",
+            "group:g1#editor@user:u1",
+            "project:p5#viewer@user:u3",
+            "project:p6#owner@user:u4",
+            "project:p6#public_viewer@user:*",
+            "project:p6#public_viewer@anonymous_user:*",
+        ],
+        assertions={
+            "assertTrue": [
+                "project:p2#non_public_read@user:u1",
+                "project:p3#non_public_read@user:u2",
+                "project:p4#non_public_read@user:u1",
+                "project:p5#non_public_read@user:u3",
+            ],
+            "assertFalse": [
+                "project:p1#non_public_read@user:u1",
+                "project:p1#non_public_read@user:u2",
+                "project:p1#non_public_read@user:u3",
+                "project:p2#non_public_read@user:u2",
+                "project:p2#non_public_read@user:u3",
+                "project:p3#non_public_read@user:u1",
+                "project:p3#non_public_read@user:u3",
+                "project:p4#non_public_read@user:u2",
+                "project:p4#non_public_read@user:u3",
+                "project:p5#non_public_read@user:u1",
+                "project:p5#non_public_read@user:u2",
+                "project:p6#non_public_read@user:u4",
+            ],
+        },
+        validation={},
+    )
+
+
+@pytest.fixture
+def v6_schema() -> SpiceDBSchema:
+    return SpiceDBSchema(
+        schemas._v6,
+        relationships=[
+            # there is an admin
+            "platform:renku#admin@user:admin1",
+            # user namespaces
+            "user_namespace:user1#owner@user:user1",
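+            # (tuples use SpiceDB's resource:id#relation@subject:id syntax; a ":*" subject
+            # grants the relation to every principal of that type)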
"user_namespace:user2#owner@user:user2", + # project1 is public and owned by user1 + "project:project1#owner@user:user1", + "project:project1#project_namespace@user_namespace:user1", + "project:project1#public_viewer@user:*", + "project:project1#public_viewer@anonymous_user:*", + "project:project1#project_platform@platform:renku", + # project2 is private, in group1 which is also private + "project:project2#owner@user:user2", + "project:project2#project_namespace@group:group1", + # project2 has other generic members + "project:project2#viewer@user:project2_viewer", + "project:project2#editor@user:project2_editor", + "project:project2#project_platform@platform:renku", + # user2 is owner of group1 + "group:group1#owner@user:user2", + # group1 has other generic members + "group:group1#owner@user:group1_owner", + "group:group1#editor@user:group1_editor", + "group:group1#viewer@user:group1_viewer", + # dc1 is owned by project1 + "data_connector:dc1#data_connector_namespace@project:project1", + "data_connector:dc1#data_connector_platform@platform:renku", + # dc2 is owned by group1 + "data_connector:dc2#data_connector_namespace@group:group1", + "data_connector:dc2#data_connector_platform@platform:renku", + # dc3 is owned by user1 and is private + "data_connector:dc3#data_connector_namespace@user_namespace:user1", + "data_connector:dc3#data_connector_platform@platform:renku", + # dc4 is owned by user1 and is public + "data_connector:dc4#data_connector_namespace@user_namespace:user1", + "data_connector:dc4#data_connector_platform@platform:renku", + "data_connector:dc4#public_viewer@user:*", + "data_connector:dc4#public_viewer@anonymous_user:*", + ], + assertions={ + "assertTrue": [ + # admins can do everything to all data connectors + "data_connector:dc1#delete@user:admin1", + "data_connector:dc2#delete@user:admin1", + "data_connector:dc3#delete@user:admin1", + "data_connector:dc4#delete@user:admin1", + "data_connector:dc1#write@user:admin1", + "data_connector:dc2#write@user:admin1", + "data_connector:dc3#write@user:admin1", + "data_connector:dc4#write@user:admin1", + "data_connector:dc1#read@user:admin1", + "data_connector:dc2#read@user:admin1", + "data_connector:dc3#read@user:admin1", + "data_connector:dc4#read@user:admin1", + # user1 can do everything on dc1 since it is owned by the project that user1 owns + "data_connector:dc1#delete@user:user1", + "data_connector:dc1#write@user:user1", + "data_connector:dc1#read@user:user1", + # user1 can read dc3 because it is owned by user1 + "data_connector:dc3#delete@user:user1", + "data_connector:dc3#write@user:user1", + "data_connector:dc3#read@user:user1", + # user1 can read dc4 because it is owned by user1 + "data_connector:dc4#delete@user:user1", + "data_connector:dc4#write@user:user1", + "data_connector:dc4#read@user:user1", + # user2 has full access on dc2 because they own the group that owns the dc + "data_connector:dc2#delete@user:user2", + "data_connector:dc2#write@user:user2", + "data_connector:dc2#read@user:user2", + # user2 has read access on dc4 because the dc is public + "data_connector:dc4#read@user:user2", + # anonymous user checks + "data_connector:dc4#read@user:ANON", + "data_connector:dc4#read@anonymous_user:ANON", + ], + "assertFalse": [ + # user1 has no access to dc2 since the dc is owned by group1 which is private + # and user1 has no affiliation with group1 + "data_connector:dc2#delete@user:user1", + "data_connector:dc2#write@user:user1", + "data_connector:dc2#read@user:user1", + # user2 has no access to dc1 because the dc is not 
public + # and user2 has no access to the project that owns the dc + "data_connector:dc1#read@user:user2", + # user2 has no edit or write access to dc1 + "data_connector:dc1#delete@user:user2", + "data_connector:dc1#write@user:user2", + # user2 has no access to dc3 because it is owned by user1 and is private + "data_connector:dc3#delete@user:user2", + "data_connector:dc3#write@user:user2", + "data_connector:dc3#read@user:user2", + # user2 does not have write or delete permissions on dc4 + "data_connector:dc4#delete@user:user2", + "data_connector:dc4#write@user:user2", + # user2 can read dc1 because it is owned by a public project + # anonymous user checks + "data_connector:dc1#read@user:ANON", + "data_connector:dc2#read@user:ANON", + "data_connector:dc3#read@user:ANON", + "data_connector:dc1#read@anonymous_user:ANON", + "data_connector:dc2#read@anonymous_user:ANON", + "data_connector:dc3#read@anonymous_user:ANON", + ], + }, + ) + + +@pytest.fixture +def v7_schema() -> SpiceDBSchema: + return SpiceDBSchema( + schemas._v7, + relationships=[ + # public project p1, owner=u1 + "project:p1#owner@user:u1", + "project:p1#public_viewer@user:*", + "project:p1#public_viewer@anonymous_user:*", + "project:p1#editor@user:u11", + "project:p1#viewer@user:u12", + # private project p2, owner=u2 + "project:p2#owner@user:u2", + "project:p2#editor@user:u21", + "project:p2#viewer@user:u22", + # private project p3, owner=g1 (group), group owner=u3 + "project:p3#project_namespace@group:g1", + "group:g1#owner@user:u3", + "group:g1#editor@user:u4", + "group:g1#viewer@user:u5", + ], + assertions={ + "assertTrue": [ + "project:p1#exclusive_owner@user:u1", + "project:p1#exclusive_editor@user:u11", + "project:p1#exclusive_member@user:u1", + "project:p1#exclusive_member@user:u11", + "project:p1#exclusive_member@user:u12", + "project:p2#exclusive_owner@user:u2", + "project:p2#exclusive_editor@user:u21", + "project:p2#exclusive_member@user:u2", + "project:p2#exclusive_member@user:u21", + "project:p2#exclusive_member@user:u22", + "project:p3#exclusive_owner@user:u3", + "group:g1#exclusive_owner@user:u3", + "project:p3#exclusive_editor@user:u4", + "group:g1#exclusive_editor@user:u4", + "project:p3#exclusive_member@user:u5", + "project:p3#exclusive_member@user:u4", + "project:p3#exclusive_member@user:u3", + "group:g1#exclusive_member@user:u5", + "group:g1#exclusive_member@user:u4", + "group:g1#exclusive_member@user:u3", + ], + "assertFalse": [ + "project:p1#exclusive_owner@user:u2", + "project:p1#exclusive_editor@user:u1", + "project:p1#exclusive_editor@user:u12", + "project:p2#exclusive_owner@user:u1", + "project:p2#exclusive_editor@user:u2", + "project:p2#exclusive_editor@user:u22", + ], + }, + validation={}, + ) + + def test_v1_schema(tmp_path: Path, v1_schema: SpiceDBSchema) -> None: validation_file = tmp_path / "validate.yaml" v1_schema.to_yaml(validation_file) @@ -310,3 +523,21 @@ def test_v2_schema(tmp_path: Path, v2_schema: SpiceDBSchema) -> None: validation_file = tmp_path / "validate.yaml" v2_schema.to_yaml(validation_file) check_call(["zed", "validate", validation_file.as_uri()]) + + +def test_v5_schema(tmp_path: Path, v5_schema: SpiceDBSchema) -> None: + validation_file = tmp_path / "validate.yaml" + v5_schema.to_yaml(validation_file) + check_call(["zed", "validate", validation_file.as_uri()]) + + +def test_v6_schema(tmp_path: Path, v6_schema: SpiceDBSchema) -> None: + validation_file = tmp_path / "validate.yaml" + v6_schema.to_yaml(validation_file) + check_call(["zed", "validate", validation_file.as_uri()]) + + 
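+# Like the schema tests above, this hands the fixture to `zed validate`, which loads the
+# declared relationship tuples and then checks every assertTrue/assertFalse entry as a
+# permission lookup written in the same resource:id#permission@subject:id syntax.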
+def test_v7_schema(tmp_path: Path, v7_schema: SpiceDBSchema) -> None: + validation_file = tmp_path / "validate.yaml" + v7_schema.to_yaml(validation_file) + check_call(["zed", "validate", validation_file.as_uri()]) diff --git a/test/components/renku_data_services/base_models/test_nel.py b/test/components/renku_data_services/base_models/test_nel.py new file mode 100644 index 000000000..f9ca586f1 --- /dev/null +++ b/test/components/renku_data_services/base_models/test_nel.py @@ -0,0 +1,61 @@ +"""Tests for non empty list.""" + +from renku_data_services.base_models.nel import Nel + + +def test_nel() -> None: + value = Nel(1) + assert value.to_list() == [1] + + value = Nel(1, [2, 3]) + assert value.to_list() == [1, 2, 3] + + value = Nel.of(1, 2, 3, "a") + assert value.to_list() == [1, 2, 3, "a"] + + value = Nel.of(1, 2, 3, 4) + assert value.to_list() == [1, 2, 3, 4] + assert value.to_set() == set([1, 2, 3, 4]) + + value = Nel.of(1, 2).append(Nel.of(3, 4)) + assert value.to_list() == [1, 2, 3, 4] + + nel = Nel.of(1, 2) + value = nel.append([]) + assert value is nel + + value = nel.append([3, 4]) + assert value.to_list() == [1, 2, 3, 4] + + nel: Nel[int] | None = Nel.from_list([]) + assert nel is None + + nel = Nel.from_list([1, 2, 3]) + assert nel == Nel.of(1, 2, 3) + + +def test_iteration() -> None: + nel = Nel.of(1, 2, 3, 4, 5) + lst1 = [e for e in nel] + lst2 = [e for e in nel] + assert lst2 == nel.to_list() + assert lst1 == lst2 + + lst3 = [e for e in Nel.of(1)] + assert lst3 == [1] + + assert len(nel) == 5 + assert nel[0] == 1 + assert nel[1] == 2 + + assert set(nel) == set([1, 2, 3, 4, 5]) + + lst = [0, 1] + lst.extend(nel) + assert lst == [0, 1, 1, 2, 3, 4, 5] + + +def test_mk_string() -> None: + nel = Nel.of(1, 2, 3, 4, 5) + assert nel.mk_string(",") == "1,2,3,4,5" + assert Nel.of(1).mk_string(",") == "1" diff --git a/test/components/renku_data_services/base_models/test_slugs.py b/test/components/renku_data_services/base_models/test_slugs.py index e48d9553d..22bfed97b 100644 --- a/test/components/renku_data_services/base_models/test_slugs.py +++ b/test/components/renku_data_services/base_models/test_slugs.py @@ -38,7 +38,6 @@ def test_slug_generation_from_invalid_name(input: str, expected: str) -> None: ], ) def test_valid_slug(input: str, expected: str) -> None: - assert Slug(input).value == expected assert Slug.from_name(input).value == expected diff --git a/test/components/renku_data_services/connected_services/test_encryption.py b/test/components/renku_data_services/connected_services/test_encryption.py index 4e9b7e9af..3c345467f 100644 --- a/test/components/renku_data_services/connected_services/test_encryption.py +++ b/test/components/renku_data_services/connected_services/test_encryption.py @@ -3,20 +3,20 @@ import pytest from sqlalchemy import select -from renku_data_services.app_config import Config from renku_data_services.base_models import APIUser from renku_data_services.connected_services import apispec from renku_data_services.connected_services import orm as schemas +from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.migrations.core import run_migrations_for_app from renku_data_services.utils.cryptography import decrypt_string @pytest.mark.asyncio async def test_token_encryption( - app_config_instance: Config, + app_manager_instance: DependencyManager, ) -> None: run_migrations_for_app("common") - connected_services_repo = app_config_instance.connected_services_repo + connected_services_repo = 
app_manager_instance.connected_services_repo token = dict(access_token="ACCESS TOKEN", refresh_token="REFRESH TOKEN", expires_at=12345) # nosec user_id = "USER-1" @@ -36,9 +36,9 @@ async def test_token_encryption( @pytest.mark.asyncio -async def test_client_secret_encryption(app_config_instance: Config, admin_user: APIUser) -> None: +async def test_client_secret_encryption(app_manager_instance: DependencyManager, admin_user: APIUser) -> None: run_migrations_for_app("common") - connected_services_repo = app_config_instance.connected_services_repo + connected_services_repo = app_manager_instance.connected_services_repo new_client = apispec.ProviderPost( id="provider", kind=apispec.ProviderKind.gitlab, diff --git a/test/components/renku_data_services/crc_models/hypothesis.py b/test/components/renku_data_services/crc_models/hypothesis.py index 74d811ac1..d1ed4fd2d 100644 --- a/test/components/renku_data_services/crc_models/hypothesis.py +++ b/test/components/renku_data_services/crc_models/hypothesis.py @@ -76,7 +76,9 @@ def rc_default_strat(draw): assume(False) -quota_strat = st.builds(models.Quota, cpu=a_quota_cpu, gpu=a_quota_gpu, memory=a_quota_memory) +quota_strat = st.builds( + models.Quota, cpu=a_quota_cpu, gpu=a_quota_gpu, memory=a_quota_memory, id=st.just("random_arbitrary_value") +) quota_strat_w_id = st.builds(models.Quota, cpu=a_quota_cpu, gpu=a_quota_gpu, memory=a_quota_memory, id=a_uuid_string) diff --git a/test/components/renku_data_services/data_api/test_config.py b/test/components/renku_data_services/data_api/test_config.py index 4157e4a24..7f3a7c8e0 100644 --- a/test/components/renku_data_services/data_api/test_config.py +++ b/test/components/renku_data_services/data_api/test_config.py @@ -4,35 +4,36 @@ import pytest import pytest_asyncio -import renku_data_services.app_config.config as conf +import renku_data_services.data_api.config as conf from renku_data_services.authn.dummy import DummyAuthenticator +from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.db_config.config import DBConfig from renku_data_services.k8s.clients import DummyCoreClient, DummySchedulingClient from renku_data_services.users.dummy_kc_api import DummyKeycloakAPI @pytest_asyncio.fixture -async def config_dummy_fixture(monkeypatch): +async def dependencies_dummy_fixture(monkeypatch): monkeypatch.setenv("DUMMY_STORES", "true") monkeypatch.setenv("VERSION", "9.9.9") - yield conf.Config.from_env() + yield DependencyManager.from_env() # NOTE: _async_engine is a class variable and it persist across tests because pytest loads # all things once at the beginning of hte tests. So we reset it here so that it does not affect # subsequent tests. 
await DBConfig.dispose_connection() -def test_config_dummy(config_dummy_fixture: conf.Config) -> None: - config = config_dummy_fixture - assert config.authenticator is not None - assert isinstance(config.authenticator, DummyAuthenticator) - assert config.storage_repo is not None - assert config.rp_repo is not None - assert config.user_repo is not None - assert config.project_repo is not None - assert config.session_repo is not None - assert config.user_preferences_repo is not None - assert config.version == "9.9.9" +def test_config_dummy(dependencies_dummy_fixture: DependencyManager) -> None: + dm = dependencies_dummy_fixture + assert dm.authenticator is not None + assert isinstance(dm.authenticator, DummyAuthenticator) + assert dm.storage_repo is not None + assert dm.rp_repo is not None + assert dm.user_repo is not None + assert dm.project_repo is not None + assert dm.session_repo is not None + assert dm.user_preferences_repo is not None + assert dm.config.version == "9.9.9" @pytest_asyncio.fixture @@ -69,7 +70,7 @@ def patch_kc_api(*args, **kwargs): monkeypatch.setattr(conf, "KeycloakAPI", patch_kc_api) - yield conf.Config.from_env() + yield DependencyManager.from_env() # NOTE: _async_engine is a class variable and it persist across tests because pytest loads # all things once at the beginning of hte tests. So we reset it here so that it does not affect # subsequent tests. @@ -77,7 +78,7 @@ def patch_kc_api(*args, **kwargs): @pytest.mark.skip(reason="Re-enable when the k8s cluster for CI is fully setup") # TODO: address in followup PR -def test_config_no_dummy(config_no_dummy_fixture: conf.Config) -> None: +def test_config_no_dummy(config_no_dummy_fixture: DependencyManager) -> None: config = config_no_dummy_fixture assert config.authenticator is not None assert config.storage_repo is not None diff --git a/test/components/renku_data_services/db/test_sqlalchemy_pool_repo.py b/test/components/renku_data_services/db/test_sqlalchemy_pool_repo.py index 48c6d94b7..fbc34a416 100644 --- a/test/components/renku_data_services/db/test_sqlalchemy_pool_repo.py +++ b/test/components/renku_data_services/db/test_sqlalchemy_pool_repo.py @@ -7,8 +7,8 @@ import renku_data_services.base_models as base_models from renku_data_services import errors -from renku_data_services.app_config import Config from renku_data_services.crc import models +from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.migrations.core import run_migrations_for_app from test.components.renku_data_services.crc_models.hypothesis import ( a_name, @@ -26,11 +26,11 @@ @settings(max_examples=5, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_resource_pool_insert_get( - rp: models.ResourcePool, app_config_instance: Config, admin_user: base_models.APIUser + rp: models.ResourcePool, app_manager_instance: DependencyManager, admin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") inserted_rp = None - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await create_rp(rp, pool_repo, admin_user) except (ValidationError, errors.ValidationError): @@ -45,13 +45,13 @@ async def test_resource_pool_insert_get( @pytest.mark.asyncio async def test_resource_pool_update_name( rp: models.ResourcePool, - app_config_instance: Config, + app_manager_instance: DependencyManager, new_name: str, admin_user: base_models.APIUser, ) -> None: run_migrations_for_app("common") inserted_rp = None - 
pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await create_rp(rp, pool_repo, api_user=admin_user) assert inserted_rp.id is not None @@ -72,10 +72,10 @@ async def test_resource_pool_update_name( @settings(max_examples=5, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_resource_pool_update_quota( - rp: models.ResourcePool, app_config_instance: Config, admin_user: base_models.APIUser + rp: models.ResourcePool, app_manager_instance: DependencyManager, admin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await create_rp(rp, pool_repo, api_user=admin_user) assert inserted_rp.id is not None @@ -103,11 +103,11 @@ async def test_resource_pool_update_quota( @settings(max_examples=5, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_resource_pool_update_classes( - rp: models.ResourcePool, app_config_instance: Config, data, admin_user: base_models.APIUser + rp: models.ResourcePool, app_manager_instance: DependencyManager, data, admin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") inserted_rp = None - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await create_rp(rp, pool_repo, api_user=admin_user) assert inserted_rp.id is not None @@ -146,11 +146,11 @@ async def test_resource_pool_update_classes( @settings(max_examples=5, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_get_classes( - rp: models.ResourcePool, app_config_instance: Config, admin_user: base_models.APIUser + rp: models.ResourcePool, app_manager_instance: DependencyManager, admin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") inserted_rp = None - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await create_rp(rp, pool_repo, api_user=admin_user) assert inserted_rp.id is not None @@ -167,11 +167,11 @@ async def test_get_classes( @settings(max_examples=5, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_get_class_by_id( - rp: models.ResourcePool, app_config_instance: Config, admin_user: base_models.APIUser + rp: models.ResourcePool, app_manager_instance: DependencyManager, admin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") inserted_rp = None - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await create_rp(rp, pool_repo, api_user=admin_user) assert inserted_rp.id is not None @@ -191,11 +191,11 @@ async def test_get_class_by_id( @settings(max_examples=5, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_get_class_by_name( - rp: models.ResourcePool, app_config_instance: Config, admin_user: base_models.APIUser + rp: models.ResourcePool, app_manager_instance: DependencyManager, admin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") inserted_rp = None - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await create_rp(rp, pool_repo, api_user=admin_user) assert inserted_rp.id is not None @@ -215,10 +215,10 @@ async def test_get_class_by_name( 
@settings(max_examples=5, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_resource_pool_delete( - rp: models.ResourcePool, app_config_instance: Config, admin_user: base_models.APIUser + rp: models.ResourcePool, app_manager_instance: DependencyManager, admin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await create_rp(rp, pool_repo, api_user=admin_user) assert inserted_rp.id is not None @@ -235,12 +235,12 @@ async def test_resource_pool_delete( async def test_resource_class_create( rc: models.ResourceClass, rp: models.ResourcePool, - app_config_instance: Config, + app_manager_instance: DependencyManager, admin_user: base_models.APIUser, ) -> None: run_migrations_for_app("common") inserted_rp = None - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await create_rp(rp, pool_repo, api_user=admin_user) assert inserted_rp.id is not None @@ -267,11 +267,11 @@ async def test_resource_class_create( @settings(max_examples=5, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_resource_class_delete( - rp: models.ResourcePool, app_config_instance: Config, admin_user: base_models.APIUser + rp: models.ResourcePool, app_manager_instance: DependencyManager, admin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") inserted_rp = None - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await create_rp(rp, pool_repo, api_user=admin_user) assert inserted_rp.id is not None @@ -302,13 +302,13 @@ async def test_resource_class_delete( @pytest.mark.asyncio async def test_resource_class_update( rp: models.ResourcePool, - app_config_instance: Config, + app_manager_instance: DependencyManager, admin_user: base_models.APIUser, rc_update: dict, ) -> None: run_migrations_for_app("common") inserted_rp = None - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await pool_repo.insert_resource_pool(resource_pool=rp, api_user=admin_user) assert inserted_rp is not None @@ -351,11 +351,11 @@ async def test_resource_class_update( @settings(max_examples=5, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_lookup_rp_by_name( - rp: models.ResourcePool, app_config_instance: Config, admin_user: base_models.APIUser + rp: models.ResourcePool, app_manager_instance: DependencyManager, admin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") inserted_rp = None - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp = await create_rp(rp, pool_repo, api_user=admin_user) assert inserted_rp.id is not None @@ -373,11 +373,11 @@ async def test_lookup_rp_by_name( @settings(max_examples=5, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_insert_class_in_nonexisting_rp( - app_config_instance: Config, rc: models.ResourceClass, admin_user: base_models.APIUser + app_manager_instance: DependencyManager, rc: models.ResourceClass, admin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") with pytest.raises(errors.MissingResourceError): - await app_config_instance.rp_repo.insert_resource_class( + await 
app_manager_instance.rp_repo.insert_resource_class( resource_class=rc, resource_pool_id=99999, api_user=admin_user ) @@ -386,11 +386,11 @@ async def test_insert_class_in_nonexisting_rp( @settings(max_examples=5, suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_update_quota_in_nonexisting_rp( - app_config_instance: Config, new_quota_id: str, admin_user: base_models.APIUser + app_manager_instance: DependencyManager, new_quota_id: str, admin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") with pytest.raises(errors.MissingResourceError): - await app_config_instance.rp_repo.update_resource_pool(id=99999, api_user=admin_user, quota=new_quota_id) + await app_manager_instance.rp_repo.update_resource_pool(id=99999, api_user=admin_user, quota=new_quota_id) @given(public_rp=public_rp_strat, private_rp=private_rp_strat) @@ -401,13 +401,13 @@ async def test_resource_pools_access_control( private_rp: models.ResourcePool, admin_user: base_models.APIUser, loggedin_user: base_models.APIUser, - app_config_instance: Config, + app_manager_instance: DependencyManager, ) -> None: run_migrations_for_app("common") inserted_public_rp = None inserted_private_rp = None - pool_repo = app_config_instance.rp_repo - user_repo = app_config_instance.user_repo + pool_repo = app_manager_instance.rp_repo + user_repo = app_manager_instance.user_repo try: inserted_public_rp = await create_rp(public_rp, pool_repo, admin_user) assert inserted_public_rp.id is not None @@ -444,12 +444,12 @@ async def test_classes_filtering( rp1: models.ResourcePool, rp2: models.ResourcePool, admin_user: base_models.APIUser, - app_config_instance: Config, + app_manager_instance: DependencyManager, ) -> None: run_migrations_for_app("common") inserted_rp1 = None inserted_rp2 = None - pool_repo = app_config_instance.rp_repo + pool_repo = app_manager_instance.rp_repo try: inserted_rp1 = await create_rp(rp1, pool_repo, api_user=admin_user) inserted_rp2 = await create_rp(rp2, pool_repo, api_user=admin_user) diff --git a/test/components/renku_data_services/db/test_sqlalchemy_storage_repo.py b/test/components/renku_data_services/db/test_sqlalchemy_storage_repo.py index a34f7d160..6b3badffe 100644 --- a/test/components/renku_data_services/db/test_sqlalchemy_storage_repo.py +++ b/test/components/renku_data_services/db/test_sqlalchemy_storage_repo.py @@ -7,8 +7,8 @@ from pydantic import ValidationError from renku_data_services import errors -from renku_data_services.app_config import Config from renku_data_services.base_models.core import APIUser +from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.migrations.core import run_migrations_for_app from test.components.renku_data_services.storage_models.hypothesis import ( a_path, @@ -34,11 +34,11 @@ def user(): @pytest.mark.asyncio async def test_storage_insert_get( storage: dict[str, Any], - app_config_instance: Config, + app_manager_instance: DependencyManager, user: APIUser, ) -> None: run_migrations_for_app("common") - storage_repo = app_config_instance.storage_repo + storage_repo = app_manager_instance.storage_repo with contextlib.suppress(ValidationError, errors.ValidationError): await create_storage(storage, storage_repo, user=user) @@ -50,11 +50,11 @@ async def test_storage_update_path( storage: dict[str, Any], new_source_path: str, new_target_path: str, - app_config_instance: Config, + app_manager_instance: DependencyManager, user: APIUser, ) -> None: 
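+    # The storage dict and the new paths are generated by the hypothesis strategies imported above.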
run_migrations_for_app("common") - storage_repo = app_config_instance.storage_repo + storage_repo = app_manager_instance.storage_repo try: inserted_storage = await create_storage(storage, storage_repo, user) assert inserted_storage.storage_id is not None @@ -75,11 +75,11 @@ async def test_storage_update_path( async def test_storage_update_config( storage: dict[str, Any], new_config: dict[str, Any], - app_config_instance: Config, + app_manager_instance: DependencyManager, user: APIUser, ) -> None: run_migrations_for_app("common") - storage_repo = app_config_instance.storage_repo + storage_repo = app_manager_instance.storage_repo try: inserted_storage = await create_storage(storage, storage_repo, user) assert inserted_storage.storage_id is not None @@ -98,11 +98,11 @@ async def test_storage_update_config( @pytest.mark.asyncio async def test_storage_delete( storage: dict[str, Any], - app_config_instance: Config, + app_manager_instance: DependencyManager, user: APIUser, ) -> None: run_migrations_for_app("common") - storage_repo = app_config_instance.storage_repo + storage_repo = app_manager_instance.storage_repo try: inserted_storage = await create_storage(storage, storage_repo, user) assert inserted_storage.storage_id is not None diff --git a/test/components/renku_data_services/db/test_sqlalchemy_user_preferences_repo.py b/test/components/renku_data_services/db/test_sqlalchemy_user_preferences_repo.py index 2c38865a6..580ede683 100644 --- a/test/components/renku_data_services/db/test_sqlalchemy_user_preferences_repo.py +++ b/test/components/renku_data_services/db/test_sqlalchemy_user_preferences_repo.py @@ -3,7 +3,7 @@ import renku_data_services.base_models as base_models from renku_data_services import errors -from renku_data_services.app_config import Config +from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.migrations.core import run_migrations_for_app from test.components.renku_data_services.user_preferences_models.hypothesis import ( project_slug_strat, @@ -16,10 +16,10 @@ @settings(suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None) @pytest.mark.asyncio async def test_user_preferences_insert_get( - project_slug: str, app_config_instance: Config, loggedin_user: base_models.APIUser + project_slug: str, app_manager_instance: DependencyManager, loggedin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") - user_preferences_repo = app_config_instance.user_preferences_repo + user_preferences_repo = app_manager_instance.user_preferences_repo try: await create_user_preferences(project_slug=project_slug, repo=user_preferences_repo, user=loggedin_user) finally: @@ -30,12 +30,12 @@ async def test_user_preferences_insert_get( @settings(suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None, max_examples=25) @pytest.mark.asyncio async def test_user_preferences_add_pinned_project( - project_slugs: list[str], app_config_instance: Config, loggedin_user: base_models.APIUser + project_slugs: list[str], app_manager_instance: DependencyManager, loggedin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") target(len(project_slugs)) - user_preferences_repo = app_config_instance.user_preferences_repo - project_slugs = project_slugs[: app_config_instance.user_preferences_config.max_pinned_projects] + user_preferences_repo = app_manager_instance.user_preferences_repo + project_slugs = project_slugs[: app_manager_instance.config.user_preferences.max_pinned_projects] try: for project_slug 
in project_slugs: await user_preferences_repo.add_pinned_project(requested_by=loggedin_user, project_slug=project_slug) @@ -53,12 +53,12 @@ async def test_user_preferences_add_pinned_project( @settings(suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None, max_examples=25) @pytest.mark.asyncio async def test_user_preferences_add_pinned_project_existing( - project_slugs: list[str], app_config_instance: Config, loggedin_user: base_models.APIUser + project_slugs: list[str], app_manager_instance: DependencyManager, loggedin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") target(len(project_slugs)) - user_preferences_repo = app_config_instance.user_preferences_repo - project_slugs = project_slugs[: app_config_instance.user_preferences_config.max_pinned_projects] + user_preferences_repo = app_manager_instance.user_preferences_repo + project_slugs = project_slugs[: app_manager_instance.config.user_preferences.max_pinned_projects] try: for project_slug in project_slugs: await user_preferences_repo.add_pinned_project(requested_by=loggedin_user, project_slug=project_slug) @@ -77,12 +77,12 @@ async def test_user_preferences_add_pinned_project_existing( @settings(suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None, max_examples=25) @pytest.mark.asyncio async def test_user_preferences_delete_pinned_project( - project_slugs: list[str], app_config_instance: Config, loggedin_user: base_models.APIUser + project_slugs: list[str], app_manager_instance: DependencyManager, loggedin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") target(len(project_slugs)) - user_preferences_repo = app_config_instance.user_preferences_repo - project_slugs_valid = project_slugs[: app_config_instance.user_preferences_config.max_pinned_projects] + user_preferences_repo = app_manager_instance.user_preferences_repo + project_slugs_valid = project_slugs[: app_manager_instance.config.user_preferences.max_pinned_projects] try: for project_slug in project_slugs_valid: await user_preferences_repo.add_pinned_project(requested_by=loggedin_user, project_slug=project_slug) @@ -102,13 +102,13 @@ async def test_user_preferences_delete_pinned_project( @settings(suppress_health_check=[HealthCheck.function_scoped_fixture], deadline=None, max_examples=25) @pytest.mark.asyncio async def test_user_preferences_add_pinned_project_respects_maximum( - project_slugs: list[str], app_config_instance: Config, loggedin_user: base_models.APIUser + project_slugs: list[str], app_manager_instance: DependencyManager, loggedin_user: base_models.APIUser ) -> None: run_migrations_for_app("common") target(len(project_slugs)) - user_preferences_repo = app_config_instance.user_preferences_repo - project_slugs_valid = project_slugs[: app_config_instance.user_preferences_config.max_pinned_projects] - project_slugs_invalid = project_slugs[app_config_instance.user_preferences_config.max_pinned_projects :] + user_preferences_repo = app_manager_instance.user_preferences_repo + project_slugs_valid = project_slugs[: app_manager_instance.config.user_preferences.max_pinned_projects] + project_slugs_invalid = project_slugs[app_manager_instance.config.user_preferences.max_pinned_projects :] try: for project_slug in project_slugs_valid: await user_preferences_repo.add_pinned_project(requested_by=loggedin_user, project_slug=project_slug) diff --git a/test/components/renku_data_services/k8s/test_k8s_adapter.py b/test/components/renku_data_services/k8s/test_k8s_adapter.py index c1fd6b128..89f6188b2 
100644 --- a/test/components/renku_data_services/k8s/test_k8s_adapter.py +++ b/test/components/renku_data_services/k8s/test_k8s_adapter.py @@ -1,11 +1,26 @@ from dataclasses import asdict +from box import Box from hypothesis import given +from kr8s.asyncio.objects import StatefulSet from kubernetes import client +from kubernetes.client import ( + V1Container, + V1EnvVar, + V1EnvVarSource, + V1LabelSelector, + V1PodSpec, + V1PodTemplateSpec, + V1StatefulSet, + V1StatefulSetSpec, +) from renku_data_services.crc import models from renku_data_services.k8s.clients import DummyCoreClient, DummySchedulingClient from renku_data_services.k8s.quota import QuotaRepository +from renku_data_services.notebooks.api.classes.auth import RenkuTokens +from renku_data_services.notebooks.api.classes.k8s_client import NotebookK8sClient +from renku_data_services.notebooks.util.kubernetes_ import find_env_var from test.components.renku_data_services.crc_models.hypothesis import quota_strat @@ -94,3 +109,81 @@ def test_update_quota(old_quota: models.Quota, new_quota: models.Quota) -> None: finally: if old_quota is not None: quota_repo.delete_quota(old_quota.id) + + +def test_find_env_var() -> None: + env = [Box(name="key1", value="val1"), Box(name="key2", value="val2")] + assert find_env_var(env, "key1") == (0, env[0]) + assert find_env_var(env, "key2") == (1, env[1]) + assert find_env_var(env, "missing") is None + + +def test_patch_statefulset_tokens() -> None: + git_clone_access_env = "GIT_CLONE_USER__RENKU_TOKEN" + git_proxy_access_env = "GIT_PROXY_RENKU_ACCESS_TOKEN" + git_proxy_refresh_env = "GIT_PROXY_RENKU_REFRESH_TOKEN" + secrets_access_env = "RENKU_ACCESS_TOKEN" + git_clone = V1Container( + name="git-clone", + env=[ + V1EnvVar(name="test", value="value"), + V1EnvVar(git_clone_access_env, "old_value"), + V1EnvVar(name="test-from-source", value_from=V1EnvVarSource()), + ], + ) + git_proxy = V1Container( + name="git-proxy", + env=[ + V1EnvVar(name="test", value="value"), + V1EnvVar(name="test-from-source", value_from=V1EnvVarSource()), + V1EnvVar(git_proxy_refresh_env, "old_value"), + V1EnvVar(git_proxy_access_env, "old_value"), + ], + ) + secrets = V1Container( + name="init-user-secrets", + env=[ + V1EnvVar(secrets_access_env, "old_value"), + V1EnvVar(name="test", value="value"), + V1EnvVar(name="test-from-source", value_from=V1EnvVarSource()), + ], + ) + random1 = V1Container(name="random1") + random2 = V1Container( + name="random2", + env=[ + V1EnvVar(name="test", value="value"), + V1EnvVar(name="test-from-source", value_from=V1EnvVarSource()), + ], + ) + + new_renku_tokens = RenkuTokens(access_token="new_renku_access_token", refresh_token="new_renku_refresh_token") + + sts = V1StatefulSet( + spec=V1StatefulSetSpec( + service_name="test", + selector=V1LabelSelector(), + template=V1PodTemplateSpec( + spec=V1PodSpec( + containers=[git_proxy, random1, random2], init_containers=[git_clone, random1, secrets, random2] + ) + ), + ) + ) + sanitized_sts = client.ApiClient().sanitize_for_serialization(sts) + patches = NotebookK8sClient._get_statefulset_token_patches(StatefulSet(sanitized_sts), new_renku_tokens) + + # Order of patches should be git proxy access, git proxy refresh, git clone, secrets + assert len(patches) == 4 + # Git proxy access token + assert patches[0]["path"] == "/spec/template/spec/containers/0/env/3/value" + assert patches[0]["value"] == new_renku_tokens.access_token + # Git proxy refresh token + assert patches[1]["path"] == "/spec/template/spec/containers/0/env/2/value" + assert 
patches[1]["value"] == new_renku_tokens.refresh_token + # Git clone + assert patches[2]["path"] == "/spec/template/spec/initContainers/0/env/1/value" + assert patches[2]["value"] == new_renku_tokens.access_token + # Secrets init + assert patches[3]["path"] == "/spec/template/spec/initContainers/2/env/0/value" + assert patches[3]["value"] == new_renku_tokens.access_token diff --git a/test/components/renku_data_services/message_queue/__init__.py b/test/components/renku_data_services/message_queue/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/test/components/renku_data_services/message_queue/test_EventORM.py b/test/components/renku_data_services/message_queue/test_EventORM.py deleted file mode 100644 index e335bbb50..000000000 --- a/test/components/renku_data_services/message_queue/test_EventORM.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Tests for the EventORM class""" - -import json - -from renku_data_services.message_queue.converters import QUEUE_NAME -from renku_data_services.message_queue.models import Event -from renku_data_services.message_queue.orm import EventORM - - -def test_message_type_getter(app_config) -> None: - # the messages are stored in the database, where `headers` is a stringifyied dict - raw_message = json.loads( - '{"id":"1","headers":"{\\"source\\":\\"renku-data-services\\",\\"type\\":\\"project.created\\",\\"dataContentType\\":\\"application/avro+binary\\",\\"schemaVersion\\":\\"2\\",\\"time\\":1,\\"requestId\\": \\"0\\"}","payload": ""}' # noqa: E501 - ) - event = Event(QUEUE_NAME, raw_message) - event_orm = EventORM.load(event) - mt = event_orm.get_message_type() - assert mt == "project.created" - - -def test_message_type_getter_none(app_config) -> None: - raw_message = json.loads( - '{"id":"1","headers":"{\\"source\\":\\"renku-data-services\\",\\"dataContentType\\":\\"application/avro+binary\\",\\"schemaVersion\\":\\"2\\",\\"time\\":1,\\"requestId\\": \\"0\\"}","payload": ""}' # noqa: E501 - ) - event = Event(QUEUE_NAME, raw_message) - event_orm = EventORM.load(event) - mt = event_orm.get_message_type() - assert mt is None diff --git a/test/components/renku_data_services/message_queue/test_queue.py b/test/components/renku_data_services/message_queue/test_queue.py deleted file mode 100644 index 1b300b3da..000000000 --- a/test/components/renku_data_services/message_queue/test_queue.py +++ /dev/null @@ -1,63 +0,0 @@ -"""Test for the message queue.""" - -import pytest - -from renku_data_services.authz.models import Visibility -from renku_data_services.message_queue.avro_models.io.renku.events.v2.project_removed import ProjectRemoved -from renku_data_services.message_queue.converters import QUEUE_NAME -from renku_data_services.message_queue.redis_queue import dispatch_message -from renku_data_services.migrations.core import run_migrations_for_app -from renku_data_services.namespace.models import Namespace, NamespaceKind -from renku_data_services.project.models import Project -from renku_data_services.utils.core import with_db_transaction - - -@pytest.mark.asyncio -async def test_queue_send(app_config_instance) -> None: - """Test that sending messages works.""" - run_migrations_for_app("common") - - class FakeRepo: - session_maker = app_config_instance.db.async_session_maker - event_repo = app_config_instance.event_repo - message_queue = app_config_instance.message_queue - - @with_db_transaction - @dispatch_message(ProjectRemoved) - async def fake_db_method(self, *, session, some_arg): - return Project( - id="sample-id-1", - name="name", - 
slug="slug", - namespace=Namespace( - "namespace", - "namespace", - NamespaceKind.user, - created_by="some-user", - underlying_resource_id="some-user", - ), - visibility=Visibility.PRIVATE, - created_by="some-user", - ) - - fakerepo = FakeRepo() - await fakerepo.fake_db_method(some_arg="test") - - events = await app_config_instance.redis.redis_connection.xrange(QUEUE_NAME) - assert len(events) == 0 - pending_events = await app_config_instance.event_repo.get_pending_events() - assert len(pending_events) == 1 - - await app_config_instance.event_repo.send_pending_events() - - events = await app_config_instance.redis.redis_connection.xrange(QUEUE_NAME) - assert len(events) == 1 - pending_events = await app_config_instance.event_repo.get_pending_events() - assert len(pending_events) == 0 - - await app_config_instance.event_repo.send_pending_events() - - events = await app_config_instance.redis.redis_connection.xrange(QUEUE_NAME) - assert len(events) == 1 - pending_events = await app_config_instance.event_repo.get_pending_events() - assert len(pending_events) == 0 diff --git a/test/components/renku_data_services/search/test_SearchUpdatesORM.py b/test/components/renku_data_services/search/test_SearchUpdatesORM.py new file mode 100644 index 000000000..c0dd8b9f2 --- /dev/null +++ b/test/components/renku_data_services/search/test_SearchUpdatesORM.py @@ -0,0 +1,25 @@ +"""Tests for the SearchUpdatesORM.""" + +from datetime import datetime + +import pytest +from sqlalchemy import select + +from renku_data_services.migrations.core import run_migrations_for_app +from renku_data_services.search.orm import SearchUpdatesORM + + +@pytest.mark.asyncio +async def test_insert_and_retrieve(app_manager_instance): + run_migrations_for_app("common") + async with app_manager_instance.config.db.async_session_maker() as session: + async with session.begin(): + row = SearchUpdatesORM(entity_id="user47", entity_type="User", created_at=datetime.now(), payload={}) + session.add_all([row]) + await session.commit() + + await session.begin() + res = await session.scalars(select(SearchUpdatesORM).order_by(SearchUpdatesORM.id)) + record = res.one() + assert row.entity_id == record.entity_id + assert len(str(record.id)) > 0 diff --git a/test/components/renku_data_services/search/test_core.py b/test/components/renku_data_services/search/test_core.py new file mode 100644 index 000000000..c72a14ae9 --- /dev/null +++ b/test/components/renku_data_services/search/test_core.py @@ -0,0 +1,80 @@ +"""Tests for the core functions.""" + +import pytest +import sqlalchemy as sa +from ulid import ULID + +import renku_data_services.search.core as core +from renku_data_services.base_models.core import NamespacePath +from renku_data_services.migrations.core import run_migrations_for_app +from renku_data_services.namespace.models import UserNamespace +from renku_data_services.search.db import SearchUpdatesRepo +from renku_data_services.search.orm import RecordState, SearchUpdatesORM +from renku_data_services.solr.entity_documents import User +from renku_data_services.solr.solr_client import DefaultSolrClient, SolrClientConfig, SolrQuery +from renku_data_services.users.models import UserInfo + +user_namespace = UserNamespace( + id=ULID(), + created_by="userid_2", + underlying_resource_id=str(ULID()), + path=NamespacePath.from_strings("user"), +) + + +@pytest.mark.asyncio +async def test_update_solr(app_manager_instance, solr_search): + run_migrations_for_app("common") + repo = SearchUpdatesRepo(app_manager_instance.config.db.async_session_maker) + 
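+    # Stage two user documents; update_solr should index both and drain the staging table.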
+    user = UserInfo(id="user123", first_name="Tadej", last_name="Pogacar", namespace=user_namespace)
+    await repo.upsert(user, started_at=None)
+
+    user = UserInfo(id="user234", first_name="Greg", last_name="Lemond", namespace=user_namespace)
+    await repo.upsert(user, started_at=None)
+
+    async with DefaultSolrClient(solr_search) as client:
+        before = await client.query(SolrQuery.query_all_fields("_type:*"))
+        assert len(before.response.docs) == 0
+
+        await core.update_solr(repo, client, 10)
+
+        result = await client.query(SolrQuery.query_all_fields("_type:*"))
+        assert len(result.response.docs) == 2
+        entities = await repo.select_next(10)
+        assert len(entities) == 0
+
+        user = UserInfo(id="user234", first_name="Greg", last_name="Larrsson", namespace=user_namespace)
+        await repo.upsert(user, started_at=None)
+        await core.update_solr(repo, client, 10)
+        entities = await repo.select_next(10)
+        assert len(entities) == 0
+        doc = await client.get("user234")
+        users = doc.response.read_to(User.from_dict)
+        assert users[0].lastName == "Larrsson"
+
+
+@pytest.mark.asyncio
+async def test_update_no_solr(app_manager_instance):
+    run_migrations_for_app("common")
+    repo = SearchUpdatesRepo(app_manager_instance.config.db.async_session_maker)
+
+    user = UserInfo(id="user123", first_name="Tadej", last_name="Pogacar", namespace=user_namespace)
+    await repo.upsert(user, started_at=None)
+
+    user = UserInfo(id="user234", first_name="Greg", last_name="Lemond", namespace=user_namespace)
+    await repo.upsert(user, started_at=None)
+
+    # An unreachable Solr (empty base_url) must make update_solr fail and leave
+    # the staged records marked as failed rather than silently dropping them.
+    solr_config = SolrClientConfig(base_url="", core="_none_")
+
+    async with DefaultSolrClient(solr_config) as client:
+        with pytest.raises(Exception):
+            await core.update_solr(repo, client, 10)
+        entities = await repo.select_next(10)
+        assert len(entities) == 0
+        async with app_manager_instance.config.db.async_session_maker() as session, session.begin():
+            res = await session.scalars(sa.select(SearchUpdatesORM).order_by(SearchUpdatesORM.id))
+            states = [s.state for s in res.all()]
+            assert states == [RecordState.Failed, RecordState.Failed]
diff --git a/test/components/renku_data_services/search/test_db.py b/test/components/renku_data_services/search/test_db.py
new file mode 100644
index 000000000..e2e96930b
--- /dev/null
+++ b/test/components/renku_data_services/search/test_db.py
@@ -0,0 +1,174 @@
+"""Tests for the repository."""
+
+from datetime import datetime
+
+import pytest
+from ulid import ULID
+
+from renku_data_services.authz.models import Visibility
+from renku_data_services.base_models.core import NamespacePath, NamespaceSlug, ProjectPath, ProjectSlug
+from renku_data_services.data_connectors.models import CloudStorageCore, DataConnector
+from renku_data_services.migrations.core import run_migrations_for_app
+from renku_data_services.namespace.models import ProjectNamespace, UserNamespace
+from renku_data_services.search.db import SearchUpdatesRepo
+from renku_data_services.search.models import DeleteDoc
+from renku_data_services.solr.entity_documents import DataConnector as DataConnectorDoc
+from renku_data_services.solr.entity_documents import User as UserDoc
+from renku_data_services.users.models import UserInfo
+
+user_namespace = UserNamespace(
+    id=ULID(),
+    created_by="userid_2",
+    underlying_resource_id=str(ULID()),
+    path=NamespacePath.from_strings("user"),
+)
+project_namespace = ProjectNamespace(
+    id=ULID(),
+    created_by="user_id_3",
+    path=ProjectPath(NamespaceSlug("hello-word"), ProjectSlug("project-1")),
+    
underlying_resource_id=ULID(), +) + + +@pytest.mark.asyncio +async def test_data_connector_within_project(app_manager_instance): + run_migrations_for_app("common") + repo = SearchUpdatesRepo(app_manager_instance.config.db.async_session_maker) + dc = DataConnector( + id=ULID(), + name="my greater dc", + storage=CloudStorageCore( + storage_type="s3", configuration={}, source_path="/a", target_path="/b", readonly=True + ), + slug="dc-2", + visibility=Visibility.PUBLIC, + created_by="user_id_3", + namespace=project_namespace, + ) + orm_id = await repo.upsert(dc, started_at=None) + db_doc = await repo.find_by_id(orm_id) + if db_doc is None: + raise Exception("dataconnector not found") + dc_from_payload = DataConnectorDoc.from_dict(db_doc.payload) + assert dc.id == dc_from_payload.id + assert dc.name == dc_from_payload.name + assert dc.path.serialize() == dc_from_payload.path + assert dc.creation_date.replace(microsecond=0) == dc_from_payload.creationDate + assert dc.visibility == dc_from_payload.visibility + assert dc.slug == dc_from_payload.slug.value + assert dc.storage.storage_type == dc_from_payload.storageType + + +@pytest.mark.asyncio +async def test_data_connector_upsert(app_manager_instance): + run_migrations_for_app("common") + repo = SearchUpdatesRepo(app_manager_instance.config.db.async_session_maker) + dc = DataConnector( + id=ULID(), + name="mygreat dc", + storage=CloudStorageCore( + storage_type="s3", configuration={}, source_path="/a", target_path="/b", readonly=True + ), + slug="dc-1", + visibility=Visibility.PUBLIC, + created_by="userid_2", + namespace=user_namespace, + updated_at=datetime.now(), + ) + orm_id = await repo.upsert(dc, started_at=None) + db_doc = await repo.find_by_id(orm_id) + if db_doc is None: + raise Exception("dataconnector not found") + dc_from_payload = DataConnectorDoc.from_dict(db_doc.payload) + assert dc.id == dc_from_payload.id + assert dc.name == dc_from_payload.name + assert dc.path.serialize() == dc_from_payload.path + assert dc.creation_date.replace(microsecond=0) == dc_from_payload.creationDate + assert dc.visibility == dc_from_payload.visibility + assert dc.slug == dc_from_payload.slug.value + assert dc.storage.storage_type == dc_from_payload.storageType + + +@pytest.mark.asyncio +async def test_delete_doc(app_manager_instance): + run_migrations_for_app("common") + repo = SearchUpdatesRepo(app_manager_instance.config.db.async_session_maker) + doc = DeleteDoc.user("user1234") + orm_id = await repo.upsert(doc) + db_doc = await repo.find_by_id(orm_id) + assert db_doc is not None + assert db_doc.entity_type == "User" + assert db_doc.payload == {"id": "user1234", "deleted": True} + + +@pytest.mark.asyncio +async def test_user_upsert(app_manager_instance): + run_migrations_for_app("common") + repo = SearchUpdatesRepo(app_manager_instance.config.db.async_session_maker) + user = UserInfo(id="user123", first_name="Tadej", last_name="Pogacar", namespace=user_namespace) + orm_id = await repo.upsert(user, started_at=None) + + user = UserInfo(id="user123", first_name="Tadej", last_name="Pogačar", namespace=user_namespace) + orm_id2 = await repo.upsert(user, started_at=None) + + assert orm_id == orm_id2 + + db_user = await repo.find_by_id(orm_id) + if db_user is None: + raise Exception("user not found") + + user = UserDoc.model_validate(db_user.payload) + assert user.lastName == "Pogačar" + + +@pytest.mark.asyncio +async def test_user_insert_only(app_manager_instance): + run_migrations_for_app("common") + repo = 
SearchUpdatesRepo(app_manager_instance.config.db.async_session_maker)
+    user = UserInfo(id="user123", first_name="Tadej", last_name="Pogacar", namespace=user_namespace)
+    orm_id = await repo.insert(user, started_at=None)
+
+    user = UserInfo(id="user123", first_name="Tadej", last_name="Pogačar", namespace=user_namespace)
+    orm_id2 = await repo.insert(user, started_at=None)
+
+    assert orm_id == orm_id2
+
+    db_user = await repo.find_by_id(orm_id)
+    if db_user is None:
+        raise Exception("user not found")
+
+    # Unlike upsert, insert must keep the first payload: the last name stays unchanged.
+    assert db_user.entity_type == "User"
+    user = UserDoc.model_validate(db_user.payload)
+    assert user.lastName == "Pogacar"
+
+
+@pytest.mark.asyncio
+async def test_select_next(app_manager_instance):
+    run_migrations_for_app("common")
+
+    repo = SearchUpdatesRepo(app_manager_instance.config.db.async_session_maker)
+    user1 = UserInfo(id="user123", first_name="Tadej", last_name="Pogacar", namespace=user_namespace)
+    id1 = await repo.insert(user1, started_at=None)
+    user2 = UserInfo(id="user234", first_name="Greg", last_name="Lemond", namespace=user_namespace)
+    id2 = await repo.insert(user2, started_at=None)
+
+    records = await repo.select_next(10)
+    assert len(records) == 2
+    assert [e.id for e in records] == [id1, id2]
+
+    records2 = await repo.select_next(10)
+    assert len(records2) == 0
+
+
+@pytest.mark.asyncio
+async def test_mark_processed(app_manager_instance):
+    run_migrations_for_app("common")
+
+    repo = SearchUpdatesRepo(app_manager_instance.config.db.async_session_maker)
+    user1 = UserInfo(id="user123", first_name="Tadej", last_name="Pogacar", namespace=user_namespace)
+    await repo.insert(user1, started_at=None)
+    user2 = UserInfo(id="user234", first_name="Greg", last_name="Lemond", namespace=user_namespace)
+    await repo.insert(user2, started_at=None)
+
+    records = await repo.select_next(1)
+    assert len(records) == 1
+
+    await repo.mark_processed([e.id for e in records])
diff --git a/test/components/renku_data_services/search/test_query_manual.py b/test/components/renku_data_services/search/test_query_manual.py
new file mode 100644
index 000000000..913756a0c
--- /dev/null
+++ b/test/components/renku_data_services/search/test_query_manual.py
@@ -0,0 +1,38 @@
+"""Tests for the query manual."""
+
+from textwrap import dedent
+
+import pytest
+from markdown_it import MarkdownIt
+
+from renku_data_services.search import query_manual
+
+
+def test_create_manual() -> None:
+    query_manual.manual_to_str()
+
+
+## When editing the manual it is quicker to run this test and open the
+## html file in the browser. It adds the (ugly :-)) swagger ui styles
+## for a more realistic preview
+@pytest.mark.skip(reason="This is not really an automated test.")
+def test_manual_html() -> None:
+    md = MarkdownIt("commonmark", {"breaks": False, "html": True})
+    html = md.render(query_manual.manual_to_str())
+    page = dedent(f"""
+    <!DOCTYPE html>
+    <html>
+    <head>
+    <meta charset="utf-8"/>
+    <link rel="stylesheet" href="https://unpkg.com/swagger-ui-dist/swagger-ui.css"/>
+    </head>
+    <body class="swagger-ui">
+    {html}
+    </body>
+    </html>
+ + + """) + with open("manual.html", "w") as f: + f.write(page) diff --git a/test/components/renku_data_services/search/test_reprovision.py b/test/components/renku_data_services/search/test_reprovision.py new file mode 100644 index 000000000..017dd4dbd --- /dev/null +++ b/test/components/renku_data_services/search/test_reprovision.py @@ -0,0 +1,187 @@ +"""Tests for reprovision module.""" + +from dataclasses import dataclass + +import pytest +from ulid import ULID + +from renku_data_services.authz.authz import Authz +from renku_data_services.authz.models import Visibility +from renku_data_services.base_models.core import APIUser, NamespacePath +from renku_data_services.data_connectors.db import DataConnectorRepository +from renku_data_services.data_connectors.models import ( + CloudStorageCore, + DataConnector, + GlobalDataConnector, + UnsavedDataConnector, +) +from renku_data_services.message_queue.db import ReprovisioningRepository +from renku_data_services.migrations.core import run_migrations_for_app +from renku_data_services.namespace.db import GroupRepository +from renku_data_services.namespace.models import Group, UnsavedGroup, UserNamespace +from renku_data_services.project.db import ProjectRepository +from renku_data_services.project.models import Project, UnsavedProject +from renku_data_services.search.db import SearchUpdatesRepo +from renku_data_services.search.reprovision import SearchReprovision +from renku_data_services.users.db import UserRepo + +admin = APIUser(id="the-admin-1", is_admin=True) +user_namespace = UserNamespace( + id=ULID(), + created_by="userid_2", + underlying_resource_id=str(ULID()), + path=NamespacePath.from_strings("user"), +) + + +@dataclass +class Setup: + group_repo: GroupRepository + user_repo: UserRepo + project_repo: ProjectRepository + data_connector_repo: DataConnectorRepository + search_update_repo: SearchUpdatesRepo + search_reprovision: SearchReprovision + + +def make_setup(app_manager_instance, solr_config) -> Setup: + run_migrations_for_app("common") + sess = app_manager_instance.config.db.async_session_maker + search_updates = SearchUpdatesRepo(sess) + authz = Authz(app_manager_instance.config.authz_config) + gr = GroupRepository(sess, authz, search_updates) + ur = UserRepo(sess, gr, search_updates, None, authz) + pr = ProjectRepository(sess, gr, search_updates, authz) + dcr = DataConnectorRepository(sess, authz, pr, gr, search_updates) + sr = SearchReprovision( + search_updates_repo=search_updates, + reprovisioning_repo=ReprovisioningRepository(sess), + solr_config=solr_config, + user_repo=ur, + group_repo=gr, + project_repo=pr, + data_connector_repo=dcr, + ) + return Setup( + group_repo=gr, + user_repo=ur, + project_repo=pr, + data_connector_repo=dcr, + search_reprovision=sr, + search_update_repo=search_updates, + ) + + +async def make_data_connectors(setup: Setup, count: int = 10) -> list[DataConnector]: + user = await setup.user_repo.get_or_create_user(admin, "the-admin-1") + if user is None: + raise Exception("User not created") + + result = [] + for n in range(0, count): + dc = UnsavedDataConnector( + name=f"my dc {n}", + visibility=Visibility.PUBLIC, + created_by="me", + slug=f"dc-{n}", + namespace=user.namespace.path, + storage=CloudStorageCore( + storage_type="s3", configuration={}, source_path="a", target_path="b", readonly=True + ), + ) + dc = await setup.data_connector_repo.insert_namespaced_data_connector(admin, dc) + result.append(dc) + assert len(result) == count + result.sort(key=lambda e: e.id) + return result + + +async def 
make_groups(setup: Setup, count: int) -> list[Group]: + result: list[Group] = [] + for n in range(0, count): + g = UnsavedGroup(slug=f"group-{n}", name=f"Group name {n}") + g = await setup.group_repo.insert_group(admin, g) + result.append(g) + + result.sort(key=lambda e: e.id) + return result + + +async def make_projects(setup: Setup, count: int) -> list[Project]: + user = await setup.user_repo.get_or_create_user(admin, "the-admin-1") + if user is None: + raise Exception("User not created") + + result: list[Project] = [] + for n in range(0, count): + p = UnsavedProject( + name=f"project name {n}", + slug=f"project-slug-{n}", + visibility=Visibility.PUBLIC, + created_by="me", + namespace=user.namespace.path.serialize(), + ) + p = await setup.project_repo.insert_project(admin, p) + result.append(p) + result.sort(key=lambda e: e.id) + return result + + +async def get_all_connectors(setup: Setup, per_page: int) -> list[DataConnector | GlobalDataConnector]: + result = [item async for item in setup.search_reprovision._get_all_data_connectors(admin, per_page=per_page)] + result.sort(key=lambda e: e.id) + return result + + +@pytest.mark.asyncio +async def test_get_data_connectors(app_manager_instance) -> None: + setup = make_setup(app_manager_instance, solr_config={}) + inserted_dcs = await make_data_connectors(setup, 10) + + dcs = await get_all_connectors(setup, per_page=20) + assert dcs == inserted_dcs + + dcs = await get_all_connectors(setup, per_page=10) + assert dcs == inserted_dcs + + dcs = await get_all_connectors(setup, per_page=5) + assert dcs == inserted_dcs + + dcs = await get_all_connectors(setup, per_page=3) + assert dcs == inserted_dcs + + +@pytest.mark.asyncio +async def test_run_reprovision(app_manager_instance, solr_search, admin_user) -> None: + setup = make_setup(app_manager_instance, solr_search) + dcs = await make_data_connectors(setup, 5) + groups = await make_groups(setup, 4) + projects = await make_projects(setup, 3) + users = [item async for item in setup.user_repo.get_all_users(admin)] + + count = await setup.search_reprovision.run_reprovision(admin_user) + + next = await setup.search_update_repo.select_next(20) + + user_orm = set() + project_orm = set() + group_orm = set() + dc_orm = set() + for e in next: + match e.entity_type: + case "Project": + project_orm.add(e.entity_id) + case "User": + user_orm.add(e.entity_id) + case "Group": + group_orm.add(e.entity_id) + case "DataConnector": + dc_orm.add(e.entity_id) + case _: + raise Exception(f"entity type not handled: {e.entity_type}") + + assert count == (len(user_orm) + len(project_orm) + len(group_orm) + len(dc_orm)) + assert user_orm == {e.id for e in users} + assert project_orm == {str(e.id) for e in projects} + assert group_orm == {str(e.id) for e in groups} + assert dc_orm == {str(e.id) for e in dcs} diff --git a/test/components/renku_data_services/search/test_solr_token.py b/test/components/renku_data_services/search/test_solr_token.py new file mode 100644 index 000000000..34bda542c --- /dev/null +++ b/test/components/renku_data_services/search/test_solr_token.py @@ -0,0 +1,136 @@ +"""Tests for solr_token.""" + +from datetime import UTC, datetime, timedelta +from zoneinfo import ZoneInfo + +import renku_data_services.search.solr_token as st +from renku_data_services.authz.models import Visibility +from renku_data_services.base_models.nel import Nel +from renku_data_services.solr.entity_documents import EntityType +from renku_data_services.solr.solr_schema import FieldName + +ref_date: datetime = datetime(2024, 2, 
27, 15, 34, 55, tzinfo=UTC) +ref_date2: datetime = datetime(2024, 4, 26, 7, 16, 12, tzinfo=ZoneInfo("Europe/Berlin")) + + +def test_empty() -> None: + assert st.empty() == "" + + +def test_all_query() -> None: + assert st.all_query() == "*:*" + + +def test_from_str() -> None: + assert st.from_str("abc") == "abc" + assert st.from_str("a b c") == "a\\ b\\ c" + assert st.from_str("a(b)c") == "a\\(b\\)c" + assert st.from_str("a+b+c") == "a\\+b\\+c" + assert st.from_str("test!") == "test\\!" + assert st.from_str("a\tb") == "a\\\tb" + + +def test_from_visibility() -> None: + assert st.from_visibility(Visibility.PRIVATE) == "private" + assert st.from_visibility(Visibility.PUBLIC) == "public" + + +def test_from_entity_typen() -> None: + assert st.from_entity_type(EntityType.project) == "Project" + assert st.from_entity_type(EntityType.group) == "Group" + assert st.from_entity_type(EntityType.user) == "User" + + +def test_from_datetime() -> None: + assert st.from_datetime(ref_date) == "2024-02-27T15\\:34\\:55Z" + assert st.from_datetime(ref_date2) == "2024-04-26T05\\:16\\:12Z" + + +def test_field_is() -> None: + assert st.field_is(FieldName("name"), st.from_str("Tadej")) == "name:Tadej" + + +def test_field_exists() -> None: + assert st.field_exists(FieldName("_type")) == "_type:[* TO *]" + + +def test_field_not_exists() -> None: + assert st.field_not_exists(FieldName("_type")) == "-_type:[* TO *]" + + +def test_field_is_any() -> None: + v = Nel.of(st.from_visibility(Visibility.PUBLIC), st.from_visibility(Visibility.PRIVATE)) + assert st.field_is_any(FieldName("visibility"), v) == "visibility:(public OR private)" + + v = Nel.of(st.from_str("hello")) + assert st.field_is_any(FieldName("name"), v) == "name:hello" + + +def test_id_is() -> None: + assert st.id_is("id12") == "id:id12" + assert st.id_is("id:121") == "id:id\\:121" + + +def test_id_in() -> None: + assert st.id_in(Nel.of("1", "2", "thre e")) == "id:(1 OR 2 OR thre\\ e)" + + +def test_id_not_exists() -> None: + assert st.id_not_exists() == "-id:[* TO *]" + + +def test_public_or_ids() -> None: + assert st.public_or_ids(["one", "id2"]) == "(visibility:public OR id:(one OR id2))" + assert st.public_or_ids(["id1"]) == "(visibility:public OR id:id1)" + + +def test_public_only() -> None: + assert st.public_only() == "visibility:public" + + +def test_all_entities() -> None: + assert st.all_entities() == "_kind:fullentity" + + +def test_created_is() -> None: + assert st.created_is(ref_date) == "creationDate:2024-02-27T15\\:34\\:55Z" + + +def test_created_range() -> None: + assert ( + st.created_range(ref_date, ref_date + timedelta(days=2)) + == "creationDate:[2024-02-27T15\\:34\\:55Z TO 2024-02-29T15\\:34\\:55Z]" + ) + + +def test_created_gt() -> None: + assert st.created_gt(ref_date) == "creationDate:[2024-02-27T15\\:34\\:55Z TO *]" + + +def test_created_lt() -> None: + assert st.created_lt(ref_date) == "creationDate:[* TO 2024-02-27T15\\:34\\:55Z]" + + +def test_fold_and() -> None: + assert ( + st.fold_and([st.public_only(), st.all_entities(), st.id_is("1234")]) + == "visibility:public AND _kind:fullentity AND id:1234" + ) + + +def test_fold_or() -> None: + assert ( + st.fold_or([st.public_only(), st.all_entities(), st.id_is("1234")]) + == "visibility:public OR _kind:fullentity OR id:1234" + ) + + +def test_created_by_exists() -> None: + assert st.created_by_exists() == "(createdBy:[* TO *] OR (*:* AND -_type:Project))" + + +def test_content_all() -> None: + assert st.content_all("abc") == "content_all:(abc~)" + assert st.content_all("a+b+c") == 
"content_all:(a\\+b\\+c~)" + assert st.content_all("ab cd") == "content_all:(ab~ cd~)" + assert st.content_all("ab cd") == "content_all:(ab~ cd~)" diff --git a/test/components/renku_data_services/search/test_solr_user_query.py b/test/components/renku_data_services/search/test_solr_user_query.py new file mode 100644 index 000000000..22c40d413 --- /dev/null +++ b/test/components/renku_data_services/search/test_solr_user_query.py @@ -0,0 +1,176 @@ +"""Tests for the solr_user_query module.""" + +from collections.abc import Iterable +from dataclasses import dataclass +from datetime import UTC, datetime, timedelta + +import pytest + +import renku_data_services.search.solr_token as st +from renku_data_services.authz.models import Role, Visibility +from renku_data_services.base_models.nel import Nel +from renku_data_services.search.solr_user_query import AuthAccess, Context, SolrUserQuery, _LuceneQueryTransform +from renku_data_services.search.solr_user_query import LuceneQueryInterpreter as L +from renku_data_services.search.user_query import ( + Created, + DateTimeCalc, + IdIs, + NameIs, + OrderBy, + PartialDate, + PartialDateTime, + RelativeDate, + Segment, + Segments, + SortableField, + Text, + UserQuery, +) +from renku_data_services.search.user_query import ( + Segments as S, +) +from renku_data_services.solr.entity_documents import EntityType +from renku_data_services.solr.entity_schema import Fields +from renku_data_services.solr.solr_client import SortDirection + +ref_date: datetime = datetime(2024, 2, 27, 15, 34, 55, tzinfo=UTC) +ctx: Context = Context.for_anonymous(ref_date, UTC) + + +@dataclass +class TestAuthAccess(AuthAccess): + result: list[str] + + async def get_ids_for_role( + self, user_id: str, roles: Nel[Role], ets: Iterable[EntityType], direct_membership: bool + ) -> list[str]: + return self.result + + @classmethod + def of(cls, *args: str) -> AuthAccess: + return TestAuthAccess(list(args)) + + +def midnight(d: datetime) -> datetime: + return d.replace(hour=0, minute=0, second=0, microsecond=0) + + +def end_of_day(d: datetime) -> datetime: + return d.replace(hour=23, minute=59, second=59, microsecond=0) + + +async def to_solr(ctx: Context, seg: Segment) -> st.SolrToken: + v = _LuceneQueryTransform(ctx) + await seg.accept(v) + sq = await v.build() + return sq.query + + +def test_to_solr_sort() -> None: + assert _LuceneQueryTransform._to_solr_sort(OrderBy(field=SortableField.fname, direction=SortDirection.asc)) == ( + Fields.name, + SortDirection.asc, + ) + + +@pytest.mark.asyncio +async def test_from_term() -> None: + assert await to_solr(ctx, S.type_is(EntityType.project)) == st.field_is_any( + Fields.entity_type, Nel.of(st.from_entity_type(EntityType.project)) + ) + assert await to_solr(ctx, S.id_is("id1")) == st.field_is_any(Fields.id, Nel.of(st.from_str("id1"))) + assert await to_solr(ctx, S.name_is("Tadej")) == st.field_is_any(Fields.name, Nel.of(st.from_str("Tadej"))) + assert await to_solr(ctx, S.slug_is("a/b")) == st.field_is_any(Fields.slug, Nel.of(st.from_str("a/b"))) + assert await to_solr(ctx, S.visibility_is(Visibility.PUBLIC)) == st.field_is_any( + Fields.visibility, Nel.of(st.from_visibility(Visibility.PUBLIC)) + ) + assert await to_solr(ctx, S.keyword_is("k1", "w2")) == st.field_is_any( + Fields.keywords, Nel.of(st.from_str("k1"), st.from_str("w2")) + ) + assert await to_solr(ctx, S.namespace_is("ns12")) == st.field_is_any( + Fields.namespace_path, Nel.of(st.from_str("ns12")) + ) + assert await to_solr(ctx, S.created_by_is("12-34")) == st.field_is_any( + 
Fields.created_by, Nel.of(st.from_str("12-34")) + ) + + assert await to_solr(ctx, S.role_is(Role.OWNER)) == st.empty() + assert await to_solr( + ctx.with_user_role("user1").with_auth_access(TestAuthAccess.of("id1", "id2")), S.role_is(Role.OWNER) + ) == st.id_in(Nel.of("id1", "id2")) + assert await to_solr( + ctx.with_admin_role("user1").with_auth_access(TestAuthAccess.of("id1", "id2")), S.role_is(Role.OWNER) + ) == st.id_in(Nel.of("id1", "id2")) + + +@pytest.mark.asyncio +async def test_from_term_date() -> None: + assert await to_solr(ctx, Created.eq(PartialDateTime(PartialDate(2024)))) == st.created_range( + datetime(2024, 1, 1, tzinfo=UTC), datetime(2024, 12, 31, 23, 59, 59, tzinfo=UTC) + ) + assert await to_solr(ctx, Created.eq(PartialDateTime(PartialDate(2023, 8, 1)))) == st.created_range( + datetime(2023, 8, 1, tzinfo=UTC), datetime(2023, 8, 1, 23, 59, 59, tzinfo=UTC) + ) + assert await to_solr(ctx, Created.eq(RelativeDate.today)) == st.created_range( + midnight(ref_date), end_of_day(ref_date) + ) + + assert await to_solr(ctx, Created.lt(PartialDateTime(PartialDate(2024)))) == st.created_lt( + datetime(2024, 1, 1, tzinfo=UTC) + ) + assert await to_solr(ctx, Created.gt(PartialDateTime(PartialDate(2024)))) == st.created_gt( + datetime(2024, 12, 31, 23, 59, 59, tzinfo=UTC) + ) + assert await to_solr(ctx, Created.eq(DateTimeCalc(RelativeDate.today, 2, True))) == st.created_range( + midnight(ref_date - timedelta(days=2)), end_of_day(ref_date + timedelta(days=2)) + ) + assert await to_solr(ctx, Created.gt(DateTimeCalc(RelativeDate.today, -7, False))) == st.created_gt( + midnight(ref_date - timedelta(days=7)) + ) + + +@pytest.mark.asyncio +async def test_from_text() -> None: + assert await to_solr(ctx, Text("blah")) == st.content_all("blah") + assert await to_solr(ctx, Text("blah blah")) == st.content_all("blah blah") + + +@pytest.mark.asyncio +async def test_from_segment() -> None: + assert await to_solr(ctx, Text("blah")) == st.content_all("blah") + assert await to_solr(ctx, Created.gt(DateTimeCalc(RelativeDate.today, -7, False))) == st.created_gt( + midnight(ref_date - timedelta(days=7)) + ) + assert await to_solr(ctx, Created.eq(RelativeDate.today)) == st.created_range( + midnight(ref_date), end_of_day(ref_date) + ) + assert await to_solr(ctx, IdIs(Nel.of("id1"))) == st.field_is_any(Fields.id, Nel.of(st.from_str("id1"))) + assert await to_solr(ctx, NameIs(Nel.of("Tadej"))) == st.field_is_any(Fields.name, Nel.of(st.from_str("Tadej"))) + + +@pytest.mark.asyncio +async def test_interpreter_run() -> None: + ll = L() + assert await ll.run(ctx, UserQuery.of()) == SolrUserQuery(st.empty(), []) + assert await ll.run(ctx, UserQuery.of(Segments.keyword_is("data"), Segments.text("blah"))) == SolrUserQuery( + st.fold_and([st.field_is(Fields.keywords, st.from_str("data")), st.content_all("blah")]), [] + ) + assert await ll.run( + ctx, + UserQuery.of( + Segments.keyword_is("data"), + Segments.text("blah"), + Segments.sort_by((SortableField.score, SortDirection.desc)), + ), + ) == SolrUserQuery( + st.fold_and([st.field_is(Fields.keywords, st.from_str("data")), st.content_all("blah")]), + [(Fields.score, SortDirection.desc)], + ) + + +@pytest.mark.asyncio +async def test_interpreter_run_remove_empty() -> None: + ll = L() + assert await ll.run( + ctx, UserQuery.of(Segments.id_is("id1"), Segments.role_is(Role.OWNER), Segments.id_is("id2")) + ) == SolrUserQuery(st.SolrToken("id:id1 AND id:id2"), []) diff --git a/test/components/renku_data_services/search/test_user_query.py 
b/test/components/renku_data_services/search/test_user_query.py new file mode 100644 index 000000000..64858fbaa --- /dev/null +++ b/test/components/renku_data_services/search/test_user_query.py @@ -0,0 +1,210 @@ +"""Tests for user query.""" + +from datetime import UTC, datetime, timedelta + +import pytest +from ulid import ULID + +from renku_data_services.base_models.nel import Nel +from renku_data_services.search.user_query import ( + DateTimeCalc, + EmptyUserQueryVisitor, + FieldTerm, + Helper, + IdIs, + Order, + OrderBy, + PartialDate, + PartialDateTime, + PartialTime, + RelativeDate, + Segment, + Segments, + SortableField, + Text, + TypeIs, + UserQuery, +) +from renku_data_services.solr.entity_documents import EntityType +from renku_data_services.solr.solr_client import SortDirection + +ref_date: datetime = datetime(2024, 2, 27, 15, 34, 55, tzinfo=UTC) + + +def midnight(d: datetime) -> datetime: + return d.replace(hour=0, minute=0, second=0, microsecond=0) + + +def end_of_day(d: datetime) -> datetime: + return d.replace(hour=23, minute=59, second=59, microsecond=0) + + +def test_render_keywords() -> None: + assert Segments.keyword_is("hello").render() == "keyword:hello" + assert Segments.keyword_is("hello-me").render() == "keyword:hello-me" + assert Segments.keyword_is("hello me").render() == 'keyword:"hello me"' + assert Segments.keyword_is("tl,dr", "data").render() == 'keyword:"tl,dr",data' + assert Segments.keyword_is("""a "and" b""", "data").render() == 'keyword:"a \\"and\\" b",data' + + +def test_render_order_by() -> None: + order = OrderBy(SortableField.fname, SortDirection.asc) + assert order.render() == "name-asc" + + +def test_render_order() -> None: + order = Order(Nel(OrderBy(SortableField.fname, SortDirection.asc))) + assert order.render() == "sort:name-asc" + + order = Order( + Nel.of(OrderBy(SortableField.fname, SortDirection.asc), OrderBy(SortableField.score, SortDirection.desc)), + ) + assert order.render() == "sort:name-asc,score-desc" + + +def test_helper_quote() -> None: + assert Helper.quote("hello world") == '"hello world"' + assert Helper.quote("hello ") == '"hello "' + assert Helper.quote("1,2") == '"1,2"' + assert Helper.quote('x="3"') == '"x=\\"3\\""' + assert Helper.quote("""a "and" b""") == '"a \\"and\\" b"' + + +def test_type_is() -> None: + ft = TypeIs(Nel.of(EntityType.project)) + assert ft.render() == "type:Project" + + ft = TypeIs(Nel.of(EntityType.project, EntityType.group)) + assert ft.render() == "type:Project,Group" + + +def test_id_is() -> None: + ft = IdIs(Nel.of("a b c")) + assert ft.render() == 'id:"a b c"' + + id = ULID() + ft = IdIs(Nel.of(str(id))) + assert ft.render() == f"id:{id}" + + +def test_free_text() -> None: + assert Segments.text("abc").render() == "abc" + assert Segments.text("abc abc").render() == "abc abc" + + +def test_partial_date_render() -> None: + assert PartialDate(2025, 2).render() == "2025-02" + assert PartialDate(2021).render() == "2021" + assert PartialDate(2025, 3, 7).render() == "2025-03-07" + + +def test_partial_date_min_max() -> None: + assert str(PartialDate(2025, 2).date_max()) == "2025-02-28" + assert str(PartialDate(2024, 2).date_max()) == "2024-02-29" + assert str(PartialDate(2025, 2).date_min()) == "2025-02-01" + assert str(PartialDate(2021).date_max()) == "2021-12-31" + assert str(PartialDate(2021).date_min()) == "2021-01-01" + assert str(PartialDate(2025, 3, 7).date_max()) == "2025-03-07" + assert str(PartialDate(2025, 3, 7).date_min()) == "2025-03-07" + + +def test_partial_time_render() -> None: + assert 
PartialTime(12).render() == "12" + assert PartialTime(12, 30).render() == "12:30" + assert PartialTime(12, 30, 15).render() == "12:30:15" + + +def test_partial_time_min_max() -> None: + assert str(PartialTime(12).time_max()) == "12:59:59" + assert str(PartialTime(12).time_min()) == "12:00:00" + assert str(PartialTime(2, 30).time_max()) == "02:30:59" + assert str(PartialTime(2, 30).time_min()) == "02:30:00" + assert str(PartialTime(12, 30, 15).time_max()) == "12:30:15" + assert str(PartialTime(12, 30, 15).time_min()) == "12:30:15" + + +def test_partial_datetime_render() -> None: + assert PartialDateTime(PartialDate(2022)).render() == "2022" + assert PartialDateTime(PartialDate(2022), PartialTime(8)).render() == "2022T08" + + +def test_relative_date() -> None: + assert RelativeDate.today.render() == "today" + assert RelativeDate.yesterday.render() == "yesterday" + + +def test_datetime_calc() -> None: + d = DateTimeCalc(PartialDateTime(PartialDate(2022)), 5, False) + assert d.render() == "2022+5d" + + d = DateTimeCalc(PartialDateTime(PartialDate(2022), PartialTime(8)), 5, False) + assert d.render() == "2022T08+5d" + + d = DateTimeCalc(PartialDateTime(PartialDate(2022, 5)), -5, False) + assert d.render() == "2022-05-5d" + + d = DateTimeCalc(RelativeDate.today, -7, False) + assert d.render() == "today-7d" + + d = DateTimeCalc(RelativeDate.yesterday, 7, False) + assert d.render() == "yesterday+7d" + + +def test_resolve_relative_date() -> None: + assert RelativeDate.today.resolve(ref_date, UTC) == (midnight(ref_date), end_of_day(ref_date)) + assert RelativeDate.yesterday.resolve(ref_date, UTC) == ( + midnight(ref_date) - timedelta(days=1), + end_of_day(ref_date) - timedelta(days=1), + ) + + +def test_resolve_partial_date() -> None: + pd = PartialDateTime(PartialDate(2023, 5)) + assert pd.resolve(ref_date, UTC) == (pd.datetime_min(UTC), pd.datetime_max(UTC)) + + pd = PartialDateTime(PartialDate(2023, 5, 1), PartialTime(15, 22, 15)) + assert pd.resolve(ref_date, UTC) == (pd.datetime_max(UTC), None) + + +def test_resolve_date_calc() -> None: + pd = PartialDateTime(PartialDate(2023, 5)) + calc = DateTimeCalc(pd, 5, False) + assert calc.resolve(ref_date, UTC) == (pd.datetime_min(UTC) + timedelta(days=5), None) + + calc = DateTimeCalc(pd, -5, False) + assert calc.resolve(ref_date, UTC) == (pd.datetime_min(UTC) - timedelta(days=5), None) + + calc = DateTimeCalc(pd, 5, True) + assert calc.resolve(ref_date, UTC) == ( + pd.datetime_min(UTC) - timedelta(days=5), + pd.datetime_max(UTC) + timedelta(days=5), + ) + + +class TestUserQueryTransform(EmptyUserQueryVisitor[UserQuery]): + def __init__(self, to_add: Segment) -> None: + self.segments: list[Segment] = [] + self.to_add = to_add + + async def visit_field_term(self, ft: FieldTerm) -> None: + self.segments.append(ft) + + async def visit_order(self, order: Order) -> None: + self.segments.append(order) + + async def visit_text(self, text: Text) -> None: + self.segments.append(text) + + async def build(self) -> UserQuery: + self.segments.append(self.to_add) + return UserQuery(self.segments) + + +@pytest.mark.asyncio +async def test_transform() -> None: + q0 = UserQuery.of(Segments.name_is("john"), Segments.text("help")) + q = await q0.transform( + TestUserQueryTransform(Segments.type_is(EntityType.project)), TestUserQueryTransform(Segments.id_is("id-123")) + ) + + assert q == UserQuery(q0.segments + [Segments.type_is(EntityType.project), Segments.id_is("id-123")]) diff --git a/test/components/renku_data_services/search/test_user_query_parser.py 
b/test/components/renku_data_services/search/test_user_query_parser.py new file mode 100644 index 000000000..d73b9d8dd --- /dev/null +++ b/test/components/renku_data_services/search/test_user_query_parser.py @@ -0,0 +1,480 @@ +"""Tests for the query parser.""" + +import datetime +import random +import string + +import pytest +from parsy import ParseError + +from renku_data_services.authz.models import Role, Visibility +from renku_data_services.base_models.nel import Nel +from renku_data_services.search.user_query import ( + Comparison, + Created, + CreatedByIs, + DateTimeCalc, + DirectMemberIs, + IdIs, + InheritedMemberIs, + KeywordIs, + NameIs, + NamespaceIs, + Order, + OrderBy, + PartialDate, + PartialDateTime, + PartialTime, + RelativeDate, + RoleIs, + SlugIs, + SortableField, + Text, + TypeIs, + UserId, + Username, + UserQuery, + VisibilityIs, +) +from renku_data_services.search.user_query import ( + Segments as S, +) +from renku_data_services.search.user_query_parser import QueryParser, _DateTimeParser, _ParsePrimitives +from renku_data_services.solr.entity_documents import EntityType +from renku_data_services.solr.solr_client import SortDirection + +pp = _ParsePrimitives() + + +def test_user_name() -> None: + assert pp.user_name.parse("@hello") == Username.from_name("hello") + assert pp.user_name.parse("@test.me") == Username.from_name("test.me") + with pytest.raises(ParseError): + pp.user_name.parse("help") + with pytest.raises(ParseError): + pp.user_name.parse("@t - a") + + +def test_inherited_member_is() -> None: + assert pp.inherited_member_is.parse("inherited_member:@hello") == InheritedMemberIs( + Nel(Username.from_name("hello")) + ) + assert pp.inherited_member_is.parse("inherited_member:hello") == InheritedMemberIs(Nel(UserId("hello"))) + + +def test_direct_member_is() -> None: + assert pp.direct_member_is.parse("direct_member:@hello") == DirectMemberIs(Nel(Username.from_name("hello"))) + assert pp.direct_member_is.parse("direct_member:hello") == DirectMemberIs(Nel(UserId("hello"))) + + +def test_sortable_field() -> None: + for field in SortableField._member_map_.values(): + assert pp.sortable_field.parse(field.value) == field + + with pytest.raises(ParseError): + pp.sortable_field.parse("") + with pytest.raises(ParseError): + pp.sortable_field.parse("abc") + + +def test_sort_direction() -> None: + for field in SortDirection._member_map_.values(): + assert pp.sort_direction.parse(field.value) == field + with pytest.raises(ParseError): + pp.sort_direction.parse("") + with pytest.raises(ParseError): + pp.sort_direction.parse("abc") + + +def test_order_by() -> None: + for sf in SortableField._member_map_.values(): + for dir in SortDirection._member_map_.values(): + value = OrderBy(sf, dir) # type: ignore + assert pp.ordered_by.parse(value.render()) == value + with pytest.raises(ParseError): + pp.ordered_by.parse("") + with pytest.raises(ParseError): + pp.ordered_by.parse("name") + with pytest.raises(ParseError): + pp.ordered_by.parse("name-") + with pytest.raises(ParseError): + pp.ordered_by.parse("name-abc") + with pytest.raises(ParseError): + pp.ordered_by.parse("name - desc") + + +def test_sort_term() -> None: + assert pp.sort_term.parse("sort:name-desc") == Order(Nel(OrderBy(SortableField.fname, SortDirection.desc))) + assert pp.sort_term.parse("sort:score-asc,name-desc") == Order( + Nel.of(OrderBy(SortableField.score, SortDirection.asc), OrderBy(SortableField.fname, SortDirection.desc)) + ) + + +def test_entity_type() -> None: + for field in 
EntityType._member_map_.values():
+        assert pp.entity_type.parse(field.value) == field
+        assert pp.entity_type.parse(field.value.lower()) == field
+        assert pp.entity_type.parse(field.value.upper()) == field
+
+
+def test_entity_type_nel() -> None:
+    value = EntityType.project.value
+    assert pp.entity_type_nel.parse(value) == Nel(EntityType.project)
+
+    value = "Project,Group"
+    assert pp.entity_type_nel.parse(value) == Nel.of(EntityType.project, EntityType.group)
+
+
+def test_order_by_nel() -> None:
+    value = "name-asc,created-desc"
+    assert pp.ordered_by_nel.parse(value) == Nel.of(
+        OrderBy(SortableField.fname, SortDirection.asc),
+        OrderBy(SortableField.created, SortDirection.desc),
+    )
+    value = "name-asc, created-desc"
+    assert pp.ordered_by_nel.parse(value) == Nel.of(
+        OrderBy(SortableField.fname, SortDirection.asc),
+        OrderBy(SortableField.created, SortDirection.desc),
+    )
+
+    value = "created-desc"
+    assert pp.ordered_by_nel.parse(value) == Nel(
+        OrderBy(SortableField.created, SortDirection.desc),
+    )
+
+    with pytest.raises(ParseError):
+        pp.ordered_by_nel.parse("")
+
+
+def test_comparisons() -> None:
+    assert pp.is_equal.parse(":") == Comparison.is_equal
+    assert pp.is_gt.parse(">") == Comparison.is_greater_than
+    assert pp.is_lt.parse("<") == Comparison.is_lower_than
+
+
+def test_type_is() -> None:
+    assert pp.type_is.parse("type:Project") == TypeIs(Nel(EntityType.project))
+    assert pp.type_is.parse("type:Project,Group") == TypeIs(Nel.of(EntityType.project, EntityType.group))
+    assert pp.type_is.parse("type:Project, Group") == TypeIs(Nel.of(EntityType.project, EntityType.group))
+
+
+def test_visibility_is() -> None:
+    assert pp.visibility_is.parse("visibility:public") == VisibilityIs(Nel.of(Visibility.PUBLIC))
+    assert pp.visibility_is.parse("visibility:private") == VisibilityIs(Nel.of(Visibility.PRIVATE))
+    assert pp.visibility_is.parse("visibility:Public") == VisibilityIs(Nel.of(Visibility.PUBLIC))
+    assert pp.visibility_is.parse("visibility:Private") == VisibilityIs(Nel.of(Visibility.PRIVATE))
+
+
+def test_created() -> None:
+    assert pp.created.parse("created<today") == Created(Comparison.is_lower_than, Nel.of(RelativeDate.today))
+    assert pp.created.parse("created>today") == Created(Comparison.is_greater_than, Nel.of(RelativeDate.today))
+    assert pp.created.parse("created:today") == Created(Comparison.is_equal, Nel.of(RelativeDate.today))
+    assert pp.created.parse("created>today-7d") == Created(
+        Comparison.is_greater_than, Nel.of(DateTimeCalc(RelativeDate.today, -7, False))
+    )
+
+
+def test_role_is() -> None:
+    assert pp.role_is.parse("role:owner") == RoleIs(Nel(Role.OWNER))
+    assert pp.role_is.parse("role:viewer") == RoleIs(Nel(Role.VIEWER))
+    assert pp.role_is.parse("role:editor") == RoleIs(Nel(Role.EDITOR))
+    assert pp.role_is.parse("role:viewer,editor") == RoleIs(Nel.of(Role.VIEWER, Role.EDITOR))
+
+
+def test_field_term() -> None:
+    assert pp.field_term.parse("created<today") == Created(Comparison.is_lower_than, Nel.of(RelativeDate.today))
+
+
+def test_free_text() -> None:
+    assert pp.free_text.parse("just") == Text("just")
+
+    with pytest.raises(ParseError):
+        pp.free_text.parse("")
+
+    with pytest.raises(ParseError):
+        pp.free_text.parse('"hello world"')
+
+
+def test_segment() -> None:
+    assert pp.segment.parse("abcdefg") == Text("abcdefg")
+    assert pp.segment.parse("created>today-7d") == Created(
+        Comparison.is_greater_than, Nel(DateTimeCalc(RelativeDate.today, -7, False))
+    )
+    assert pp.segment.parse("visibility:public") == VisibilityIs(Nel.of(Visibility.PUBLIC))
+    assert pp.segment.parse("type:Project") == TypeIs(Nel(EntityType.project))
+    assert pp.segment.parse("name:test") == NameIs(Nel("test"))
+    assert pp.segment.parse("slug:test") == SlugIs(Nel("test"))
+
assert pp.segment.parse("id:test") == IdIs(Nel("test")) + assert pp.segment.parse("keyword:test") == KeywordIs(Nel("test")) + assert pp.segment.parse("namespace:test") == NamespaceIs(Nel("test")) + assert pp.segment.parse("createdBy:test") == CreatedByIs(Nel("test")) + assert pp.segment.parse("direct_member:@john") == DirectMemberIs(Nel(Username.from_name("john"))) + assert pp.segment.parse("direct_member:123-456") == DirectMemberIs(Nel(UserId("123-456"))) + assert pp.segment.parse("inherited_member:@john") == InheritedMemberIs(Nel(Username.from_name("john"))) + assert pp.segment.parse("inherited_member:123-456") == InheritedMemberIs(Nel(UserId("123-456"))) + + assert pp.segment.parse("name:") == Text("name:") + + +@pytest.mark.asyncio +async def test_query() -> None: + assert pp.query.parse("") == UserQuery([]) + + q = UserQuery.of( + S.created(Comparison.is_greater_than, DateTimeCalc(RelativeDate.today, -7, False)), + S.text("some"), + S.slug_is("bad slug"), + S.text("text"), + S.order(OrderBy(SortableField.score, SortDirection.asc)), + ) + qstr = 'created>today-7d some slug:"bad slug" text sort:score-asc' + assert pp.query.parse(qstr) == q + assert q.render() == qstr + + q = UserQuery( + [ + S.name_is("al"), + S.text("hello world hello"), + S.sort_by((SortableField.score, SortDirection.desc)), + ] + ) + qstr = "name:al hello world hello sort:score-desc" + assert await QueryParser.parse(qstr) == q + assert q.render() == qstr + + +@pytest.mark.asyncio +async def test_collapse_member_and_text_query() -> None: + q = UserQuery.of( + S.name_is("al"), + S.text("hello this world"), + S.direct_member_is(Username.from_name("jane"), Username.from_name("joe")), + ) + qstr = "name:al hello direct_member:@jane this world direct_member:@joe" + assert await QueryParser.parse(qstr) == q + assert q.render() == "name:al hello this world direct_member:@jane,@joe" + + +@pytest.mark.asyncio +async def test_restrict_members_query() -> None: + q = UserQuery.of( + S.name_is("al"), + S.text("hello"), + S.direct_member_is( + Username.from_name("jane"), Username.from_name("joe"), Username.from_name("jeff"), UserId("123") + ), + ) + qstr = "name:al direct_member:@jane hello direct_member:@joe,@jeff,123,456,@wuff" + assert (await QueryParser.parse(qstr)) == q + + +@pytest.mark.asyncio +async def test_invalid_query() -> None: + result = await QueryParser.parse("type:uu:ue:") + assert result == UserQuery([Text("type:uu:ue:")]) + + +@pytest.mark.asyncio +async def test_random_query() -> None: + """Any random string must parse successfully.""" + rlen = random.randint(0, 50) + rstr = "".join(random.choices(string.printable, k=rlen)) + await QueryParser.parse(rstr) + + +def test_string_basic() -> None: + assert pp.string_basic.parse("abcde") == "abcde" + assert pp.string_basic.parse("project_one") == "project_one" + + with pytest.raises(ParseError): + pp.string_basic.parse("a b") + with pytest.raises(ParseError): + pp.string_basic.parse("a,b") + with pytest.raises(ParseError): + pp.string_basic.parse('a"b"') + + +def test_string_quoted() -> None: + assert pp.string_quoted.parse('"abc"') == "abc" + assert pp.string_quoted.parse('"a b c"') == "a b c" + assert pp.string_quoted.parse('"a,b,c"') == "a,b,c" + assert pp.string_quoted.parse('"a and \\"b\\" and c"') == 'a and "b" and c' + + +def test_string_value() -> None: + assert pp.string_value.parse("abc") == "abc" + assert pp.string_value.parse('"a b c"') == "a b c" + assert pp.string_value.parse('"a,b,c"') == "a,b,c" + assert pp.string_value.parse('"a and \\"b\\" and c"') 
== 'a and "b" and c' + + +def test_string_values() -> None: + assert pp.string_values.parse("a,b") == Nel.of("a", "b") + assert pp.string_values.parse('a,"b c",d') == Nel.of("a", "b c", "d") + + +dp = _DateTimeParser() + + +def test_year() -> None: + assert dp.year.parse("2022") == 2022 + assert dp.year.parse("1955") == 1955 + + with pytest.raises(ParseError): + dp.year.parse("098") + + with pytest.raises(ParseError): + dp.year.parse("abc") + + with pytest.raises(ParseError): + dp.year.parse("80") + + with pytest.raises(ParseError): + dp.year.parse("8") + + +def test_month() -> None: + assert dp.month.parse("1") == 1 + assert dp.month.parse("01") == 1 + assert dp.month.parse("12") == 12 + assert dp.month.parse("8") == 8 + + with pytest.raises(ParseError): + dp.month.parse("0") + with pytest.raises(ParseError): + dp.month.parse("-1") + with pytest.raises(ParseError): + dp.month.parse("15") + with pytest.raises(ParseError): + dp.month.parse("13") + + +def test_day() -> None: + assert dp.dom.parse("1") == 1 + assert dp.dom.parse("01") == 1 + assert dp.dom.parse("12") == 12 + assert dp.dom.parse("8") == 8 + assert dp.dom.parse("31") == 31 + + with pytest.raises(ParseError): + dp.dom.parse("0") + with pytest.raises(ParseError): + dp.dom.parse("-1") + with pytest.raises(ParseError): + dp.dom.parse("32") + + +def test_hour() -> None: + assert dp.hour.parse("1") == 1 + assert dp.hour.parse("01") == 1 + assert dp.hour.parse("0") == 0 + assert dp.hour.parse("8") == 8 + assert dp.hour.parse("23") == 23 + + with pytest.raises(ParseError): + dp.hour.parse("24") + with pytest.raises(ParseError): + dp.hour.parse("-1") + with pytest.raises(ParseError): + dp.hour.parse("abc") + + +def test_minsec() -> None: + assert dp.minsec.parse("1") == 1 + assert dp.minsec.parse("01") == 1 + assert dp.minsec.parse("0") == 0 + assert dp.minsec.parse("00") == 0 + assert dp.minsec.parse("8") == 8 + assert dp.minsec.parse("59") == 59 + + with pytest.raises(ParseError): + dp.minsec.parse("60") + with pytest.raises(ParseError): + dp.minsec.parse("-1") + with pytest.raises(ParseError): + dp.minsec.parse("abc") + + +def test_partial_date() -> None: + assert dp.partial_date.parse("2022") == PartialDate(2022) + assert dp.partial_date.parse("2024-05") == PartialDate(2024, 5, None) + + with pytest.raises(ParseError): + dp.partial_date.parse("2020-05-01T08:00") + with pytest.raises(ParseError): + dp.partial_date.parse("-05-01") + with pytest.raises(ParseError): + dp.partial_date.parse("05-01") + with pytest.raises(ParseError): + dp.partial_date.parse("2023-15-01") + + +def test_partial_time() -> None: + assert dp.partial_time.parse("08:10") == PartialTime(8, 10, None) + assert dp.partial_time.parse("08") == PartialTime(8) + assert dp.partial_time.parse("8") == PartialTime(8) + assert dp.partial_time.parse("8:5") == PartialTime(8, 5, None) + assert dp.partial_time.parse("08:55:10") == PartialTime(8, 55, 10) + + with pytest.raises(ParseError): + dp.partial_time.parse("2020-05-01T08:00") + with pytest.raises(ParseError): + dp.partial_time.parse("56:56") + with pytest.raises(ParseError): + dp.partial_time.parse("000:15") + + +def test_parital_datetime() -> None: + assert dp.partial_datetime.parse("2022") == PartialDateTime(PartialDate(2022)) + assert dp.partial_datetime.parse("2024-05") == PartialDateTime(PartialDate(2024, 5, None)) + assert dp.partial_datetime.parse("2024-05T8") == PartialDateTime(PartialDate(2024, 5), PartialTime(8)) + assert dp.partial_datetime.parse("2025-03-01T12Z") == PartialDateTime( + PartialDate(2025, 3, 1), 
PartialTime(12), datetime.UTC + ) + + +def test_relative_date() -> None: + assert dp.relative_date.parse("today") == RelativeDate.today + assert dp.relative_date.parse("yesterday") == RelativeDate.yesterday + + +def test_datetime_calc() -> None: + assert dp.datetime_calc.parse("2022-05+10d") == DateTimeCalc(PartialDateTime(PartialDate(2022, 5)), 10, False) + assert dp.datetime_calc.parse("today-5d") == DateTimeCalc(RelativeDate.today, -5, False) + assert dp.datetime_calc.parse("yesterday/8D") == DateTimeCalc(RelativeDate.yesterday, 8, True) + + with pytest.raises(ParseError): + dp.datetime_calc.parse("today+-10d") + with pytest.raises(ParseError): + dp.datetime_calc.parse("today/-10d") + + +def test_datetime_ref() -> None: + assert dp.datetime_ref.parse("2022-05+10d") == DateTimeCalc(PartialDateTime(PartialDate(2022, 5)), 10, False) + assert dp.datetime_ref.parse("today-5d") == DateTimeCalc(RelativeDate.today, -5, False) + assert dp.datetime_ref.parse("yesterday/8D") == DateTimeCalc(RelativeDate.yesterday, 8, True) + + assert dp.datetime_ref.parse("today") == RelativeDate.today + assert dp.datetime_ref.parse("yesterday") == RelativeDate.yesterday + + assert dp.datetime_ref.parse("2022") == PartialDateTime(PartialDate(2022)) + assert dp.datetime_ref.parse("2024-05") == PartialDateTime(PartialDate(2024, 5, None)) + assert dp.datetime_ref.parse("2024-05T8") == PartialDateTime(PartialDate(2024, 5), PartialTime(8)) + assert dp.datetime_ref.parse("2025-03-01T12Z") == PartialDateTime( + PartialDate(2025, 3, 1), PartialTime(12), datetime.UTC + ) diff --git a/test/components/renku_data_services/search/test_user_query_process.py b/test/components/renku_data_services/search/test_user_query_process.py new file mode 100644 index 000000000..3f762338a --- /dev/null +++ b/test/components/renku_data_services/search/test_user_query_process.py @@ -0,0 +1,76 @@ +"""Tests for user query processing.""" + +import pytest + +from renku_data_services.search.user_query import Segments as S +from renku_data_services.search.user_query import UserId, Username, UserQuery +from renku_data_services.search.user_query_process import ( + CollapseMembers, + CollapseText, + CollectEntityTypes, +) +from renku_data_services.solr.entity_documents import EntityType + + +@pytest.mark.asyncio +async def test_find_entity_types() -> None: + q = UserQuery.of(S.keyword_is("science"), S.name_is("test")) + assert await q.accept(CollectEntityTypes()) is None + + q = UserQuery.of(S.keyword_is("science"), S.type_is(EntityType.project), S.name_is("test")) + assert await q.accept(CollectEntityTypes()) == set([EntityType.project]) + + q = UserQuery.of( + S.keyword_is("science"), + S.type_is(EntityType.project, EntityType.dataconnector), + S.name_is("test"), + ) + assert await q.accept(CollectEntityTypes()) == set([EntityType.project, EntityType.dataconnector]) + + q = UserQuery.of( + S.keyword_is("science"), + S.type_is(EntityType.project), + S.type_is(EntityType.dataconnector), + S.name_is("test"), + ) + assert await q.accept(CollectEntityTypes()) == set() + + +@pytest.mark.asyncio +async def test_collapse_text_segments() -> None: + q = UserQuery.of( + S.name_is("john"), + S.text("hello"), + S.text("world"), + S.keyword_is("check"), + S.text("help"), + ) + assert await q.accept(CollapseText()) == UserQuery.of( + S.name_is("john"), + S.text("hello world"), + S.keyword_is("check"), + S.text("help"), + ) + + +@pytest.mark.asyncio +async def test_restrict_members_query() -> None: + q = UserQuery.of( + S.name_is("al"), + 
S.direct_member_is(Username.from_name("jane")), + S.text("hello"), + S.direct_member_is( + Username.from_name("joe"), + Username.from_name("jeff"), + UserId("123"), + UserId("456"), + Username.from_name("wuff"), + ), + ) + assert await q.transform(CollapseMembers()) == UserQuery.of( + S.name_is("al"), + S.text("hello"), + S.direct_member_is( + Username.from_name("jane"), Username.from_name("joe"), Username.from_name("jeff"), UserId("123") + ), + ) diff --git a/test/components/renku_data_services/solr/test_entity_documents.py b/test/components/renku_data_services/solr/test_entity_documents.py new file mode 100644 index 000000000..2485154c8 --- /dev/null +++ b/test/components/renku_data_services/solr/test_entity_documents.py @@ -0,0 +1,287 @@ +from datetime import datetime + +from ulid import ULID + +from renku_data_services.authz.models import Visibility +from renku_data_services.base_models.core import Slug +from renku_data_services.solr.entity_documents import DataConnector, Group, Project, User + +user_jan_ullrich = User.of(id="abc-def", firstName="Jan", lastName="Ullrich", slug=Slug("janu")) +user_tadej_pogacar = User.of(id="hij-klm", firstName="Tadej", lastName="Pogačar", slug=Slug("tadejp")) +group_team = Group.of(id=ULID(), name="The Team", slug=Slug("the-team"), description="A group consisting of a team") +project_ai_stuff = Project( + id=ULID(), + name="AI stuff", + slug=Slug("the-p1"), + path="the-p1", + namespace_path=user_jan_ullrich.path, + visibility=Visibility.PUBLIC, + createdBy=user_jan_ullrich.id, + creationDate=datetime(year=2025, month=1, day=31, hour=9, minute=47, second=44), +).in_namespace(user_jan_ullrich) + +dc_one = DataConnector( + id=ULID(), + readonly=True, + storageType="s3", + name="qq dc one", + slug=Slug("dc-xy1"), + path="dc-xy1", + visibility=Visibility.PUBLIC, + createdBy=user_jan_ullrich.id, + creationDate=datetime(year=2025, month=4, day=10, hour=16, minute=14, second=4), + description="Bad data is filtered out.", +).in_namespace(user_jan_ullrich) + +dc_global = DataConnector( + id=ULID(), + readonly=True, + storageType="s3", + name="qq dc global", + slug=Slug("dc-global-1"), + path="dc-global-1", + visibility=Visibility.PUBLIC, + createdBy=user_jan_ullrich.id, + creationDate=datetime(year=2025, month=5, day=9, hour=11, minute=14, second=4), + description="This is for all of us.", +).in_namespace(None) + + +def test_dc_project_keywords_sort() -> None: + dc = DataConnector.from_dict( + { + "id": str(dc_one.id), + "slug": "dc-xy1", + "name": "qq dc one", + "readonly": True, + "storageType": "s3", + "namespacePath": user_jan_ullrich.path, + "path": f"{user_jan_ullrich.path}/dc-xy1", + "createdBy": user_jan_ullrich.id, + "creationDate": "2025-04-10T16:14:04Z", + "description": "Bad data is filtered out.", + "keywords": ["z", "a", "b"], + "visibility": "public", + "_kind": "fullentity", + "_type": "DataConnector", + "_version_": -1, + } + ) + assert dc.keywords == ["a", "b", "z"] + + p = Project.from_dict( + { + "id": str(project_ai_stuff.id), + "name": "AI stuff", + "slug": "the-p1", + "namespacePath": user_jan_ullrich.path, + "path": f"{user_jan_ullrich.path}/the-p1", + "visibility": "public", + "createdBy": "abc-def", + "creationDate": "2025-01-31T09:47:44Z", + "_type": "Project", + "keywords": ["z", "b", "a"], + "_kind": "fullentity", + "_version_": -1, + } + ) + assert p.keywords == ["a", "b", "z"] + + +def test_dc_global_dict(): + assert dc_global.to_dict() == { + "id": str(dc_global.id), + "slug": "dc-global-1", + "name": "qq dc global", + "path": 
"dc-global-1", + "isNamespace": False, + "readonly": True, + "storageType": "s3", + "createdBy": user_jan_ullrich.id, + "creationDate": "2025-05-09T11:14:04Z", + "description": "This is for all of us.", + "keywords": [], + "visibility": "public", + "_kind": "fullentity", + "_type": "DataConnector", + "_version_": -1, + } + + +def test_read_dc_global_from_dict(): + dc = DataConnector.from_dict( + { + "id": str(dc_global.id), + "slug": "dc-global-1", + "name": "qq dc global", + "path": "dc-global-1", + "readonly": True, + "storageType": "s3", + "createdBy": user_jan_ullrich.id, + "creationDate": "2025-05-09T11:14:04Z", + "description": "This is for all of us.", + "keywords": [], + "visibility": "public", + "_kind": "fullentity", + "_type": "DataConnector", + "_version_": -1, + } + ) + assert dc == dc_global + + +def test_dc_dict(): + assert dc_one.to_dict() == { + "id": str(dc_one.id), + "slug": "dc-xy1", + "path": f"{user_jan_ullrich.path}/dc-xy1", + "name": "qq dc one", + "readonly": True, + "storageType": "s3", + "namespacePath": user_jan_ullrich.path, + "createdBy": user_jan_ullrich.id, + "creationDate": "2025-04-10T16:14:04Z", + "description": "Bad data is filtered out.", + "keywords": [], + "visibility": "public", + "isNamespace": False, + "_kind": "fullentity", + "_type": "DataConnector", + "_version_": -1, + } + + +def test_read_dc_from_dict(): + dc = DataConnector.from_dict( + { + "id": str(dc_one.id), + "slug": "dc-xy1", + "name": "qq dc one", + "readonly": True, + "storageType": "s3", + "namespacePath": user_jan_ullrich.path, + "path": f"{user_jan_ullrich.path}/dc-xy1", + "createdBy": user_jan_ullrich.id, + "creationDate": "2025-04-10T16:14:04Z", + "description": "Bad data is filtered out.", + "keywords": [], + "visibility": "public", + "_kind": "fullentity", + "_type": "DataConnector", + "_version_": -1, + } + ) + assert dc == dc_one + + +def test_user_dict(): + assert user_jan_ullrich.to_dict() == { + "id": "abc-def", + "slug": "janu", + "path": "janu", + "firstName": "Jan", + "lastName": "Ullrich", + "_type": "User", + "_kind": "fullentity", + "visibility": "public", + "isNamespace": True, + "_version_": -1, + } + assert user_tadej_pogacar.to_dict() == { + "id": "hij-klm", + "slug": "tadejp", + "path": "tadejp", + "firstName": "Tadej", + "lastName": "Pogačar", + "_type": "User", + "_kind": "fullentity", + "visibility": "public", + "isNamespace": True, + "_version_": -1, + } + + +def test_read_user_dict(): + u1 = { + "id": "abc-def", + "path": "janu", + "slug": "janu", + "firstName": "Jan", + "lastName": "Ullrich", + "_type": "User", + "_kind": "fullentity", + "visibility": "public", + "_version_": -1, + } + u = User.from_dict(u1) + assert u == user_jan_ullrich + + +def test_group_dict(): + assert group_team.to_dict() == { + "id": str(group_team.id), + "name": "The Team", + "slug": "the-team", + "path": "the-team", + "description": "A group consisting of a team", + "_type": "Group", + "_kind": "fullentity", + "visibility": "public", + "isNamespace": True, + "_version_": -1, + } + + +def test_read_group_dict(): + g = Group.from_dict( + { + "id": str(group_team.id), + "name": "The Team", + "path": "the-team", + "slug": "the-team", + "description": "A group consisting of a team", + "_type": "Group", + "_kind": "fullentity", + "visibility": "public", + "_version_": -1, + } + ) + assert g == group_team + + +def test_project_dict(): + assert project_ai_stuff.to_dict() == { + "id": str(project_ai_stuff.id), + "name": "AI stuff", + "slug": "the-p1", + "path": 
f"{user_jan_ullrich.path}/the-p1", + "namespacePath": user_jan_ullrich.path, + "visibility": "public", + "repositories": [], + "keywords": [], + "createdBy": "abc-def", + "creationDate": "2025-01-31T09:47:44Z", + "_type": "Project", + "_kind": "fullentity", + "isNamespace": True, + "_version_": -1, + } + + +def test_read_project_dict(): + p = Project.from_dict( + { + "id": str(project_ai_stuff.id), + "name": "AI stuff", + "slug": "the-p1", + "namespacePath": user_jan_ullrich.path, + "path": f"{user_jan_ullrich.path}/the-p1", + "visibility": "public", + "createdBy": "abc-def", + "creationDate": "2025-01-31T09:47:44Z", + "_type": "Project", + "_kind": "fullentity", + "_version_": -1, + } + ) + assert p == project_ai_stuff diff --git a/test/components/renku_data_services/solr/test_solr_client.py b/test/components/renku_data_services/solr/test_solr_client.py new file mode 100644 index 000000000..38e3f689c --- /dev/null +++ b/test/components/renku_data_services/solr/test_solr_client.py @@ -0,0 +1,361 @@ +import json +import random +import string + +import pytest + +from renku_data_services.solr.entity_documents import Group, Project, User +from renku_data_services.solr.entity_schema import Fields, FieldTypes +from renku_data_services.solr.solr_client import ( + DefaultSolrAdminClient, + DefaultSolrClient, + FacetArbitraryRange, + FacetBuckets, + FacetCount, + FacetRange, + FacetTerms, + SolrBucketFacetResponse, + SolrClientConfig, + SolrClientCreateCoreException, + SolrFacets, + SolrQuery, + SortDirection, + SubQuery, + UpsertResponse, + UpsertSuccess, +) +from renku_data_services.solr.solr_schema import AddCommand, Field, FieldName, SchemaCommandList +from test.components.renku_data_services.solr import test_entity_documents + + +def assert_upsert_result(r: UpsertResponse): + match r: + case UpsertSuccess() as s: + assert s.header.status == 0 + assert s.header.query_time > 0 + case _: + raise Exception(f"Unexpected result: {r}") + + +def test_facet_terms() -> None: + ft = FacetTerms(name=FieldName("types"), field=Fields.entity_type) + assert ft.to_dict() == { + "types": { + "type": "terms", + "field": "_type", + "missing": False, + "numBuckets": False, + "allBuckets": False, + } + } + ft = FacetTerms(name=FieldName("cat"), field=FieldName("category"), limit=100) + assert ft.to_dict() == { + "cat": { + "type": "terms", + "field": "category", + "limit": 100, + "missing": False, + "numBuckets": False, + "allBuckets": False, + } + } + + +def test_facet_range() -> None: + fr = FacetArbitraryRange( + name=FieldName("stars"), + field=FieldName("stars"), + ranges=[FacetRange(start="*", to=100), FacetRange(start=100, to=200), FacetRange(start=200, to="*")], + ) + assert fr.to_dict() == { + "stars": { + "type": "range", + "field": "stars", + "ranges": [{"from": "*", "to": 100}, {"from": 100, "to": 200}, {"from": 200, "to": "*"}], + } + } + + +def test_solr_facets() -> None: + fc = SolrFacets.of( + FacetTerms(name=FieldName("types"), field=Fields.entity_type), + FacetArbitraryRange( + name=FieldName("stars"), + field=FieldName("stars"), + ranges=[FacetRange(start="*", to=100), FacetRange(start=100, to=200), FacetRange(start=200, to="*")], + ), + ) + assert fc.to_dict() == { + "stars": { + "type": "range", + "field": "stars", + "ranges": [{"from": "*", "to": 100}, {"from": 100, "to": 200}, {"from": 200, "to": "*"}], + }, + "types": { + "type": "terms", + "field": "_type", + "missing": False, + "numBuckets": False, + "allBuckets": False, + }, + } + + +def test_facet_buckets() -> None: + fb = FacetBuckets( 
+        buckets=[FacetCount(field=FieldName("electronic"), count=5), FacetCount(field=FieldName("garden"), count=10)]
+    )
+    assert fb.to_dict() == {"buckets": [{"val": "electronic", "count": 5}, {"val": "garden", "count": 10}]}
+
+    fb_str = """{
+      "buckets":[
+        {"val":"electronics", "count":12},
+        {"val":"currency", "count":4},
+        {"val":"memory", "count":3}
+      ]
+    }"""
+    assert FacetBuckets.model_validate_json(fb_str) == FacetBuckets(
+        buckets=[
+            FacetCount(field=FieldName("electronics"), count=12),
+            FacetCount(field=FieldName("currency"), count=4),
+            FacetCount(field=FieldName("memory"), count=3),
+        ]
+    )
+
+
+def test_serialize_solr_query():
+    q = SolrQuery.query_all_fields("name:hello")
+    assert q.to_dict() == {"query": "name:hello", "fields": ["*", "score"], "sort": ""}
+
+    q = SolrQuery.query_all_fields("name:hello").with_sort([(Fields.name, SortDirection.asc)])
+    assert q.to_dict() == {"query": "name:hello", "fields": ["*", "score"], "sort": "name asc"}
+
+    q = SolrQuery.query_all_fields("name:hello").with_sort(
+        [(Fields.name, SortDirection.asc), (Fields.creation_date, SortDirection.desc)]
+    )
+    assert q.to_dict() == {"query": "name:hello", "fields": ["*", "score"], "sort": "name asc,creationDate desc"}
+
+    q = (
+        SolrQuery.query_all_fields("name:test help")
+        .with_facet(FacetTerms(name=FieldName("type"), field=FieldName("_type")))
+        .add_sub_query(FieldName("details"), SubQuery(query="test", filter="", limit=1))
+        .with_sort([(Fields.name, SortDirection.asc)])
+    )
+    assert q.to_dict() == {
+        "query": "name:test help",
+        "fields": ["*", "score", "details:[subquery]"],
+        "sort": "name asc",
+        "params": {"details.q": "test", "details.limit": "1"},
+        "facet": {
+            "type": {"type": "terms", "field": "_type", "missing": False, "numBuckets": False, "allBuckets": False}
+        },
+    }
+
+
+def test_solr_bucket_facet_response() -> None:
+    response_str = """{
+      "count":32,
+      "categories":{
+        "buckets":[
+          {"val":"electronics", "count":12},
+          {"val":"currency", "count":4},
+          {"val":"memory", "count":3}
+        ]
+      },
+      "memories":{
+        "buckets":[
+          {"val":"bike", "count":2},
+          {"val":"chair", "count":4},
+          {"val":"memory", "count":6}
+        ]
+      }
+    }"""
+    fr = SolrBucketFacetResponse.model_validate_json(response_str)
+    expected = SolrBucketFacetResponse(
+        count=32,
+        buckets={
+            FieldName("categories"): FacetBuckets(
+                buckets=[
+                    FacetCount(field=FieldName("electronics"), count=12),
+                    FacetCount(field=FieldName("currency"), count=4),
+                    FacetCount(field=FieldName("memory"), count=3),
+                ]
+            ),
+            FieldName("memories"): FacetBuckets(
+                buckets=[
+                    FacetCount(field=FieldName("bike"), count=2),
+                    FacetCount(field=FieldName("chair"), count=4),
+                    FacetCount(field=FieldName("memory"), count=6),
+                ]
+            ),
+        },
+    )
+    assert fr == expected
+    assert expected.to_dict() == json.loads(response_str)
+
+
+@pytest.mark.asyncio
+async def test_insert_and_query_project(solr_search):
+    async with DefaultSolrClient(solr_search) as client:
+        p = test_entity_documents.project_ai_stuff
+        r1 = await client.upsert([p])
+        assert_upsert_result(r1)
+
+        qr = await client.query(SolrQuery.query_all_fields(f"id:{str(p.id)}"))
+        assert qr.responseHeader.status == 0
+        assert qr.response.num_found == 1
+        assert len(qr.response.docs) == 1
+
+        doc = Project.model_validate(qr.response.docs[0])
+        assert doc.id == p.id
+        assert doc.name == p.name
+        assert doc.score is not None
+        assert doc.score > 0
+
+
+@pytest.mark.asyncio
+async def test_insert_and_query_user(solr_search):
+    async with DefaultSolrClient(solr_search) as client:
+        u1 =
test_entity_documents.user_tadej_pogacar + u2 = test_entity_documents.user_jan_ullrich + r1 = await client.upsert([u1, u2]) + assert_upsert_result(r1) + + qr = await client.query( + SolrQuery.query_all_fields("_type:User").with_sort([(Fields.first_name, SortDirection.asc)]) + ) + assert qr.responseHeader.status == 0 + assert qr.response.num_found == 2 + assert len(qr.response.docs) == 2 + + su1 = User.from_dict(qr.response.docs[0]) + su2 = User.from_dict(qr.response.docs[1]) + assert su1.score is not None and su1.score > 0 + assert su2.score is not None and su2.score > 0 + assert su1.reset_solr_fields() == u2 + assert su2.reset_solr_fields() == u1 + + +@pytest.mark.asyncio +async def test_insert_and_query_group(solr_search): + async with DefaultSolrClient(solr_search) as client: + g = test_entity_documents.group_team + r1 = await client.upsert([g]) # type:ignore + assert_upsert_result(r1) + + qr = await client.query(SolrQuery.query_all_fields("_type:Group")) + assert qr.responseHeader.status == 0 + assert qr.response.num_found == 1 + assert len(qr.response.docs) == 1 + + sg = Group.from_dict(qr.response.docs[0]) + assert sg.score is not None and sg.score > 0 + assert sg.reset_solr_fields() == g + + +@pytest.mark.asyncio +async def test_status_for_non_existing_core(solr_config): + cfg = SolrClientConfig(base_url=solr_config.base_url, core="blahh-blah", user=solr_config.user) + async with DefaultSolrAdminClient(cfg) as client: + status = await client.core_status(None) + assert status is None + + +@pytest.mark.asyncio +async def test_status_for_existing_core(solr_config): + async with DefaultSolrAdminClient(solr_config) as client: + status = await client.core_status(None) + print(status) + assert status is not None + assert status["name"] == solr_config.core + assert status["schema"] == "managed-schema.xml" + assert "dataDir" in status + assert "config" in status + assert "index" in status + assert "userData" in status["index"] + + +@pytest.mark.asyncio +async def test_create_new_core(solr_config): + random_name = "".join(random.choices(string.ascii_lowercase + string.digits, k=9)) + async with DefaultSolrAdminClient(solr_config) as client: + res = await client.create(random_name) + assert res is None + + next_cfg = SolrClientConfig(base_url=solr_config.base_url, core=random_name, user=solr_config.user) + async with DefaultSolrAdminClient(next_cfg) as client: + res = await client.core_status(None) + assert res is not None + + async with DefaultSolrClient(next_cfg) as client: + resp = await client.modify_schema( + SchemaCommandList( + [ + AddCommand(FieldTypes.string), + AddCommand(Field.of(Fields.kind, FieldTypes.string)), + ] + ) + ) + assert resp.status_code == 200 + + +@pytest.mark.asyncio +async def test_create_same_core_twice(solr_config): + random_name = "".join(random.choices(string.ascii_lowercase + string.digits, k=9)) + async with DefaultSolrAdminClient(solr_config) as client: + res = await client.create(random_name) + assert res is None + + with pytest.raises(SolrClientCreateCoreException): + await client.create(random_name) + + +@pytest.mark.asyncio +async def test_sub_query(solr_search): + async with DefaultSolrClient(solr_search) as client: + u1 = test_entity_documents.user_tadej_pogacar + u2 = test_entity_documents.user_jan_ullrich + p = test_entity_documents.project_ai_stuff + r1 = await client.upsert([u1, u2, p]) + assert_upsert_result(r1) + + creator_details = FieldName("creatorDetails") + + query = SolrQuery.query_all_fields("_type:Project").add_sub_query( + 
creator_details, + SubQuery(query="{!terms f=id v=$row.createdBy}", filter="{!terms f=_kind v=fullentity}", limit=1), + ) + + r2 = await client.query(query) + assert len(r2.response.docs) == 1 + details = r2.response.docs[0][creator_details] + assert len(details["docs"]) == 1 + user_doc = details["docs"][0] + user = User.model_validate(user_doc) + assert user.path == u2.path + assert user.id == u2.id + + +@pytest.mark.asyncio +async def test_run_facet_query(solr_search): + async with DefaultSolrClient(solr_search) as client: + u1 = test_entity_documents.user_tadej_pogacar + u2 = test_entity_documents.user_jan_ullrich + p = test_entity_documents.project_ai_stuff + r1 = await client.upsert([u1, u2, p]) + assert_upsert_result(r1) + + query = SolrQuery.query_all_fields("_type:*").with_facet( + FacetTerms(name=Fields.entity_type, field=Fields.entity_type) + ) + + r2 = await client.query(query) + assert len(r2.response.docs) == 3 + assert r2.facets == SolrBucketFacetResponse( + count=3, + buckets={ + Fields.entity_type: FacetBuckets.of( + FacetCount(field=FieldName("User"), count=2), FacetCount(field=FieldName("Project"), count=1) + ) + }, + ) diff --git a/test/components/renku_data_services/solr/test_solr_migrate.py b/test/components/renku_data_services/solr/test_solr_migrate.py new file mode 100644 index 000000000..33f461e85 --- /dev/null +++ b/test/components/renku_data_services/solr/test_solr_migrate.py @@ -0,0 +1,41 @@ +import random +import string + +import pytest + +from renku_data_services.solr import entity_schema +from renku_data_services.solr.solr_client import DefaultSolrAdminClient, SolrClientConfig +from renku_data_services.solr.solr_migrate import SchemaMigrator + + +@pytest.mark.asyncio +async def test_creating_schema(solr_config): + migrator = SchemaMigrator(solr_config) + migrations = entity_schema.all_migrations.copy() + result = await migrator.migrate(migrations) + migrations.sort(key=lambda e: e.version) + last = migrations[-1] + assert result.end_version == last.version + assert result.migrations_run == len(migrations) + assert result.migrations_skipped == 0 + assert result.requires_reindex + + +@pytest.mark.asyncio +async def test_creating_schema_in_new_core(solr_config): + random_name = "".join(random.choices(string.ascii_lowercase + string.digits, k=9)) + async with DefaultSolrAdminClient(solr_config) as client: + res = await client.create(random_name) + assert res is None + + next_cfg = SolrClientConfig(base_url=solr_config.base_url, core=random_name, user=solr_config.user) + + migrator = SchemaMigrator(next_cfg) + migrations = entity_schema.all_migrations.copy() + result = await migrator.migrate(migrations) + migrations.sort(key=lambda e: e.version) + last = migrations[-1] + assert result.end_version == last.version + assert result.migrations_run == len(migrations) + assert result.migrations_skipped == 0 + assert result.requires_reindex diff --git a/test/components/renku_data_services/solr/test_solr_schema.py b/test/components/renku_data_services/solr/test_solr_schema.py new file mode 100644 index 000000000..347a1384d --- /dev/null +++ b/test/components/renku_data_services/solr/test_solr_schema.py @@ -0,0 +1,53 @@ +from renku_data_services.solr.solr_schema import ( + AddCommand, + Analyzer, + CopyFieldRule, + Field, + FieldName, + FieldType, + FieldTypeClasses, + Filters, + SchemaCommand, + SchemaCommandList, + Tokenizers, + TypeName, +) + + +def test_multiple_commands_in_one_object(): + tokenizer = Tokenizers.classic + analyzer = Analyzer(tokenizer=tokenizer, 
filters=[Filters.ngram]) + + ft = FieldType(name=TypeName("name_s"), clazz=FieldTypeClasses.type_text, indexAnalyzer=analyzer) + + cmds: list[SchemaCommand] = [ + AddCommand(Field.of(name=FieldName("project_name_s"), type=ft)), + AddCommand(Field.of(name=FieldName("user_name_s"), type=ft)), + AddCommand(CopyFieldRule(source=FieldName("username"), dest=FieldName("content_all"))), + ] + + json = SchemaCommandList(value=cmds).to_json() + assert json == "".join( + [ + '{"add-field":{"name": "project_name_s", "type": "name_s"},', + '"add-field":{"name": "user_name_s", "type": "name_s"},', + '"add-copy-field":{"source": "username", "dest": "content_all"}}', + ] + ) + + +def test_encode_schema_command_add(): + v = AddCommand(Field(name=FieldName("description"), type=TypeName("integer"))) + assert SchemaCommandList([v]).to_json() == """{"add-field":{"name": "description", "type": "integer"}}""" + + +def test_encode_filter_with_settings(): + filter = Filters.edgeNgram() + json = filter.to_dict() + assert json == {"minGramSize": "3", "maxGramSize": "6", "preserveOriginal": "true", "name": "edgeNGram"} + + +def test_encode_filter_without_settings(): + filter = Filters.english_minimal_stem + json = filter.to_dict() + assert json == {"name": "englishMinimalStem"} diff --git a/test/components/renku_data_services/users/test_db.py b/test/components/renku_data_services/users/test_db.py new file mode 100644 index 000000000..cbd511bd6 --- /dev/null +++ b/test/components/renku_data_services/users/test_db.py @@ -0,0 +1,43 @@ +"""Tests for database users.""" + +from collections.abc import Callable +from dataclasses import dataclass +from typing import cast + +import pytest +from sqlalchemy.ext.asyncio import AsyncSession + +from renku_data_services.base_models.core import APIUser +from renku_data_services.base_models.nel import Nel +from renku_data_services.migrations.core import run_migrations_for_app +from renku_data_services.users.db import DbUsernameResolver, UserRepo +from renku_data_services.users.models import UserInfo + + +@dataclass +class TestUsernameResolver(DbUsernameResolver): + session_maker: Callable[..., AsyncSession] + + def make_session(self) -> AsyncSession: + return self.session_maker() + + +def _username(info: UserInfo) -> str: + return info.namespace.path.first.value + + +@pytest.mark.asyncio +async def test_username_resolve(app_manager_instance) -> None: + run_migrations_for_app("common") + user_repo: UserRepo = app_manager_instance.kc_user_repo + user1 = APIUser(id="id-123", first_name="Mads", last_name="Pedersen") + user2 = APIUser(id="id-234", first_name="Wout", last_name="van Art") + user_info1 = cast(UserInfo, await user_repo.get_or_create_user(user1, str(user1.id))) + user_info2 = cast(UserInfo, await user_repo.get_or_create_user(user2, str(user2.id))) + + resolver = TestUsernameResolver(app_manager_instance.config.db.async_session_maker) + data = await resolver.resolve_usernames(Nel.of("a.b", _username(user_info1), _username(user_info2))) + assert data is not None + assert data.get(_username(user_info1)) == user_info1.id + assert data.get(_username(user_info2)) == user_info2.id + assert len(data) == 2 diff --git a/test/components/renku_pack_builder/test_environment_build.py b/test/components/renku_pack_builder/test_environment_build.py new file mode 100644 index 000000000..5546a3851 --- /dev/null +++ b/test/components/renku_pack_builder/test_environment_build.py @@ -0,0 +1,90 @@ +import subprocess +import time +from pathlib import Path + +import pytest +import yaml + + +def 
kubectl_apply(namespace: str, manifest: str) -> subprocess.CompletedProcess:
+    cmd = ["kubectl", "--namespace", namespace, "apply", "-f", manifest]
+    return subprocess.run(cmd, capture_output=True)
+
+
+def kubectl_delete(namespace: str, manifest: str) -> subprocess.CompletedProcess:
+    cmd = ["kubectl", "--namespace", namespace, "delete", "--ignore-not-found", "-f", manifest]
+    return subprocess.run(cmd, capture_output=True)
+
+
+@pytest.fixture
+def manifest_path() -> Path:
+    yield Path(__file__).parent / "../../../components/renku_pack_builder/manifests"
+
+
+@pytest.fixture(scope="module")
+def namespace() -> str:
+    ns = "shipwright-tests"
+    cmd = ["kubectl", "create", "namespace", ns]
+    result = subprocess.run(cmd)
+    assert result.returncode == 0
+
+    yield ns
+
+    cmd = ["kubectl", "delete", "namespace", ns]
+    result = subprocess.run(cmd)
+    assert result.returncode == 0
+
+
+@pytest.fixture
+def buildrun(manifest_path: Path) -> Path:
+    yield manifest_path / "buildrun.yaml"
+
+
+@pytest.fixture(autouse=True)
+def setup_shipwright_crds(namespace: str, manifest_path: Path) -> None:
+    manifests = ["buildstrategy.yaml", "build.yaml"]
+
+    for manifest in manifests:
+        result = kubectl_apply(namespace, manifest_path / manifest)
+        assert result.returncode == 0
+
+    yield
+
+    for manifest in reversed(manifests):
+        result = kubectl_delete(namespace, manifest_path / manifest)
+        assert result.returncode == 0
+
+
+@pytest.mark.skip(reason="currently broken, fix this before releasing the shipwright feature")
+def test_buildpacks_buildstrategy(namespace: str, buildrun: Path) -> None:
+    result = kubectl_apply(namespace, buildrun)
+    assert result.returncode == 0
+
+    with open(buildrun) as f:
+        buildrun_content = yaml.safe_load(f)
+
+    buildrun_name = buildrun_content.get("metadata", {}).get("name", None)
+    cmd = [
+        "kubectl",
+        "--namespace",
+        namespace,
+        "get",
+        "buildrun",
+        buildrun_name,
+        "-o",
+        "jsonpath={.status.conditions[0]['reason']}",
+    ]
+
+    succeeded = False
+    for _i in range(5 * 60):
+        result = subprocess.run(cmd, capture_output=True)
+        assert result.returncode == 0
+
+        succeeded = result.stdout == b"Succeeded"
+        if succeeded:
+            break
+        time.sleep(1)
+
+    kubectl_delete(namespace, buildrun)
+
+    assert succeeded
diff --git a/test/conftest.py b/test/conftest.py
index 788c993eb..99007d3be 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -1,16 +1,19 @@
 """Fixtures for testing."""
 
 import asyncio
-import logging
+import logging as ll
 import os
 import secrets
 import socket
+import stat
 import subprocess
-from collections.abc import AsyncGenerator, Iterator
+from collections.abc import AsyncGenerator
+from distutils.dir_util import copy_tree
 from multiprocessing import Lock
 from pathlib import Path
 from uuid import uuid4
 
+import httpx
 import pytest
 import pytest_asyncio
 import uvloop
@@ -21,12 +24,37 @@
 from ulid import ULID
 
 import renku_data_services.base_models as base_models
-from renku_data_services.app_config import Config as DataConfig
+from renku_data_services.app_config import logging
 from renku_data_services.authz.config import AuthzConfig
+from renku_data_services.data_api.dependencies import DependencyManager
 from renku_data_services.db_config.config import DBConfig
-from renku_data_services.secrets.config import Config as SecretsConfig
+from renku_data_services.secrets_storage_api.dependencies import DependencyManager as SecretsDependencyManager
+from renku_data_services.solr import entity_schema
+from renku_data_services.solr.solr_client import SolrClientConfig
+from
renku_data_services.solr.solr_migrate import SchemaMigrator from renku_data_services.users import models as user_preferences_models -from test.utils import TestAppConfig +from test.utils import TestDependencyManager + + +def __make_logging_config() -> logging.Config: + def_cfg = logging.Config( + root_level=ll.ERROR, + app_level=ll.ERROR, + format_style=logging.LogFormatStyle.plain, + override_levels={ll.ERROR: set(["alembic", "sanic"])}, + ) + env_cfg = logging.Config.from_env() + def_cfg.update_override_levels(env_cfg.override_levels) + + test_cfg = logging.Config.from_env(prefix="TEST_") + def_cfg.update_override_levels(test_cfg.override_levels) + return def_cfg + + +logging.configure_logging(__make_logging_config()) + + +logger = logging.getLogger(__name__) settings.register_profile("ci", deadline=400, max_examples=5) settings.register_profile("dev", deadline=200, max_examples=5) @@ -57,8 +85,7 @@ async def monkeysession(): mpatch.undo() -@pytest_asyncio.fixture(scope="session") -async def free_port() -> int: +def free_port() -> int: lock = Lock() with lock, socket.socket() as s: s.bind(("", 0)) @@ -67,8 +94,8 @@ async def free_port() -> int: @pytest_asyncio.fixture(scope="session") -async def authz_setup(monkeysession, free_port) -> AsyncGenerator[None, None]: - port = free_port +async def authz_setup(monkeysession) -> AsyncGenerator[None, None]: + port = free_port() proc = subprocess.Popen( [ "spicedb", @@ -88,13 +115,13 @@ async def authz_setup(monkeysession, free_port) -> AsyncGenerator[None, None]: try: proc.terminate() except Exception as err: - logging.error(f"Encountered error when shutting down Authzed DB for testing {err}") + logger.error(f"Encountered error when shutting down Authzed DB for testing {err}") proc.kill() @pytest_asyncio.fixture async def db_config(monkeypatch, worker_id, authz_setup) -> AsyncGenerator[DBConfig, None]: - db_name = str(ULID()).lower() + "_" + worker_id + db_name = "R_" + str(ULID()).lower() + "_" + worker_id user = os.getenv("DB_USER", "renku") host = os.getenv("DB_HOST", "127.0.0.1") port = os.getenv("DB_PORT", "5432") @@ -116,8 +143,8 @@ async def db_config(monkeypatch, worker_id, authz_setup) -> AsyncGenerator[DBCon @pytest_asyncio.fixture -async def db_instance(monkeysession, worker_id, app_config, event_loop) -> AsyncGenerator[DBConfig, None]: - db_name = str(ULID()).lower() + "_" + worker_id +async def db_instance(monkeysession, worker_id, app_manager, event_loop) -> AsyncGenerator[DBConfig, None]: + db_name = "R_" + str(ULID()).lower() + "_" + worker_id user = os.getenv("DB_USER", "renku") host = os.getenv("DB_HOST", "127.0.0.1") port = os.getenv("DB_PORT", "5432") @@ -135,17 +162,17 @@ async def db_instance(monkeysession, worker_id, app_config, event_loop) -> Async template_dbname="renku_template", ): db = DBConfig.from_env() - app_config.db.push(db) + app_manager.config.db.push(db) yield db - await app_config.db.pop() + await app_manager.config.db.pop() @pytest_asyncio.fixture -async def authz_instance(app_config, monkeypatch) -> Iterator[AuthzConfig]: +async def authz_instance(app_manager: DependencyManager, monkeypatch) -> AsyncGenerator[AuthzConfig]: monkeypatch.setenv("AUTHZ_DB_KEY", f"renku-{uuid4().hex}") - app_config.authz_config.push(AuthzConfig.from_env()) - yield app_config.authz_config - app_config.authz_config.pop() + app_manager.config.authz_config.push(AuthzConfig.from_env()) + yield app_manager.config.authz_config + app_manager.config.authz_config.pop() @pytest_asyncio.fixture(scope="session") @@ -185,29 +212,31 @@ async 
def dummy_users(): @pytest_asyncio.fixture(scope="session") -async def app_config( +async def app_manager( authz_setup, monkeysession, worker_id, secrets_key_pair, dummy_users -) -> AsyncGenerator[DataConfig, None]: +) -> AsyncGenerator[DependencyManager, None]: monkeysession.setenv("DUMMY_STORES", "true") monkeysession.setenv("MAX_PINNED_PROJECTS", "5") monkeysession.setenv("NB_SERVER_OPTIONS__DEFAULTS_PATH", "server_defaults.json") monkeysession.setenv("NB_SERVER_OPTIONS__UI_CHOICES_PATH", "server_options.json") - config = TestAppConfig.from_env(dummy_users) + dm = TestDependencyManager.from_env(dummy_users) + app_name = "app_" + str(ULID()).lower() + "_" + worker_id - config.app_name = app_name - yield config + dm.app_name = app_name + yield dm @pytest_asyncio.fixture -async def app_config_instance(app_config, db_instance, authz_instance) -> AsyncGenerator[DataConfig, None]: - yield app_config +async def app_manager_instance(app_manager, db_instance, authz_instance) -> AsyncGenerator[DependencyManager, None]: + app_manager.metrics.reset_mock() + yield app_manager @pytest_asyncio.fixture -async def secrets_storage_app_config( +async def secrets_storage_app_manager( db_config: DBConfig, secrets_key_pair, monkeypatch, tmp_path -) -> AsyncGenerator[DataConfig, None]: +) -> AsyncGenerator[SecretsDependencyManager, None]: encryption_key_path = tmp_path / "encryption-key" encryption_key_path.write_bytes(secrets.token_bytes(32)) @@ -216,8 +245,8 @@ async def secrets_storage_app_config( monkeypatch.setenv("DB_NAME", db_config.db_name) monkeypatch.setenv("MAX_PINNED_PROJECTS", "5") - config = SecretsConfig.from_env() - yield config + dm = SecretsDependencyManager.from_env() + yield dm @pytest_asyncio.fixture @@ -264,3 +293,113 @@ def pytest_runtest_setup(item): pytest.mark.skipif(not os.getenv("PYTEST_FORCE_RUN_MYSKIPS", False) and condition, reason=reason), append=False, ) + + +@pytest.fixture(scope="session") +def solr_bin_path(): + solr_bin = os.getenv("SOLR_BIN_PATH") + if solr_bin is None: + solr_bin = "solr" + return solr_bin + + +async def __wait_for_solr(host: str, port: int) -> None: + tries = 0 + with httpx.Client() as c: + while True: + try: + c.get(f"http://{host}:{port}/solr") + return None + except Exception as err: + print(err) + if tries >= 20: + raise Exception(f"Cannot connect to solr, gave up after {tries} tries.") from err + else: + tries = tries + 1 + await asyncio.sleep(1) + + +@pytest_asyncio.fixture(scope="session") +async def solr_instance(tmp_path_factory, monkeysession, solr_bin_path): + solr_root = tmp_path_factory.mktemp("solr") + solr_bin = solr_bin_path + port = free_port() + logger.info(f"Starting SOLR at port {port}") + args = [ + solr_bin, + "start", + "-f", + "--jvm-opts", + "-Xmx256M -Xms256M", + "--host", + "localhost", + "--port", + f"{port}", + "-s", + f"{solr_root}", + "-t", + f"{solr_root}", + "--user-managed", + ] + logger.info(f"Starting SOLR via: {args}") + proc = subprocess.Popen( + args, + env={"PATH": os.getenv("PATH", ""), "SOLR_LOGS_DIR": f"{solr_root}", "SOLR_ULIMIT_CHECKS": "false"}, + ) + monkeysession.setenv("SOLR_TEST_PORT", f"{port}") + monkeysession.setenv("SOLR_ROOT_DIR", solr_root) + monkeysession.setenv("SOLR_URL", f"http://localhost:{port}") + + await __wait_for_solr("localhost", port) + + yield + try: + proc.terminate() + except Exception as err: + logger.error(f"Encountered error when shutting down solr for testing {err}") + proc.kill() + + +@pytest.fixture +def solr_core(solr_instance, monkeypatch): + core_name = "test_core_" + 
str(ULID()).lower()[-12:]
+    monkeypatch.setenv("SOLR_TEST_CORE", core_name)
+    monkeypatch.setenv("SOLR_CORE", core_name)
+    return core_name
+
+
+@pytest.fixture()
+def solr_config(solr_core, solr_bin_path):
+    core = solr_core
+    solr_port = os.getenv("SOLR_TEST_PORT")
+    if solr_port is None:
+        raise ValueError("No SOLR_TEST_PORT env variable found")
+
+    solr_url = f"http://localhost:{solr_port}"
+    solr_config = SolrClientConfig(base_url=solr_url, core=core)
+    solr_bin = solr_bin_path
+    result = subprocess.run([solr_bin, "create", "--solr-url", solr_url, "-c", core])
+    result.check_returncode()
+
+    # Unfortunately, Solr creates core directories with only read permissions,
+    # so changing the schema via the API fails because it cannot write to that file.
+    root_dir = os.getenv("SOLR_ROOT_DIR")
+    conf_file = f"{root_dir}/{core}/conf/managed-schema.xml"
+    os.chmod(conf_file, stat.S_IRUSR | stat.S_IWUSR | stat.S_IROTH | stat.S_IRGRP)
+
+    # We also need to create the configset/_default directory to make
+    # core-admin commands work.
+    if not os.path.isdir(f"{root_dir}/configsets/_default"):
+        os.makedirs(f"{root_dir}/configsets/_default")
+        copy_tree(f"{root_dir}/{core}/conf", f"{root_dir}/configsets/_default/conf")
+
+    return solr_config
+
+
+@pytest_asyncio.fixture()
+async def solr_search(solr_config, app_manager):
+    migrator = SchemaMigrator(solr_config)
+    result = await migrator.migrate(entity_schema.all_migrations)
+    assert result.migrations_run == len(entity_schema.all_migrations)
+
+    return solr_config
diff --git a/test/utils.py b/test/utils.py
index 4407959f9..6e4958aa7 100644
--- a/test/utils.py
+++ b/test/utils.py
@@ -1,45 +1,50 @@
 import os
-import secrets
 import typing
 from collections.abc import Callable
 from dataclasses import asdict, dataclass
-from pathlib import Path
 from typing import Any, Self
+from unittest.mock import MagicMock
 
 from authzed.api.v1 import AsyncClient, SyncClient
-from cryptography.hazmat.primitives import serialization
-from cryptography.hazmat.primitives.asymmetric import rsa
-from cryptography.hazmat.primitives.asymmetric.types import PublicKeyTypes
 from sanic import Request
 from sanic_testing.testing import ASGI_HOST, ASGI_PORT, SanicASGITestClient, TestingResponse
 from sqlalchemy.ext.asyncio import AsyncSession
-from yaml import safe_load
 
 import renku_data_services.base_models as base_models
-from renku_data_services import errors
-from renku_data_services.app_config.config import Config, SentryConfig, TrustedProxiesConfig
 from renku_data_services.authn.dummy import DummyAuthenticator, DummyUserStore
 from renku_data_services.authz.authz import Authz
 from renku_data_services.authz.config import AuthzConfig
+from renku_data_services.base_models.metrics import MetricsService
+from renku_data_services.connected_services.db import ConnectedServicesRepository
 from renku_data_services.crc import models as rp_models
-from renku_data_services.crc.db import ResourcePoolRepository
-from renku_data_services.data_api.server_options import (
-    ServerOptions,
-    ServerOptionsDefaults,
-    generate_default_resource_pool,
-)
+from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository, UserRepository
+from renku_data_services.data_api.config import Config as AppConfig
+from renku_data_services.data_api.dependencies import DependencyManager
+from renku_data_services.data_connectors.db import DataConnectorRepository, DataConnectorSecretRepository
 from renku_data_services.db_config.config import DBConfig
 from renku_data_services.git.gitlab import 
DummyGitlabAPI from renku_data_services.k8s.clients import DummyCoreClient, DummySchedulingClient from renku_data_services.k8s.quota import QuotaRepository -from renku_data_services.message_queue.config import RedisConfig -from renku_data_services.message_queue.redis_queue import RedisQueue -from renku_data_services.notebooks.config import NotebooksConfig +from renku_data_services.message_queue.db import ReprovisioningRepository +from renku_data_services.metrics.db import MetricsRepository +from renku_data_services.namespace.db import GroupRepository +from renku_data_services.platform.db import PlatformRepository +from renku_data_services.project.db import ( + ProjectMemberRepository, + ProjectMigrationRepository, + ProjectRepository, + ProjectSessionSecretRepository, +) +from renku_data_services.repositories.db import GitRepositoriesRepository +from renku_data_services.search.db import SearchUpdatesRepo +from renku_data_services.search.reprovision import SearchReprovision +from renku_data_services.secrets.db import LowLevelUserSecretsRepo, UserSecretsRepo +from renku_data_services.session.db import SessionRepository from renku_data_services.storage import models as storage_models from renku_data_services.storage.db import StorageRepository from renku_data_services.users import models as user_preferences_models -from renku_data_services.users.config import UserPreferencesConfig from renku_data_services.users.db import UserPreferencesRepository +from renku_data_services.users.db import UserRepo as KcUserRepo from renku_data_services.users.dummy_kc_api import DummyKeycloakAPI from renku_data_services.users.kc_api import IKeycloakAPI @@ -87,8 +92,8 @@ def async_session_maker(self) -> Callable[..., AsyncSession]: return StackSessionMaker(self) @classmethod - def from_env(cls, prefix: str = "") -> Self: - db = DBConfig.from_env(prefix) + def from_env(cls) -> Self: + db = DBConfig.from_env() this = cls() this.push(db) return this @@ -126,8 +131,8 @@ def current(self) -> AuthzConfig: return self.stack[-1] @classmethod - def from_env(cls, prefix: str = "") -> Self: - config = AuthzConfig.from_env(prefix) + def from_env(cls) -> Self: + config = AuthzConfig.from_env() this = cls() this.push(config) return this @@ -153,38 +158,23 @@ def client(self) -> AsyncClient: @dataclass -class TestAppConfig(Config): +class TestDependencyManager(DependencyManager): """Test class that can handle isolated dbs and authz instances.""" @classmethod - def from_env(cls, dummy_users: list[user_preferences_models.UnsavedUserInfo], prefix: str = "") -> "Config": + def from_env( + cls, dummy_users: list[user_preferences_models.UnsavedUserInfo], prefix: str = "" + ) -> "DependencyManager": """Create a config from environment variables.""" - + db = DBConfigStack.from_env() + config = AppConfig.from_env(db) user_store: base_models.UserStore authenticator: base_models.Authenticator gitlab_authenticator: base_models.Authenticator gitlab_client: base_models.GitlabAPIProtocol - user_preferences_config: UserPreferencesConfig - version = os.environ.get(f"{prefix}VERSION", "0.0.1") - server_options_file = os.environ.get("SERVER_OPTIONS") - server_defaults_file = os.environ.get("SERVER_DEFAULTS") k8s_namespace = os.environ.get("K8S_NAMESPACE", "default") - max_pinned_projects = int(os.environ.get(f"{prefix}MAX_PINNED_PROJECTS", "10")) - user_preferences_config = UserPreferencesConfig(max_pinned_projects=max_pinned_projects) - db = DBConfigStack.from_env(prefix) + config.authz_config = AuthzConfigStack.from_env() kc_api: IKeycloakAPI 
- secrets_service_public_key: PublicKeyTypes - gitlab_url: str | None - - encryption_key = secrets.token_bytes(32) - secrets_service_public_key_path = os.getenv(f"{prefix}SECRETS_SERVICE_PUBLIC_KEY_PATH") - if secrets_service_public_key_path is not None: - secrets_service_public_key = serialization.load_pem_public_key( - Path(secrets_service_public_key_path).read_bytes() - ) - else: - private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) - secrets_service_public_key = private_key.public_key() authenticator = DummyAuthenticator() gitlab_authenticator = DummyAuthenticator() @@ -192,65 +182,166 @@ def from_env(cls, dummy_users: list[user_preferences_models.UnsavedUserInfo], pr user_always_exists = os.environ.get("DUMMY_USERSTORE_USER_ALWAYS_EXISTS", "true").lower() == "true" user_store = DummyUserStore(user_always_exists=user_always_exists) gitlab_client = DummyGitlabAPI() - kc_api = DummyKeycloakAPI(users=[i._to_keycloak_dict() for i in dummy_users]) - redis = RedisConfig.fake() - gitlab_url = None - - if not isinstance(secrets_service_public_key, rsa.RSAPublicKey): - raise errors.ConfigurationError(message="Secret service public key is not an RSAPublicKey") - - sentry = SentryConfig.from_env(prefix) - trusted_proxies = TrustedProxiesConfig.from_env(prefix) - message_queue = RedisQueue(redis) - nb_config = NotebooksConfig.from_env(db) + kc_api = DummyKeycloakAPI(users=[i.to_keycloak_dict() for i in dummy_users]) + + authz = NonCachingAuthz(config.authz_config) + search_updates_repo = SearchUpdatesRepo(session_maker=config.db.async_session_maker) + group_repo = GroupRepository( + session_maker=config.db.async_session_maker, + group_authz=authz, + search_updates_repo=search_updates_repo, + ) + kc_user_repo = KcUserRepo( + session_maker=config.db.async_session_maker, + group_repo=group_repo, + search_updates_repo=search_updates_repo, + encryption_key=config.secrets.encryption_key, + authz=authz, + ) + user_repo = UserRepository( + session_maker=config.db.async_session_maker, + quotas_repo=quota_repo, + user_repo=kc_user_repo, + ) + rp_repo = ResourcePoolRepository(session_maker=config.db.async_session_maker, quotas_repo=quota_repo) + storage_repo = StorageRepository( + session_maker=config.db.async_session_maker, + gitlab_client=gitlab_client, + user_repo=kc_user_repo, + secret_service_public_key=config.secrets.public_key, + ) + reprovisioning_repo = ReprovisioningRepository(session_maker=config.db.async_session_maker) + project_repo = ProjectRepository( + session_maker=config.db.async_session_maker, + authz=authz, + group_repo=group_repo, + search_updates_repo=search_updates_repo, + ) + session_repo = SessionRepository( + session_maker=config.db.async_session_maker, + project_authz=authz, + resource_pools=rp_repo, + shipwright_client=None, + builds_config=config.builds, + ) + project_migration_repo = ProjectMigrationRepository( + session_maker=config.db.async_session_maker, + authz=authz, + project_repo=project_repo, + session_repo=session_repo, + ) + project_member_repo = ProjectMemberRepository( + session_maker=config.db.async_session_maker, + authz=authz, + ) + project_session_secret_repo = ProjectSessionSecretRepository( + session_maker=config.db.async_session_maker, + authz=authz, + user_repo=kc_user_repo, + secret_service_public_key=config.secrets.public_key, + ) + user_preferences_repo = UserPreferencesRepository( + session_maker=config.db.async_session_maker, + user_preferences_config=config.user_preferences, + ) + low_level_user_secrets_repo = 
LowLevelUserSecretsRepo( + session_maker=config.db.async_session_maker, + ) + user_secrets_repo = UserSecretsRepo( + session_maker=config.db.async_session_maker, + low_level_repo=low_level_user_secrets_repo, + user_repo=kc_user_repo, + secret_service_public_key=config.secrets.public_key, + ) + connected_services_repo = ConnectedServicesRepository( + session_maker=config.db.async_session_maker, + encryption_key=config.secrets.encryption_key, + async_oauth2_client_class=cls.async_oauth2_client_class, + internal_gitlab_url=config.gitlab_url, + ) + git_repositories_repo = GitRepositoriesRepository( + session_maker=config.db.async_session_maker, + connected_services_repo=connected_services_repo, + internal_gitlab_url=config.gitlab_url, + ) + platform_repo = PlatformRepository( + session_maker=config.db.async_session_maker, + ) + data_connector_repo = DataConnectorRepository( + session_maker=config.db.async_session_maker, + authz=authz, + project_repo=project_repo, + group_repo=group_repo, + search_updates_repo=search_updates_repo, + ) + data_connector_secret_repo = DataConnectorSecretRepository( + session_maker=config.db.async_session_maker, + data_connector_repo=data_connector_repo, + user_repo=kc_user_repo, + secret_service_public_key=config.secrets.public_key, + authz=authz, + ) + search_reprovisioning = SearchReprovision( + search_updates_repo=search_updates_repo, + reprovisioning_repo=reprovisioning_repo, + solr_config=config.solr, + user_repo=kc_user_repo, + group_repo=group_repo, + project_repo=project_repo, + data_connector_repo=data_connector_repo, + ) + cluster_repo = ClusterRepository(session_maker=config.db.async_session_maker) + metrics_repo = MetricsRepository(session_maker=config.db.async_session_maker) + metrics_mock = MagicMock(spec=MetricsService) return cls( - version=version, + config=config, authenticator=authenticator, gitlab_authenticator=gitlab_authenticator, gitlab_client=gitlab_client, user_store=user_store, quota_repo=quota_repo, - sentry=sentry, - trusted_proxies=trusted_proxies, - server_defaults_file=server_defaults_file, - server_options_file=server_options_file, - user_preferences_config=user_preferences_config, - db=db, - redis=redis, kc_api=kc_api, - message_queue=message_queue, - encryption_key=encryption_key, - secrets_service_public_key=secrets_service_public_key, - gitlab_url=gitlab_url, - authz_config=AuthzConfigStack.from_env(), - nb_config=nb_config, + user_repo=user_repo, + rp_repo=rp_repo, + storage_repo=storage_repo, + reprovisioning_repo=reprovisioning_repo, + search_updates_repo=search_updates_repo, + search_reprovisioning=search_reprovisioning, + project_repo=project_repo, + project_migration_repo=project_migration_repo, + project_member_repo=project_member_repo, + project_session_secret_repo=project_session_secret_repo, + group_repo=group_repo, + session_repo=session_repo, + user_preferences_repo=user_preferences_repo, + kc_user_repo=kc_user_repo, + user_secrets_repo=user_secrets_repo, + connected_services_repo=connected_services_repo, + git_repositories_repo=git_repositories_repo, + platform_repo=platform_repo, + data_connector_repo=data_connector_repo, + data_connector_secret_repo=data_connector_secret_repo, + cluster_repo=cluster_repo, + metrics_repo=metrics_repo, + metrics=metrics_mock, + shipwright_client=None, + authz=authz, + low_level_user_secrets_repo=low_level_user_secrets_repo, ) def __post_init__(self) -> None: self.spec = self.load_apispec() - if self.default_resource_pool_file is not None: - with open(self.default_resource_pool_file) 
as f: - self.default_resource_pool = rp_models.ResourcePool.from_dict(safe_load(f)) - if self.server_defaults_file is not None and self.server_options_file is not None: - with open(self.server_options_file) as f: - options = ServerOptions.model_validate(safe_load(f)) - with open(self.server_defaults_file) as f: - defaults = ServerOptionsDefaults.model_validate(safe_load(f)) - self.default_resource_pool = generate_default_resource_pool(options, defaults) - - self.authz = NonCachingAuthz(self.authz_config) - class SanicReusableASGITestClient(SanicASGITestClient): - """Reuasable async test client for sanic. + """Reusable async test client for sanic. Sanic has 3 test clients, SanicTestClient (sync), SanicASGITestClient (async) and ReusableClient (sync). The first two will drop all routes and server state before each request (!) and calculate all routes again and execute server start code again (!), whereas the latter only does that once per client, but isn't async. This can cost as much as 40% of test execution time. - This class is essentially a combination of SanicASGITestClient and ReuasbleClient. + This class is essentially a combination of SanicASGITestClient and ReusableClient. """ set_up = False @@ -315,12 +406,6 @@ async def request( # type: ignore return None, response # type: ignore -def remove_id_from_quota(quota: rp_models.Quota) -> rp_models.Quota: - kwargs = asdict(quota) - kwargs["id"] = None - return rp_models.Quota(**kwargs) - - def remove_id_from_rc(rc: rp_models.ResourceClass) -> rp_models.ResourceClass: kwargs = asdict(rc) kwargs["id"] = None @@ -328,24 +413,7 @@ def remove_id_from_rc(rc: rp_models.ResourceClass) -> rp_models.ResourceClass: def remove_quota_from_rc(rc: rp_models.ResourceClass) -> rp_models.ResourceClass: - return rc.update(quota=None) - - -def remove_id_from_rp(rp: rp_models.ResourcePool) -> rp_models.ResourcePool: - quota = rp.quota - if isinstance(quota, rp_models.Quota): - quota = remove_id_from_quota(quota) - classes = [remove_quota_from_rc(remove_id_from_rc(rc)) for rc in rp.classes] - return rp_models.ResourcePool( - name=rp.name, - id=None, - quota=quota, - classes=classes, - default=rp.default, - public=rp.public, - idle_threshold=rp.idle_threshold, - hibernation_threshold=rp.hibernation_threshold, - ) + return rc.update(quota={}) def remove_id_from_user(user: base_models.User) -> base_models.User: @@ -362,12 +430,11 @@ async def create_rp( rp: rp_models.ResourcePool, repo: ResourcePoolRepository, api_user: base_models.APIUser ) -> rp_models.ResourcePool: inserted_rp = await repo.insert_resource_pool(api_user, rp) + assert inserted_rp is not None assert inserted_rp.id is not None assert inserted_rp.quota is not None assert all([rc.id is not None for rc in inserted_rp.classes]) - inserted_rp_no_ids = remove_id_from_rp(inserted_rp) - assert rp == inserted_rp_no_ids, f"resource pools do not match {rp} != {inserted_rp_no_ids}" retrieved_rps = await repo.get_resource_pools(api_user, inserted_rp.id) assert len(retrieved_rps) == 1 assert inserted_rp.id == retrieved_rps[0].id diff --git a/user/v1/asyncapi.yaml b/user/v1/asyncapi.yaml deleted file mode 100644 index 7919f21a7..000000000 --- a/user/v1/asyncapi.yaml +++ /dev/null @@ -1,56 +0,0 @@ -asyncapi: 3.0.0 -info: - title: Project Events - version: 0.0.1 -servers: - redis: - url: renku-redis - protocol: redis - description: Renku Redis Instance -channels: - user.added: - publish: - messages: - userAdded: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - 
payload: - - $ref: './events/added.avsc#/UserAdded' - traits: - - $ref: '#/components/messageTraits/headers' - user.updated: - publish: - messages: - userUpdated: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/created.avsc#/UserUpdated' - traits: - - $ref: '#/components/messageTraits/headers' - user.removed: - publish: - messages: - userRemoved: - schemaFormat: "application/vnd.apache.avro;version=1.9.0" - payload: - type: object - properties: - payload: - - $ref: './events/created.avsc#/UserRemoved' - traits: - - $ref: '#/components/messageTraits/headers' -components: - messageTraits: - headers: - payload: - type: object - properties: - id: - type: string - headers: - - $ref: '../../common/v1/headers.avsc#/Header' diff --git a/user/v1/events/added.avsc b/user/v1/events/added.avsc deleted file mode 100644 index d7af45296..000000000 --- a/user/v1/events/added.avsc +++ /dev/null @@ -1,24 +0,0 @@ -{ - "type": "record", - "name":"UserAdded", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a new user is added", - "fields":[ - { - "name":"id", - "type":"string" - }, - { - "name": "firstName", - "type":["null","string"] - }, - { - "name":"lastName", - "type":["null","string"] - }, - { - "name":"email", - "type":["null","string"] - } - ] -} diff --git a/user/v1/events/removed.avsc b/user/v1/events/removed.avsc deleted file mode 100644 index 90518fd20..000000000 --- a/user/v1/events/removed.avsc +++ /dev/null @@ -1,13 +0,0 @@ - -{ - "type": "record", - "name":"UserRemoved", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a user is removed", - "fields":[ - { - "name":"id", - "type":"string" - } - ] -} diff --git a/user/v1/events/updated.avsc b/user/v1/events/updated.avsc deleted file mode 100644 index 8af8c4b0b..000000000 --- a/user/v1/events/updated.avsc +++ /dev/null @@ -1,24 +0,0 @@ -{ - "type": "record", - "name":"UserUpdated", - "namespace":"io.renku.events.v1", - "doc":"Event raised when a user is updated", - "fields":[ - { - "name":"id", - "type":"string" - }, - { - "name": "firstName", - "type":["null","string"] - }, - { - "name":"lastName", - "type":["null","string"] - }, - { - "name":"email", - "type":["null","string"] - } - ] -} From 8896e4548c356681bd1316c139ba00e7ba5a82a2 Mon Sep 17 00:00:00 2001 From: Tasko Olevski Date: Thu, 24 Jul 2025 20:26:10 +0200 Subject: [PATCH 13/15] squashme: update apispec.py --- components/renku_data_services/users/apispec.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/components/renku_data_services/users/apispec.py b/components/renku_data_services/users/apispec.py index 2b45b2a95..a7a47de45 100644 --- a/components/renku_data_services/users/apispec.py +++ b/components/renku_data_services/users/apispec.py @@ -1,6 +1,6 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2025-05-08T14:23:21+00:00 +# timestamp: 2025-07-24T18:25:39+00:00 from __future__ import annotations @@ -216,12 +216,6 @@ class SecretWithId(BaseAPISpec): min_length=1, pattern="^[a-zA-Z0-9_\\-.]*$", ) - kind: SecretKind - expiration_timestamp: Optional[datetime] = Field( - None, - description="The date and time the secret is not valid anymore (this is in any timezone)", - example="2030-11-01T17:32:28UTC+01:00", - ) modification_date: datetime = Field( ..., description="The date and time the secret was created or modified (this is always in UTC)", @@ -230,6 +224,11 @@ class SecretWithId(BaseAPISpec): kind: SecretKind 
session_secret_slot_ids: List[Ulid] data_connector_ids: List[Ulid] + expiration_timestamp: Optional[datetime] = Field( + None, + description="The date and time the secret is not valid anymore (this is in any timezone)", + examples=["2030-11-01T17:32:28UTC+01:00"], + ) class SecretPost(BaseAPISpec): @@ -261,7 +260,7 @@ class SecretPost(BaseAPISpec): expiration_timestamp: Optional[datetime] = Field( None, description="The date and time the secret is not valid anymore (this is in any timezone)", - example="2030-11-01T17:32:28UTC+01:00", + examples=["2030-11-01T17:32:28UTC+01:00"], ) @@ -293,7 +292,7 @@ class SecretPatch(BaseAPISpec): expiration_timestamp: Optional[datetime] = Field( None, description="The date and time the secret is not valid anymore (this is in any timezone)", - example="2030-11-01T17:32:28UTC+01:00", + examples=["2030-11-01T17:32:28UTC+01:00"], ) From 43895c98e5f26c11935697bd18e006df6f220fba Mon Sep 17 00:00:00 2001 From: Tasko Olevski Date: Thu, 24 Jul 2025 20:37:59 +0200 Subject: [PATCH 14/15] squashme: fix linting --- .../data_connectors/blueprints.py | 4 +- components/renku_data_services/project/db.py | 4 +- components/renku_data_services/secrets/db.py | 8 ++- .../renku_data_services/secrets/models.py | 1 + .../storage/rclone_patches.py | 60 +++++++++++++++++++ components/renku_data_services/users/core.py | 1 + .../data_api/test_secret.py | 29 +++++---- .../data_api/test_storage.py | 2 +- 8 files changed, 90 insertions(+), 19 deletions(-) diff --git a/components/renku_data_services/data_connectors/blueprints.py b/components/renku_data_services/data_connectors/blueprints.py index a64cec687..cba706dba 100644 --- a/components/renku_data_services/data_connectors/blueprints.py +++ b/components/renku_data_services/data_connectors/blueprints.py @@ -9,7 +9,7 @@ from sanic_ext import validate from ulid import ULID -from renku_data_services import base_models, errors +from renku_data_services import base_models from renku_data_services.base_api.auth import ( authenticate, only_authenticated, @@ -471,7 +471,7 @@ async def openbis_transform_session_token_to_pat() -> ( openbis_pat[1], ) except Exception as e: - raise errors.ProgrammingError(message=str(e)) + raise errors.ProgrammingError(message=str(e)) from e raise errors.ValidationError(message="The openBIS session token must be a string value.") raise errors.ValidationError(message="The openBIS storage has only one secret: session_token") diff --git a/components/renku_data_services/project/db.py b/components/renku_data_services/project/db.py index e960de8f1..533998a3d 100644 --- a/components/renku_data_services/project/db.py +++ b/components/renku_data_services/project/db.py @@ -840,7 +840,9 @@ async def patch_session_secrets( ) if session_launcher_secret_orm := existing_secrets_as_dict.get(slot_id): session_launcher_secret_orm.secret.update( - encrypted_value=encrypted_value, encrypted_key=encrypted_key + encrypted_value=encrypted_value, + encrypted_key=encrypted_key, + expiration_timestamp=session_launcher_secret_orm.secret.expiration_timestamp, ) else: name = secret_slot.name diff --git a/components/renku_data_services/secrets/db.py b/components/renku_data_services/secrets/db.py index 73f9e5b23..f1003be7f 100644 --- a/components/renku_data_services/secrets/db.py +++ b/components/renku_data_services/secrets/db.py @@ -3,7 +3,7 @@ import random import string from collections.abc import AsyncGenerator, Callable, Sequence -from datetime import UTC, datetime, timedelta +from datetime import UTC, datetime from typing import cast from 
cryptography.hazmat.primitives.asymmetric import rsa @@ -200,7 +200,11 @@ async def update_secret(self, requested_by: APIUser, secret_id: ULID, patch: Sec secret_service_public_key=self.secret_service_public_key, secret_value=patch.secret_value, ) - secret.update(encrypted_value=encrypted_value, encrypted_key=encrypted_key) + secret.update( + encrypted_value=encrypted_value, + encrypted_key=encrypted_key, + expiration_timestamp=patch.expiration_timestamp, + ) return secret.dump() diff --git a/components/renku_data_services/secrets/models.py b/components/renku_data_services/secrets/models.py index de604577c..9c4186c52 100644 --- a/components/renku_data_services/secrets/models.py +++ b/components/renku_data_services/secrets/models.py @@ -90,3 +90,4 @@ class SecretPatch: name: str | None default_filename: str | None secret_value: str | None = field(repr=False) + expiration_timestamp: datetime | None diff --git a/components/renku_data_services/storage/rclone_patches.py b/components/renku_data_services/storage/rclone_patches.py index 230a4b808..c563491fb 100644 --- a/components/renku_data_services/storage/rclone_patches.py +++ b/components/renku_data_services/storage/rclone_patches.py @@ -256,3 +256,63 @@ def apply_patches(spec: list[dict[str, Any]]) -> None: for patch in patches: patch(spec) + + +def __patch_schema_add_openbis_type(spec: list[dict[str, Any]]) -> None: + """Adds a fake type to help with setting up openBIS storage.""" + spec.append( + { + "Name": "openbis", + "Description": "openBIS", + "Prefix": "openbis", + "Options": [ + { + "Name": "host", + "Help": 'openBIS host to connect to.\n\nE.g. "openbis-eln-lims.ethz.ch".', + "Provider": "", + "Default": "", + "Value": None, + "Examples": [ + { + "Value": "openbis-eln-lims.ethz.ch", + "Help": "Public openBIS demo instance", + "Provider": "", + }, + ], + "ShortOpt": "", + "Hide": 0, + "Required": True, + "IsPassword": False, + "NoPrefix": False, + "Advanced": False, + "Exclusive": False, + "Sensitive": False, + "DefaultStr": "", + "ValueStr": "", + "Type": "string", + }, + { + "Name": "session_token", + "Help": "openBIS session token", + "Provider": "", + "Default": "", + "Value": None, + "ShortOpt": "", + "Hide": 0, + "Required": True, + "IsPassword": True, + "NoPrefix": False, + "Advanced": False, + "Exclusive": False, + "Sensitive": True, + "DefaultStr": "", + "ValueStr": "", + "Type": "string", + }, + ], + "CommandHelp": None, + "Aliases": None, + "Hide": False, + "MetadataInfo": None, + } + ) diff --git a/components/renku_data_services/users/core.py b/components/renku_data_services/users/core.py index 3f2c790b9..aeef6ade0 100644 --- a/components/renku_data_services/users/core.py +++ b/components/renku_data_services/users/core.py @@ -21,4 +21,5 @@ def validate_secret_patch(patch: apispec.SecretPatch) -> SecretPatch: name=patch.name, default_filename=patch.default_filename, secret_value=patch.value, + expiration_timestamp=patch.expiration_timestamp, ) diff --git a/test/bases/renku_data_services/data_api/test_secret.py b/test/bases/renku_data_services/data_api/test_secret.py index 41beb2ebf..2e6f48e3f 100644 --- a/test/bases/renku_data_services/data_api/test_secret.py +++ b/test/bases/renku_data_services/data_api/test_secret.py @@ -27,14 +27,17 @@ @pytest.fixture def create_secret(sanic_client: SanicASGITestClient, user_headers): async def create_secret_helper( - name: str, value: str, kind: str = "general", default_filename: str | None = None, expiration_timestamp: str | None = None + name: str, + value: str, + kind: str = 
"general", + default_filename: str | None = None, + expiration_timestamp: str | None = None, ) -> dict[str, Any]: payload = {"name": name, "value": value, "kind": kind} if default_filename: payload["default_filename"] = default_filename if expiration_timestamp: payload["expiration_timestamp"] = expiration_timestamp - _, response = await sanic_client.post("/api/data/user/secrets", headers=user_headers, json=payload) @@ -101,7 +104,7 @@ async def test_get_one_secret(sanic_client: SanicASGITestClient, user_headers, c secret = await create_secret("secret-2", "value-2") await create_secret("secret-3", "value-3") - _, response = await sanic_client.get(f"/api/data/user/secrets/{secret["id"]}", headers=user_headers) + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret['id']}", headers=user_headers) assert response.status_code == 200, response.text assert response.json is not None assert response.json["name"] == secret["name"] @@ -115,13 +118,13 @@ async def test_get_one_secret_not_expired(sanic_client: SanicASGITestClient, use secret_1 = await create_secret("secret-1", "value-1", expiration_timestamp=expiration_timestamp) secret_2 = await create_secret("secret-2", "value-2", expiration_timestamp="2029-12-31") - _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_1["id"]}", headers=user_headers) + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_1['id']}", headers=user_headers) assert response.status_code == 200, response.text assert response.json is not None assert response.json["name"] == "secret-1" assert response.json["id"] == secret_1["id"] - _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_2["id"]}", headers=user_headers) + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_2['id']}", headers=user_headers) assert response.status_code == 200, response.text assert response.json is not None assert response.json["name"] == "secret-2" @@ -129,7 +132,7 @@ async def test_get_one_secret_not_expired(sanic_client: SanicASGITestClient, use time.sleep(20) - _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_1["id"]}", headers=user_headers) + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret_1['id']}", headers=user_headers) assert response.status_code == 404 @@ -193,7 +196,7 @@ async def test_get_delete_a_secret(sanic_client: SanicASGITestClient, user_heade secret = await create_secret("secret-2", "value-2") await create_secret("secret-3", "value-3") - _, response = await sanic_client.delete(f"/api/data/user/secrets/{secret["id"]}", headers=user_headers) + _, response = await sanic_client.delete(f"/api/data/user/secrets/{secret['id']}", headers=user_headers) assert response.status_code == 204, response.text _, response = await sanic_client.get("/api/data/user/secrets", headers=user_headers) @@ -209,12 +212,12 @@ async def test_get_update_a_secret(sanic_client: SanicASGITestClient, user_heade await create_secret("secret-3", "value-3") _, response = await sanic_client.patch( - f"/api/data/user/secrets/{secret["id"]}", headers=user_headers, json={"name": "new-name", "value": "new-value"} + f"/api/data/user/secrets/{secret['id']}", headers=user_headers, json={"name": "new-name", "value": "new-value"} ) assert response.status_code == 422 _, response = await sanic_client.patch( - f"/api/data/user/secrets/{secret["id"]}", headers=user_headers, json={"value": "new-value"} + f"/api/data/user/secrets/{secret['id']}", headers=user_headers, json={"value": "new-value"} ) assert 
response.status_code == 200, response.text assert response.json is not None @@ -223,7 +226,7 @@ async def test_get_update_a_secret(sanic_client: SanicASGITestClient, user_heade assert response.json["expiration_timestamp"] is None assert "value" not in response.json - _, response = await sanic_client.get(f"/api/data/user/secrets/{secret["id"]}", headers=user_headers) + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret['id']}", headers=user_headers) assert response.status_code == 200, response.text assert response.json is not None assert response.json["id"] == secret["id"] @@ -232,13 +235,13 @@ async def test_get_update_a_secret(sanic_client: SanicASGITestClient, user_heade assert "value" not in response.json _, response = await sanic_client.patch( - f"/api/data/user/secrets/{secret["id"]}", + f"/api/data/user/secrets/{secret['id']}", headers=user_headers, json={"value": "newest-value", "expiration_timestamp": "2029-12-31"}, ) assert response.status_code == 200, response.text - _, response = await sanic_client.get(f"/api/data/user/secrets/{secret["id"]}", headers=user_headers) + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret['id']}", headers=user_headers) assert response.status_code == 200, response.text assert response.json is not None assert response.json["id"] == secret["id"] @@ -255,7 +258,7 @@ async def test_cannot_get_another_user_secret( secret = await create_secret("secret-2", "value-2") await create_secret("secret-3", "value-3") - _, response = await sanic_client.get(f"/api/data/user/secrets/{secret["id"]}", headers=admin_headers) + _, response = await sanic_client.get(f"/api/data/user/secrets/{secret['id']}", headers=admin_headers) assert response.status_code == 404, response.text assert "cannot be found" in response.json["error"]["message"] diff --git a/test/bases/renku_data_services/data_api/test_storage.py b/test/bases/renku_data_services/data_api/test_storage.py index 976d46675..cc6c121a8 100644 --- a/test/bases/renku_data_services/data_api/test_storage.py +++ b/test/bases/renku_data_services/data_api/test_storage.py @@ -12,8 +12,8 @@ from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.migrations.core import run_migrations_for_app from renku_data_services.storage.rclone import RCloneValidator -from renku_data_services.utils.core import get_openbis_session_token from renku_data_services.storage.rclone_patches import BANNED_STORAGE, OAUTH_PROVIDERS +from renku_data_services.utils.core import get_openbis_session_token from test.utils import SanicReusableASGITestClient _valid_storage: dict[str, Any] = { From dc73d0a8f13cb24429f399f12cc1ac76b1558971 Mon Sep 17 00:00:00 2001 From: Tasko Olevski Date: Wed, 8 Oct 2025 22:51:50 +0200 Subject: [PATCH 15/15] squashme: rebase against main --- .github/CODEOWNERS | 2 +- .github/workflows/acceptance-tests.yml | 141 ++-- .github/workflows/save_cache.yml | 2 +- .github/workflows/test_publish.yml | 12 +- .gitignore | 2 + .pre-commit-config.yaml | 4 +- DEVELOPING.md | 24 + Makefile | 74 +- bases/renku_data_services/data_api/app.py | 16 +- bases/renku_data_services/data_api/config.py | 14 +- .../data_api/dependencies.py | 85 ++- bases/renku_data_services/data_api/main.py | 49 +- bases/renku_data_services/k8s_cache/config.py | 29 +- .../k8s_cache/dependencies.py | 3 +- bases/renku_data_services/k8s_cache/main.py | 31 +- .../secrets_storage_api/app.py | 2 +- .../secrets_storage_api/dependencies.py | 51 +- .../secrets_storage_api/main.py | 4 +- 
.../renku_data_services/app_config/config.py | 14 +- .../renku_data_services/authn/gitlab.py | 11 + .../renku_data_services/authn/keycloak.py | 29 + .../renku_data_services/base_api/auth.py | 27 - .../renku_data_services/base_models/core.py | 2 +- .../connected_services/api.spec.yaml | 39 + .../connected_services/apispec.py | 37 +- .../connected_services/blueprints.py | 19 +- .../connected_services/core.py | 81 ++- .../connected_services/db.py | 154 +++- .../dummy_async_oauth2_client.py | 4 +- .../connected_services/external_models.py | 23 +- .../connected_services/models.py | 81 ++- .../connected_services/orm.py | 9 +- .../connected_services/provider_adapters.py | 107 ++- .../connected_services/utils.py | 33 + .../renku_data_services/crc/api.spec.yaml | 95 +++ components/renku_data_services/crc/apispec.py | 149 +++- .../renku_data_services/crc/blueprints.py | 35 +- components/renku_data_services/crc/core.py | 81 ++- components/renku_data_services/crc/db.py | 118 +-- components/renku_data_services/crc/models.py | 145 +++- components/renku_data_services/crc/orm.py | 86 ++- .../data_connectors/core.py | 4 - .../renku_data_services/data_connectors/db.py | 4 +- components/renku_data_services/git/gitlab.py | 9 + .../k8s/client_interfaces.py | 94 ++- components/renku_data_services/k8s/clients.py | 281 ++++--- components/renku_data_services/k8s/config.py | 108 +-- .../renku_data_services/k8s/constants.py | 9 +- components/renku_data_services/k8s/db.py | 231 ++++++ components/renku_data_services/k8s/models.py | 133 ++-- .../{k8s_watcher => k8s}/orm.py | 17 +- components/renku_data_services/k8s/quota.py | 149 ---- .../k8s/watcher/__init__.py | 5 + .../{k8s_watcher => k8s/watcher}/core.py | 115 +-- .../k8s_watcher/__init__.py | 7 - .../renku_data_services/k8s_watcher/db.py | 102 --- .../renku_data_services/migrations/env.py | 2 +- .../35ea9d8f54e8_add_image_registry_url.py | 30 + ...eat_add_support_for_remote_sessions_to_.py | 60 ++ ...cf6_add_missing_entity_slug_constraints.py | 162 +++++ .../8365db35dc76_create_url_redirects.py | 47 ++ ...499b966_change_cluster_name_and_uniuqe_.py | 44 ++ ...add_strip_prefix_option_to_environments.py | 32 + ...e825d95e_add_generic_oidc_provider_type.py | 35 + .../renku_data_services/namespace/orm.py | 81 ++- .../notebooks/api.spec.yaml | 73 +- .../api/amalthea_patches/init_containers.py | 39 +- .../notebooks/api/amalthea_patches/utils.py | 47 +- .../notebooks/api/classes/data_service.py | 108 +-- .../notebooks/api/classes/image.py | 58 +- .../notebooks/api/classes/k8s_client.py | 102 +-- .../notebooks/api/classes/server.py | 47 +- .../renku_data_services/notebooks/apispec.py | 26 +- .../notebooks/blueprints.py | 368 ++-------- .../notebooks/config/__init__.py | 105 ++- .../notebooks/config/dynamic.py | 22 +- .../renku_data_services/notebooks/core.py | 27 +- .../notebooks/core_sessions.py | 683 ++++++++++++++++-- .../notebooks/cr_amalthea_session.py | 269 ++++--- .../notebooks/cr_jupyter_server.py | 16 +- .../renku_data_services/notebooks/crs.py | 81 ++- .../notebooks/image_check.py | 149 ++++ .../renku_data_services/notebooks/models.py | 60 +- .../notebooks/util/authn.py | 41 -- .../notebooks/util/kubernetes_.py | 3 +- .../renku_data_services/notebooks/utils.py | 77 +- .../platform/api.spec.yaml | 183 +++++ .../renku_data_services/platform/apispec.py | 71 +- .../platform/blueprints.py | 118 ++- .../renku_data_services/platform/core.py | 88 +++ components/renku_data_services/platform/db.py | 136 +++- .../renku_data_services/platform/models.py | 32 + 
.../renku_data_services/platform/orm.py | 50 +- components/renku_data_services/project/db.py | 24 +- .../repositories/api.spec.yaml | 2 +- .../repositories/blueprints.py | 2 +- .../renku_data_services/repositories/db.py | 31 +- .../repositories/provider_adapters.py | 7 +- .../renku_data_services/secrets/api.spec.yaml | 2 + .../renku_data_services/secrets/apispec.py | 9 +- .../renku_data_services/secrets/blueprints.py | 34 +- .../renku_data_services/secrets/config.py | 6 +- .../renku_data_services/secrets/core.py | 161 +---- components/renku_data_services/secrets/db.py | 48 +- .../renku_data_services/secrets/models.py | 41 +- .../renku_data_services/session/api.spec.yaml | 26 + .../renku_data_services/session/apispec.py | 14 +- .../renku_data_services/session/constants.py | 4 +- .../renku_data_services/session/core.py | 8 +- components/renku_data_services/session/db.py | 10 + .../renku_data_services/session/k8s_client.py | 26 +- .../renku_data_services/session/models.py | 3 + components/renku_data_services/session/orm.py | 6 +- .../renku_data_services/storage/rclone.py | 12 +- .../storage/rclone_patches.py | 20 +- components/renku_data_services/users/db.py | 32 +- .../renku_data_services/users/models.py | 12 +- components/renku_data_services/users/orm.py | 8 +- components/renku_data_services/utils/core.py | 11 - .../manifests/buildstrategy.yaml | 4 +- flake.lock | 12 +- flake.nix | 18 +- poetry.lock | 6 +- projects/k8s_watcher/poetry.lock | 48 +- projects/k8s_watcher/pyproject.toml | 3 +- projects/renku_data_service/Dockerfile | 2 +- projects/renku_data_service/poetry.lock | 48 +- projects/renku_data_service/pyproject.toml | 3 +- projects/renku_data_tasks/poetry.lock | 51 +- projects/renku_data_tasks/pyproject.toml | 3 +- projects/secrets_storage/poetry.lock | 560 ++++++++++++-- projects/secrets_storage/pyproject.toml | 4 +- .../data_api/__snapshots__/test_projects.ambr | 1 + .../data_api/__snapshots__/test_sessions.ambr | 9 + .../data_api/__snapshots__/test_storage.ambr | 132 ---- .../data_api/test_connected_services.py | 25 +- .../data_api/test_data_connectors.py | 62 ++ .../data_api/test_migrations.py | 150 ++++ .../data_api/test_namespaces.py | 219 +++++- .../data_api/test_notebooks.py | 42 +- .../data_api/test_platform_config.py | 288 ++++++++ .../data_api/test_resource_pools.py | 109 ++- .../data_api/test_schemathesis.py | 2 +- .../data_api/test_secret.py | 36 +- .../data_api/test_sessions.py | 26 + .../data_api/test_storage.py | 61 +- .../connected_services/test_db.py | 240 ++++++ .../connected_services/test_encryption.py | 7 +- .../k8s/test_k8s_adapter.py | 20 +- .../test_notebooks_session_patching.py | 26 + .../notebooks/test_notebooks_utils.py | 181 +++++ test/conftest.py | 1 + test/utils.py | 21 +- 153 files changed, 7295 insertions(+), 2448 deletions(-) create mode 100644 components/renku_data_services/k8s/db.py rename components/renku_data_services/{k8s_watcher => k8s}/orm.py (92%) delete mode 100644 components/renku_data_services/k8s/quota.py create mode 100644 components/renku_data_services/k8s/watcher/__init__.py rename components/renku_data_services/{k8s_watcher => k8s/watcher}/core.py (68%) delete mode 100644 components/renku_data_services/k8s_watcher/__init__.py delete mode 100644 components/renku_data_services/k8s_watcher/db.py create mode 100644 components/renku_data_services/migrations/versions/35ea9d8f54e8_add_image_registry_url.py create mode 100644 components/renku_data_services/migrations/versions/3aa50593f4e4_feat_add_support_for_remote_sessions_to_.py create mode 
100644 components/renku_data_services/migrations/versions/66e2f1271cf6_add_missing_entity_slug_constraints.py create mode 100644 components/renku_data_services/migrations/versions/8365db35dc76_create_url_redirects.py create mode 100644 components/renku_data_services/migrations/versions/c8061499b966_change_cluster_name_and_uniuqe_.py create mode 100644 components/renku_data_services/migrations/versions/e117405fed51_add_strip_prefix_option_to_environments.py create mode 100644 components/renku_data_services/migrations/versions/fe61e825d95e_add_generic_oidc_provider_type.py create mode 100644 components/renku_data_services/notebooks/image_check.py delete mode 100644 components/renku_data_services/notebooks/util/authn.py create mode 100644 test/components/renku_data_services/connected_services/test_db.py create mode 100644 test/components/renku_data_services/notebooks/test_notebooks_session_patching.py create mode 100644 test/components/renku_data_services/notebooks/test_notebooks_utils.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index d7b98d8db..c4ab9880e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1 +1 @@ -* @SwissDataScienceCenter/renku-python-maintainers +* @SwissDataScienceCenter/renku-python-maintainers @sgaist @SalimKayal diff --git a/.github/workflows/acceptance-tests.yml b/.github/workflows/acceptance-tests.yml index 5e47456a7..f6cb198da 100644 --- a/.github/workflows/acceptance-tests.yml +++ b/.github/workflows/acceptance-tests.yml @@ -1,5 +1,6 @@ name: Acceptance tests + permissions: contents: read pull-requests: read @@ -14,7 +15,7 @@ on: - closed concurrency: - group: ${{ github.workflow }}-${{ github.ref || github.run_id }} + group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref || github.run_id }} cancel-in-progress: true jobs: @@ -22,52 +23,54 @@ jobs: name: Analyze deploy string runs-on: ubuntu-24.04 outputs: - pr-contains-string: ${{ steps.deploy-comment.outputs.pr-contains-string }} - renku: ${{ steps.deploy-comment.outputs.renku}} - renku-core: ${{ steps.deploy-comment.outputs.renku-core}} - renku-gateway: ${{ steps.deploy-comment.outputs.renku-gateway}} - renku-graph: ${{ steps.deploy-comment.outputs.renku-graph}} - renku-notebooks: ${{ steps.deploy-comment.outputs.renku-notebooks}} - renku-ui: ${{ steps.deploy-comment.outputs.renku-ui}} - amalthea-sessions: ${{ steps.deploy-comment.outputs.amalthea-sessions}} - amalthea: ${{ steps.deploy-comment.outputs.amalthea}} - test-enabled: ${{ steps.deploy-comment.outputs.test-enabled}} - test-legacy-enabled: ${{ steps.deploy-comment.outputs.test-legacy-enabled}} - extra-values: ${{ steps.deploy-comment.outputs.extra-values}} + renku: ${{ steps.deploy-comment.outputs.renku }} + deploy: ${{ steps.deploy-comment.outputs.pr-contains-string }} + renku-core: ${{ steps.deploy-comment.outputs.renku-core }} + renku-gateway: ${{ steps.deploy-comment.outputs.renku-gateway }} + renku-graph: ${{ steps.deploy-comment.outputs.renku-graph }} + renku-notebooks: ${{ steps.deploy-comment.outputs.renku-notebooks }} + renku-ui: ${{ steps.deploy-comment.outputs.renku-ui }} + renku-data-services: ${{ steps.deploy-comment.outputs.renku-data-services }} + amalthea: ${{ steps.deploy-comment.outputs.amalthea }} + amalthea-sessions: ${{ steps.deploy-comment.outputs.amalthea-sessions }} + test-enabled: ${{ steps.deploy-comment.outputs.test-enabled == 'true' }} + test-legacy-enabled: ${{ steps.deploy-comment.outputs.test-legacy-enabled == 'true' }} + extra-values: ${{ 
steps.deploy-comment.outputs.extra-values }} steps: - id: deploy-comment - uses: SwissDataScienceCenter/renku-actions/check-pr-description@v1.18.2 + uses: SwissDataScienceCenter/renku-actions/check-pr-description@v1.19.1 with: pr_ref: ${{ github.event.number }} deploy-pr: - name: Deploy + name: Deploy on Azure runs-on: ubuntu-24.04 needs: [check-deploy] permissions: pull-requests: write - if: github.event.action != 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' - environment: - name: renku-ci-ds-${{ github.event.number }} - url: https://renku-ci-ds-${{ github.event.number }}.dev.renku.ch + id-token: write + if: github.event.action != 'closed' && needs.check-deploy.outputs.deploy == 'true' steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: repository: SwissDataScienceCenter/renku sparse-checkout: | minimal-deployment + - name: Login to Docker Hub uses: docker/login-action@v3 with: username: ${{ secrets.RENKU_DOCKER_USERNAME }} password: ${{ secrets.RENKU_DOCKER_PASSWORD }} - - name: Find deplyoment url + + - name: Find deployment url uses: peter-evans/find-comment@v3 id: deploymentUrlMessage with: issue-number: ${{ github.event.pull_request.number }} comment-author: "RenkuBot" body-includes: "You can access the deployment of this PR at" + - name: Create comment deployment url if: steps.deploymentUrlMessage.outputs.comment-id == 0 uses: peter-evans/create-or-update-comment@v4 @@ -76,50 +79,53 @@ jobs: issue-number: ${{ github.event.pull_request.number }} body: | You can access the deployment of this PR at https://renku-ci-ds-${{ github.event.number }}.dev.renku.ch + + - name: Azure login + uses: azure/login@v2 + with: + client-id: ${{ secrets.CI_RENKU_AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.CI_RENKU_AZURE_TENANT_ID }} + subscription-id: ${{ secrets.CI_RENKU_AZURE_SUBSCRIPTION_ID }} + + - name: Set AKS context + uses: azure/aks-set-context@v4 + with: + resource-group: "renku-dev" + cluster-name: "aks-switzerlandnorth-renku-dev" + + - name: Setup kubeconfig + run: | + mv "${KUBECONFIG}" "${{ github.workspace }}/renkubot-kube.config" + echo "KUBECONFIG=${{ github.workspace }}/renkubot-kube.config" >> "$GITHUB_ENV" + - name: Build and deploy - uses: SwissDataScienceCenter/renku-actions/deploy-renku@v1.18.2 + uses: SwissDataScienceCenter/renku-actions/deploy-renku@v1.19.1 env: DOCKER_PASSWORD: ${{ secrets.RENKU_DOCKER_PASSWORD }} DOCKER_USERNAME: ${{ secrets.RENKU_DOCKER_USERNAME }} GITLAB_TOKEN: ${{ secrets.DEV_GITLAB_TOKEN }} - KUBECONFIG: ${{ github.workspace }}/renkubot-kube.config RENKU_RELEASE: renku-ci-ds-${{ github.event.number }} - RENKU_VALUES_FILE: ${{ github.workspace }}/values.yaml + RENKU_VALUES_FILE: "${{ github.workspace }}/values.yaml" RENKU_VALUES: minimal-deployment/minimal-deployment-values.yaml - RENKUBOT_KUBECONFIG: ${{ secrets.RENKUBOT_DEV_KUBECONFIG }} - RENKUBOT_RANCHER_BEARER_TOKEN: ${{ secrets.RENKUBOT_RANCHER_BEARER_TOKEN }} TEST_ARTIFACTS_PATH: "tests-artifacts-${{ github.sha }}" - renku_ui: "${{ needs.check-deploy.outputs.renku-ui }}" + KUBERNETES_CLUSTER_FQDN: "dev.renku.ch" + RENKU_ANONYMOUS_SESSIONS: "true" + renku: "${{ needs.check-deploy.outputs.renku }}" renku_core: "${{ needs.check-deploy.outputs.renku-core }}" renku_gateway: "${{ needs.check-deploy.outputs.renku-gateway }}" renku_graph: "${{ needs.check-deploy.outputs.renku-graph }}" renku_notebooks: "${{ needs.check-deploy.outputs.renku-notebooks }}" + renku_ui: "${{ needs.check-deploy.outputs.renku-ui }}" renku_data_services: "@${{ github.head_ref }}" amalthea: 
"${{ needs.check-deploy.outputs.amalthea }}" amalthea_sessions: "${{ needs.check-deploy.outputs.amalthea-sessions }}" extra_values: "${{ needs.check-deploy.outputs.extra-values }}" - legacy-selenium-acceptance-tests: - name: Legacy Selenium tests - needs: [check-deploy, deploy-pr] - if: github.event.action != 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' && needs.check-deploy.outputs.test-legacy-enabled == 'true' - runs-on: ubuntu-24.04 - steps: - - uses: SwissDataScienceCenter/renku-actions/test-renku@v1.18.2 - with: - kubeconfig: ${{ secrets.RENKUBOT_DEV_KUBECONFIG }} - renku-release: renku-ci-ds-${{ github.event.number }} - gitlab-token: ${{ secrets.DEV_GITLAB_TOKEN }} - s3-results-access-key: ${{ secrets.ACCEPTANCE_TESTS_BUCKET_ACCESS_KEY }} - s3-results-secret-key: ${{ secrets.ACCEPTANCE_TESTS_BUCKET_SECRET_KEY }} - test-timeout-mins: "60" - legacy-cypress-acceptance-tests: name: Legacy Cypress tests runs-on: ubuntu-24.04 needs: [check-deploy, deploy-pr] - if: github.event.action != 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' && needs.check-deploy.outputs.test-legacy-enabled == 'true' strategy: fail-fast: false matrix: @@ -134,9 +140,13 @@ jobs: steps: - name: Extract Renku repository reference run: echo "RENKU_REFERENCE=`echo '${{ needs.check-deploy.outputs.renku }}' | cut -d'@' -f2`" >> $GITHUB_ENV - - uses: SwissDataScienceCenter/renku-actions/test-renku-cypress@v1.18.2 + - uses: SwissDataScienceCenter/renku-actions/test-renku-cypress@v1.19.1 + if: github.event.action != 'closed' && needs.check-deploy.outputs.deploy == 'true' && needs.check-deploy.outputs.test-legacy-enabled == 'true' + with: + e2e-folder: cypress/e2e/v2/ e2e-target: ${{ matrix.tests }} + kubernetes-cluster-fqdn: dev.renku.ch renku-reference: ${{ env.RENKU_REFERENCE }} renku-release: renku-ci-ds-${{ github.event.number }} test-user-password: ${{ secrets.RENKU_BOT_DEV_PASSWORD }} @@ -145,7 +155,7 @@ jobs: name: Cypress tests runs-on: ubuntu-24.04 needs: [check-deploy, deploy-pr] - if: github.event.action != 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' && needs.check-deploy.outputs.test-enabled == 'true' + if: github.event.action != 'closed' && needs.check-deploy.outputs.deploy == 'true' && needs.check-deploy.outputs.test-enabled == 'true' strategy: fail-fast: false matrix: @@ -160,10 +170,12 @@ jobs: steps: - name: Extract Renku repository reference run: echo "RENKU_REFERENCE=`echo '${{ needs.check-deploy.outputs.renku }}' | cut -d'@' -f2`" >> $GITHUB_ENV - - uses: SwissDataScienceCenter/renku-actions/test-renku-cypress@v1.18.2 + - uses: SwissDataScienceCenter/renku-actions/test-renku-cypress@v1.19.1 + if: github.event.action != 'closed' && needs.check-deploy.outputs.deploy == 'true' && needs.check-deploy.outputs.test-enabled == 'true' with: e2e-folder: cypress/e2e/v2/ e2e-target: ${{ matrix.tests }} + kubernetes-cluster-fqdn: dev.renku.ch renku-reference: ${{ env.RENKU_REFERENCE }} renku-release: renku-ci-ds-${{ github.event.number }} test-user-password: ${{ secrets.RENKU_BOT_DEV_PASSWORD }} @@ -172,17 +184,19 @@ jobs: name: Cleanup runs-on: ubuntu-24.04 needs: check-deploy - if: github.event.action == 'closed' && needs.check-deploy.outputs.pr-contains-string == 'true' + if: github.event.action == 'closed' && needs.check-deploy.outputs.deploy == 'true' permissions: pull-requests: write + id-token: write steps: - - name: Find deplyoment url + - name: Find deployment url uses: peter-evans/find-comment@v3 id: deploymentUrlMessage with: issue-number: 
${{ github.event.pull_request.number }} comment-author: "RenkuBot" - body-includes: "Tearing down the temporary RenkuLab deplyoment" + body-includes: "Tearing down the temporary RenkuLab deployment" + - name: Create comment deployment url if: steps.deploymentUrlMessage.outputs.comment-id == 0 uses: peter-evans/create-or-update-comment@v4 @@ -190,12 +204,35 @@ jobs: token: ${{ secrets.RENKUBOT_GITHUB_TOKEN }} issue-number: ${{ github.event.pull_request.number }} body: | - Tearing down the temporary RenkuLab deplyoment for this PR. + Tearing down the temporary RenkuLab deployment for this PR. + + # Azure-specific setup + - name: Azure login + if: needs.check-deploy.outputs.deploy == 'true' + uses: azure/login@v2 + with: + client-id: ${{ secrets.CI_RENKU_AZURE_CLIENT_ID }} + tenant-id: ${{ secrets.CI_RENKU_AZURE_TENANT_ID }} + subscription-id: ${{ secrets.CI_RENKU_AZURE_SUBSCRIPTION_ID }} + + - name: Set AKS context + uses: azure/aks-set-context@v4 + if: needs.check-deploy.outputs.deploy == 'true' + with: + resource-group: "renku-dev" + cluster-name: "aks-switzerlandnorth-renku-dev" + + - name: Setup kubeconfig + if: needs.check-deploy.outputs.deploy == 'true' + run: | + mv "${KUBECONFIG}" "${{ github.workspace }}/renkubot-kube.config" + echo "KUBECONFIG=${{ github.workspace }}/renkubot-kube.config" >> "$GITHUB_ENV" + + # Cleanup for both standard and Azure deployments - name: renku teardown - uses: SwissDataScienceCenter/renku-actions/cleanup-renku-ci-deployments@v1.18.2 + uses: SwissDataScienceCenter/renku-actions/cleanup-renku-ci-deployments@v1.19.1 env: HELM_RELEASE_REGEX: "^renku-ci-ds-${{ github.event.number }}$" GITLAB_TOKEN: ${{ secrets.DEV_GITLAB_TOKEN }} - RENKUBOT_KUBECONFIG: ${{ secrets.RENKUBOT_DEV_KUBECONFIG }} MAX_AGE_SECONDS: 0 DELETE_NAMESPACE: "true" diff --git a/.github/workflows/save_cache.yml b/.github/workflows/save_cache.yml index eb30e0e69..6a91c8efa 100644 --- a/.github/workflows/save_cache.yml +++ b/.github/workflows/save_cache.yml @@ -17,7 +17,7 @@ jobs: DEVCONTAINER_IMAGE_CACHE: ghcr.io/swissdatasciencecenter/renku-data-services/devcontainer steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - name: Login to Docker Hub diff --git a/.github/workflows/test_publish.yml b/.github/workflows/test_publish.yml index 9bb0b27ea..e43cecaa5 100644 --- a/.github/workflows/test_publish.yml +++ b/.github/workflows/test_publish.yml @@ -28,7 +28,7 @@ jobs: image_repository: ${{ steps.docker_image.outputs.image_repository }} image_tag: ${{ steps.docker_image.outputs.image_tag }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - name: Docker image metadata @@ -74,7 +74,7 @@ jobs: needs: - build-devcontainer steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: actions/cache/restore@v4 @@ -101,7 +101,7 @@ jobs: - build-devcontainer - style-checks steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: actions/cache/restore@v4 @@ -140,6 +140,7 @@ jobs: cacheFrom: ${{ needs.build-devcontainer.outputs.image_repository }}:${{ needs.build-devcontainer.outputs.image_tag }} - name: Coveralls Parallel uses: coverallsapp/github-action@v2 + continue-on-error: true with: flag-name: run-main-tests parallel: true @@ -150,7 +151,7 @@ jobs: - build-devcontainer - style-checks steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 - uses: actions/cache/restore@v4 @@ -191,6 +192,7 @@ jobs: cacheFrom: ${{ 
needs.build-devcontainer.outputs.image_repository }}:${{ needs.build-devcontainer.outputs.image_tag }}
       - name: Coveralls Parallel
         uses: coverallsapp/github-action@v2
+        continue-on-error: true
         with:
           flag-name: run-schemathesis-tests
           parallel: true
@@ -237,7 +239,7 @@
       name: data-tasks
     steps:
       - name: Checkout
-        uses: actions/checkout@v4
+        uses: actions/checkout@v5
       - name: Docker meta ${{ matrix.service }}
         id: meta
         uses: docker/metadata-action@v5
diff --git a/.gitignore b/.gitignore
index bc8b0518d..9fd182b69 100644
--- a/.gitignore
+++ b/.gitignore
@@ -36,6 +36,8 @@ var/
 *.manifest
 *.spec
 
+.encryption_key
+
 # Installer logs
 pip-log.txt
 pip-delete-this-directory.txt
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c58be6a92..a0c5d68c5 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -20,7 +20,7 @@ repos:
       - id: check-toml
       - id: debug-statements
      - id: end-of-file-fixer
-        exclude: '(components/renku_data_services/message_queue/(avro_models|schemas))|(.ambr)'
+        exclude: '(components/renku_data_services/message_queue/(avro_models|schemas))|(.ambr)|.idea'
       - id: mixed-line-ending
       - id: trailing-whitespace
-        exclude: '(components/renku_data_services/message_queue/(avro_models|schemas))|(.ambr)'
+        exclude: '(components/renku_data_services/message_queue/(avro_models|schemas))|(.ambr)|.idea'
diff --git a/DEVELOPING.md b/DEVELOPING.md
index 509ce2874..c1a9e127d 100644
--- a/DEVELOPING.md
+++ b/DEVELOPING.md
@@ -146,6 +146,30 @@
 Then you can run the test as usual directly from PyCharm by clicking on the green arrow next to a specific test, or a
 whole test suite or part of the test hierarchy.
 
+We use [Syrupy](https://github.com/syrupy-project/syrupy) for snapshotting data in tests.
+
+To update the snapshot data, run the following command in the devcontainer:
+```bash
+$ poetry run pytest -m "not schemathesis" -n auto --snapshot-update
+```
+
+### Directly from PyCharm
+
+From the root folder of the repository, run:
+
+1. `devcontainer build --workspace-folder .`
+2. `devcontainer up --workspace-folder .`
+3. `make schemas`
+4. `make amalthea_schema`
+
+> **WARNING:**
+>
+> Be careful with the Kubernetes environment in your shell: if tests and the environment are badly set up, you might
+> run some tests against your default cluster.
+
+Then you can run the test as usual directly from PyCharm by clicking on the green arrow next to a specific test, or a
+whole test suite or part of the test hierarchy.
+
 ## Migrations
 
 We use Alembic for migrations, and we have a single version table for all schemas. 
This version table diff --git a/Makefile b/Makefile index 477b01f84..f1abb0689 100644 --- a/Makefile +++ b/Makefile @@ -1,52 +1,44 @@ -AMALTHEA_JS_VERSION ?= 0.20.0 -AMALTHEA_SESSIONS_VERSION ?= 0.20.0 -CODEGEN_PARAMS := \ - --input-file-type openapi \ - --output-model-type pydantic_v2.BaseModel \ - --use-double-quotes \ - --target-python-version 3.13 \ - --collapse-root-models \ - --field-constraints \ - --strict-nullable \ - --set-default-enum-member \ - --openapi-scopes schemas paths parameters \ - --set-default-enum-member \ - --use-one-literal-as-default \ - --use-default -CR_CODEGEN_PARAMS := \ - --input-file-type jsonschema \ +AMALTHEA_JS_VERSION ?= 0.22.0 +AMALTHEA_SESSIONS_VERSION ?= 0.22.0 +COMMON_CODEGEN_PARAMS := \ --output-model-type pydantic_v2.BaseModel \ --use-double-quotes \ --target-python-version 3.13 \ - --collapse-root-models \ --field-constraints \ - --strict-nullable \ + --strict-nullable +API_CODEGEN_PARAMS := \ + --input-file-type openapi \ + ${COMMON_CODEGEN_PARAMS} \ + --collapse-root-models \ + --set-default-enum-member \ + --openapi-scopes schemas paths parameters \ + --use-one-literal-as-default \ + --use-default +CR_CODEGEN_PARAMS := \ + --input-file-type jsonschema \ + ${COMMON_CODEGEN_PARAMS} \ + --collapse-root-models \ --allow-extra-fields \ - --use-default-kwarg + --use-default-kwarg \ + --use-generic-container-types # A separate set of params without the --collaps-root-models option as # this causes a bug in the code generator related to list of unions. # https://github.com/koxudaxi/datamodel-code-generator/issues/1937 SEARCH_CODEGEN_PARAMS := \ - --input-file-type openapi \ - --output-model-type pydantic_v2.BaseModel \ - --use-double-quotes \ - --target-python-version 3.13 \ - --field-constraints \ - --strict-nullable \ - --set-default-enum-member \ - --openapi-scopes schemas paths parameters \ - --set-default-enum-member \ - --use-one-literal-as-default \ - --use-default + --input-file-type openapi \ + ${COMMON_CODEGEN_PARAMS} \ + --set-default-enum-member \ + --openapi-scopes schemas paths parameters \ + --use-one-literal-as-default \ + --use-default .PHONY: all all: help ##@ Apispec -# If you add a new api spec, add the `apispec.py` file here and as a -# target/dependency below +# If you add a new api spec, add the `apispec.py` file here. 
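For orientation, the generated `apispec.py` files are plain pydantic v2 models. A rough, hypothetical sketch of the shape `datamodel-codegen` emits with the parameters above (the actual classes and fields derive from each service's `api.spec.yaml`, and subclass the `BaseAPISpec` base class passed via `--base-class` in the pattern rule further down):

```python
from __future__ import annotations

from typing import Optional

from pydantic import BaseModel, Field


# Hypothetical output shape only; real field names come from the spec file.
class Provider(BaseModel):
    id: str
    display_name: str = Field(..., description="A human-readable name.")
    use_pkce: Optional[bool] = False
```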
API_SPECS := \ components/renku_data_services/crc/apispec.py \ components/renku_data_services/storage/apispec.py \ @@ -62,20 +54,6 @@ API_SPECS := \ components/renku_data_services/data_connectors/apispec.py \ components/renku_data_services/search/apispec.py -components/renku_data_services/crc/apispec.py: components/renku_data_services/crc/api.spec.yaml -components/renku_data_services/storage/apispec.py: components/renku_data_services/storage/api.spec.yaml -components/renku_data_services/users/apispec.py: components/renku_data_services/users/api.spec.yaml -components/renku_data_services/project/apispec.py: components/renku_data_services/project/api.spec.yaml -components/renku_data_services/session/apispec.py: components/renku_data_services/session/api.spec.yaml -components/renku_data_services/namespace/apispec.py: components/renku_data_services/namespace/api.spec.yaml -components/renku_data_services/secrets/apispec.py: components/renku_data_services/secrets/api.spec.yaml -components/renku_data_services/connected_services/apispec.py: components/renku_data_services/connected_services/api.spec.yaml -components/renku_data_services/repositories/apispec.py: components/renku_data_services/repositories/api.spec.yaml -components/renku_data_services/notebooks/apispec.py: components/renku_data_services/notebooks/api.spec.yaml -components/renku_data_services/platform/apispec.py: components/renku_data_services/platform/api.spec.yaml -components/renku_data_services/data_connectors/apispec.py: components/renku_data_services/data_connectors/api.spec.yaml -components/renku_data_services/search/apispec.py: components/renku_data_services/search/api.spec.yaml - schemas: ${API_SPECS} ## Generate pydantic classes from apispec yaml files @echo "generated classes based on ApiSpec" @@ -193,7 +171,7 @@ help: ## Display this help. 
# Pattern rules -API_SPEC_CODEGEN_PARAMS := ${CODEGEN_PARAMS} +API_SPEC_CODEGEN_PARAMS := ${API_CODEGEN_PARAMS} %/apispec.py: %/api.spec.yaml $(if $(findstring /search/, $(<)), $(eval API_SPEC_CODEGEN_PARAMS=${SEARCH_CODEGEN_PARAMS})) poetry run datamodel-codegen --input $< --output $@ --base-class $(subst /,.,$(subst .py,_base.BaseAPISpec,$(subst components/,,$@))) ${API_SPEC_CODEGEN_PARAMS} diff --git a/bases/renku_data_services/data_api/app.py b/bases/renku_data_services/data_api/app.py index fdbdb6e41..ebff46133 100644 --- a/bases/renku_data_services/data_api/app.py +++ b/bases/renku_data_services/data_api/app.py @@ -26,7 +26,7 @@ from renku_data_services.data_connectors.blueprints import DataConnectorsBP from renku_data_services.namespace.blueprints import GroupsBP from renku_data_services.notebooks.blueprints import NotebooksBP, NotebooksNewBP -from renku_data_services.platform.blueprints import PlatformConfigBP +from renku_data_services.platform.blueprints import PlatformConfigBP, PlatformUrlRedirectBP from renku_data_services.project.blueprints import ProjectsBP, ProjectSessionSecretBP from renku_data_services.repositories.blueprints import RepositoriesBP from renku_data_services.search.blueprints import SearchBP @@ -182,7 +182,6 @@ def register_all_handlers(app: Sanic, dm: DependencyManager) -> Sanic: url_prefix=url_prefix, connected_services_repo=dm.connected_services_repo, authenticator=dm.authenticator, - internal_gitlab_authenticator=dm.gitlab_authenticator, ) repositories = RepositoriesBP( name="repositories", @@ -197,10 +196,10 @@ def register_all_handlers(app: Sanic, dm: DependencyManager) -> Sanic: authenticator=dm.authenticator, nb_config=dm.config.nb_config, internal_gitlab_authenticator=dm.gitlab_authenticator, - git_repo=dm.git_repositories_repo, rp_repo=dm.rp_repo, user_repo=dm.kc_user_repo, storage_repo=dm.storage_repo, + git_provider_helper=dm.git_provider_helper, ) notebooks_new = NotebooksNewBP( name="notebooks", @@ -218,6 +217,8 @@ def register_all_handlers(app: Sanic, dm: DependencyManager) -> Sanic: cluster_repo=dm.cluster_repo, internal_gitlab_authenticator=dm.gitlab_authenticator, metrics=dm.metrics, + connected_svcs_repo=dm.connected_services_repo, + git_provider_helper=dm.git_provider_helper, ) platform_config = PlatformConfigBP( name="platform_config", @@ -225,6 +226,12 @@ def register_all_handlers(app: Sanic, dm: DependencyManager) -> Sanic: platform_repo=dm.platform_repo, authenticator=dm.authenticator, ) + platform_redirects = PlatformUrlRedirectBP( + name="platform_redirects", + url_prefix=url_prefix, + url_redirect_repo=dm.url_redirect_repo, + authenticator=dm.authenticator, + ) search = SearchBP( name="search2", url_prefix=url_prefix, @@ -278,12 +285,13 @@ def register_all_handlers(app: Sanic, dm: DependencyManager) -> Sanic: platform_config.blueprint(), search.blueprint(), data_connectors.blueprint(), + platform_redirects.blueprint(), ] ) if builds is not None: app.blueprint(builds.blueprint()) - # We need to patch sanic_extz as since version 24.12 they only send a string representation of errors + # We need to patch sanic_ext as since version 24.12 they only send a string representation of errors import sanic_ext.extras.validation.setup sanic_ext.extras.validation.setup.validate_body = _patched_validate_body diff --git a/bases/renku_data_services/data_api/config.py b/bases/renku_data_services/data_api/config.py index 1af9d86d5..2e67074b6 100644 --- a/bases/renku_data_services/data_api/config.py +++ b/bases/renku_data_services/data_api/config.py @@ 
-21,6 +21,7 @@ class Config: """Application configuration.""" + enable_internal_gitlab: bool dummy_stores: bool k8s_namespace: str k8s_config_root: str @@ -43,6 +44,7 @@ class Config: @classmethod def from_env(cls, db: DBConfig | None = None) -> Self: """Load config from environment.""" + enable_internal_gitlab = os.getenv("ENABLE_INTERNAL_GITLAB", "true").lower() == "true" dummy_stores = os.environ.get("DUMMY_STORES", "false").lower() == "true" if db is None: @@ -53,18 +55,22 @@ def from_env(cls, db: DBConfig | None = None) -> Self: gitlab_url = None else: keycloak = KeycloakConfig.from_env() - gitlab_url = os.environ.get("GITLAB_URL") - if gitlab_url is None: - raise errors.ConfigurationError(message="Please provide the gitlab instance URL") + if enable_internal_gitlab: + gitlab_url = os.environ.get("GITLAB_URL") + if gitlab_url is None: + raise errors.ConfigurationError(message="Please provide the gitlab instance URL") + else: + gitlab_url = None return cls( + enable_internal_gitlab=enable_internal_gitlab, version=os.environ.get("VERSION", "0.0.1"), dummy_stores=dummy_stores, k8s_namespace=os.environ.get("K8S_NAMESPACE", "default"), k8s_config_root=os.environ.get("K8S_CONFIGS_ROOT", "/secrets/kube_configs"), db=db, builds=BuildsConfig.from_env(), - nb_config=NotebooksConfig.from_env(db), + nb_config=NotebooksConfig.from_env(db, enable_internal_gitlab=enable_internal_gitlab), secrets=PublicSecretsConfig.from_env(), sentry=SentryConfig.from_env(), posthog=PosthogConfig.from_env(), diff --git a/bases/renku_data_services/data_api/dependencies.py b/bases/renku_data_services/data_api/dependencies.py index 4468568f2..abc3d4848 100644 --- a/bases/renku_data_services/data_api/dependencies.py +++ b/bases/renku_data_services/data_api/dependencies.py @@ -1,5 +1,7 @@ """Dependency management for data api.""" +from __future__ import annotations + import functools import os from dataclasses import dataclass, field @@ -7,7 +9,6 @@ from typing import Any from authlib.integrations.httpx_client import AsyncOAuth2Client -from jwt import PyJWKClient from yaml import safe_load import renku_data_services.base_models as base_models @@ -19,9 +20,8 @@ import renku_data_services.search import renku_data_services.storage import renku_data_services.users -from renku_data_services import errors from renku_data_services.authn.dummy import DummyAuthenticator, DummyUserStore -from renku_data_services.authn.gitlab import GitlabAuthenticator +from renku_data_services.authn.gitlab import EmptyGitlabAuthenticator, GitlabAuthenticator from renku_data_services.authn.keycloak import KcUserStore, KeycloakAuthenticator from renku_data_services.authz.authz import Authz from renku_data_services.connected_services.db import ConnectedServicesRepository @@ -37,7 +37,7 @@ DataConnectorRepository, DataConnectorSecretRepository, ) -from renku_data_services.git.gitlab import DummyGitlabAPI, GitlabAPI +from renku_data_services.git.gitlab import DummyGitlabAPI, EmptyGitlabAPI, GitlabAPI from renku_data_services.k8s.clients import ( DummyCoreClient, DummySchedulingClient, @@ -46,15 +46,15 @@ K8sSchedulingClient, ) from renku_data_services.k8s.config import KubeConfigEnv -from renku_data_services.k8s.quota import QuotaRepository -from renku_data_services.k8s_watcher import K8sDbCache +from renku_data_services.k8s.db import K8sDbCache, QuotaRepository from renku_data_services.message_queue.db import ReprovisioningRepository from renku_data_services.metrics.core import StagingMetricsService from renku_data_services.metrics.db import 
MetricsRepository from renku_data_services.namespace.db import GroupRepository -from renku_data_services.notebooks.config import get_clusters +from renku_data_services.notebooks.api.classes.data_service import DummyGitProviderHelper, GitProviderHelper +from renku_data_services.notebooks.config import GitProviderHelperProto, get_clusters from renku_data_services.notebooks.constants import AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK -from renku_data_services.platform.db import PlatformRepository +from renku_data_services.platform.db import PlatformRepository, UrlRedirectRepository from renku_data_services.project.db import ( ProjectMemberRepository, ProjectMigrationRepository, @@ -75,7 +75,7 @@ from renku_data_services.users.dummy_kc_api import DummyKeycloakAPI from renku_data_services.users.kc_api import IKeycloakAPI, KeycloakAPI from renku_data_services.users.models import UnsavedUserInfo -from renku_data_services.utils.core import merge_api_specs, oidc_discovery +from renku_data_services.utils.core import merge_api_specs default_resource_pool = crc_models.ResourcePool( name="default", @@ -141,6 +141,8 @@ class DependencyManager: metrics_repo: MetricsRepository metrics: StagingMetricsService shipwright_client: ShipwrightClient | None + url_redirect_repo: UrlRedirectRepository + git_provider_helper: GitProviderHelperProto spec: dict[str, Any] = field(init=False, repr=False, default_factory=dict) app_name: str = "renku_data_services" @@ -203,7 +205,7 @@ def __post_init__(self) -> None: self.default_resource_pool = generate_default_resource_pool(options, defaults) @classmethod - def from_env(cls) -> "DependencyManager": + def from_env(cls) -> DependencyManager: """Create a config from environment variables.""" user_store: base_models.UserStore @@ -216,6 +218,12 @@ def from_env(cls) -> "DependencyManager": kc_api: IKeycloakAPI cluster_repo = ClusterRepository(session_maker=config.db.async_session_maker) + connected_services_repo = ConnectedServicesRepository( + session_maker=config.db.async_session_maker, + encryption_key=config.secrets.encryption_key, + async_oauth2_client_class=cls.async_oauth2_client_class, + ) + if config.dummy_stores: authenticator = DummyAuthenticator() gitlab_authenticator = DummyAuthenticator() @@ -230,24 +238,21 @@ def from_env(cls) -> "DependencyManager": UnsavedUserInfo(id="user2", first_name="user2", last_name="doe", email="user2@doe.com"), ] kc_api = DummyKeycloakAPI(users=[i.to_keycloak_dict() for i in dummy_users]) + git_provider_helper: GitProviderHelperProto = DummyGitProviderHelper() else: + git_provider_helper = GitProviderHelper.create(connected_services_repo, config.enable_internal_gitlab) quota_repo = QuotaRepository(K8sCoreClient(), K8sSchedulingClient(), namespace=config.k8s_namespace) assert config.keycloak is not None - oidc_disc_data = oidc_discovery(config.keycloak.url, config.keycloak.realm) - jwks_url = oidc_disc_data.get("jwks_uri") - if jwks_url is None: - raise errors.ConfigurationError( - message="The JWKS url for Keycloak cannot be found from the OIDC discovery endpoint." 
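The gist of the new `enable_internal_gitlab` wiring is small enough to sketch standalone (a minimal sketch; `RuntimeError` stands in for the repo's `errors.ConfigurationError`):

```python
import os


def read_gitlab_settings() -> tuple[bool, str | None]:
    # Default "true" matches Config.from_env in the diff above.
    enable_internal_gitlab = os.getenv("ENABLE_INTERNAL_GITLAB", "true").lower() == "true"
    gitlab_url = os.environ.get("GITLAB_URL") if enable_internal_gitlab else None
    if enable_internal_gitlab and gitlab_url is None:
        # Stand-in for the ConfigurationError raised in the real config code.
        raise RuntimeError("Please provide the gitlab instance URL")
    return enable_internal_gitlab, gitlab_url
```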
- ) - jwks = PyJWKClient(jwks_url) - if config.keycloak.algorithms is None: - raise errors.ConfigurationError(message="At least one token signature algorithm is required.") - authenticator = KeycloakAuthenticator(jwks=jwks, algorithms=config.keycloak.algorithms) - assert config.gitlab_url is not None - gitlab_authenticator = GitlabAuthenticator(gitlab_url=config.gitlab_url) + authenticator = KeycloakAuthenticator.new(config.keycloak) + if config.enable_internal_gitlab: + assert config.gitlab_url + gitlab_authenticator = GitlabAuthenticator(gitlab_url=config.gitlab_url) + gitlab_client = GitlabAPI(gitlab_url=config.gitlab_url) + else: + gitlab_authenticator = EmptyGitlabAuthenticator() + gitlab_client = EmptyGitlabAPI() user_store = KcUserStore(keycloak_url=config.keycloak.url, realm=config.keycloak.realm) - gitlab_client = GitlabAPI(gitlab_url=config.gitlab_url) kc_api = KeycloakAPI( keycloak_url=config.keycloak.url, client_id=config.keycloak.client_id, @@ -255,22 +260,18 @@ def from_env(cls) -> "DependencyManager": realm=config.keycloak.realm, ) if config.builds.enabled: - # NOTE: we need to get an async client as a sync client can't be used in an async way - # But all the config code is not async, so we need to drop into the running loop, if there is one - kr8s_api = KubeConfigEnv().api() k8s_db_cache = K8sDbCache(config.db.async_session_maker) - client = K8sClusterClientsPool( - get_clusters=get_clusters( - kube_conf_root_dir=config.k8s_config_root, - namespace=config.k8s_namespace, - api=kr8s_api, - cluster_rp=cluster_repo, - ), - cache=k8s_db_cache, - kinds_to_cache=[AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK, BUILD_RUN_GVK, TASK_RUN_GVK], - ) + default_kubeconfig = KubeConfigEnv() shipwright_client = ShipwrightClient( - client=client, + client=K8sClusterClientsPool( + get_clusters( + kube_conf_root_dir=config.k8s_config_root, + default_kubeconfig=default_kubeconfig, + cluster_repo=cluster_repo, + cache=k8s_db_cache, + kinds_to_cache=[AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK, BUILD_RUN_GVK, TASK_RUN_GVK], + ), + ), namespace=config.k8s_namespace, ) @@ -344,20 +345,16 @@ def from_env(cls) -> "DependencyManager": user_repo=kc_user_repo, secret_service_public_key=config.secrets.public_key, ) - connected_services_repo = ConnectedServicesRepository( - session_maker=config.db.async_session_maker, - encryption_key=config.secrets.encryption_key, - async_oauth2_client_class=cls.async_oauth2_client_class, - internal_gitlab_url=config.gitlab_url, - ) git_repositories_repo = GitRepositoriesRepository( session_maker=config.db.async_session_maker, connected_services_repo=connected_services_repo, internal_gitlab_url=config.gitlab_url, + enable_internal_gitlab=config.enable_internal_gitlab, ) platform_repo = PlatformRepository( session_maker=config.db.async_session_maker, ) + url_redirect_repo = UrlRedirectRepository(session_maker=config.db.async_session_maker, authz=authz) data_connector_repo = DataConnectorRepository( session_maker=config.db.async_session_maker, authz=authz, @@ -417,4 +414,6 @@ def from_env(cls) -> "DependencyManager": shipwright_client=shipwright_client, authz=authz, low_level_user_secrets_repo=low_level_user_secrets_repo, + url_redirect_repo=url_redirect_repo, + git_provider_helper=git_provider_helper, ) diff --git a/bases/renku_data_services/data_api/main.py b/bases/renku_data_services/data_api/main.py index 831a9018c..26284a522 100644 --- a/bases/renku_data_services/data_api/main.py +++ b/bases/renku_data_services/data_api/main.py @@ -2,11 +2,11 @@ import argparse import 
asyncio +import os from os import environ from typing import TYPE_CHECKING, Any import sentry_sdk -import uvloop from sanic import Request, Sanic from sanic.response import BaseHTTPResponse from sanic.worker.loader import AppLoader @@ -20,7 +20,7 @@ from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId from renku_data_services.data_api.app import register_all_handlers from renku_data_services.data_api.dependencies import DependencyManager -from renku_data_services.data_api.prometheus import setup_app_metrics, setup_prometheus +from renku_data_services.data_api.prometheus import setup_prometheus from renku_data_services.errors.errors import ( ForbiddenError, MissingResourceError, @@ -28,6 +28,7 @@ ValidationError, ) from renku_data_services.migrations.core import run_migrations_for_app +from renku_data_services.search.reprovision import SearchReprovision from renku_data_services.solr.solr_migrate import SchemaMigrator from renku_data_services.storage.rclone import RCloneValidator from renku_data_services.utils.middleware import validate_null_byte @@ -39,25 +40,28 @@ logger = logging.getLogger(__name__) -async def _solr_reindex(app: Sanic) -> None: +async def solr_reindex(reprovision: SearchReprovision) -> None: """Run a solr reindex of all data. This might be required after migrating the solr schema. """ - config = DependencyManager.from_env() - reprovision = config.search_reprovisioning - admin = InternalServiceAdmin(id=ServiceAdminId.search_reprovision) - await reprovision.run_reprovision(admin) - - -def solr_reindex(app_name: str) -> None: - """Runs a solr reindex.""" - app = Sanic(app_name) - setup_app_metrics(app) - logger.info("Running SOLR reindex triggered by a migration") - asyncio.set_event_loop(uvloop.new_event_loop()) - asyncio.run(_solr_reindex(app)) + admin = InternalServiceAdmin(id=ServiceAdminId.search_reprovision) + max_retries = 30 + i = 0 + while True: + try: + await reprovision.run_reprovision(admin) + except Exception as err: + logger.error("SOLR reindexing triggered by a migration has failed because of %s. 
Will wait and retry.", err) + else: + logger.info("SOLR reindexing triggered by a migration completed successfully") + break + i += 1 + if i >= max_retries: + logger.error(f"SOLR reindexing triggered by a migration has failed {max_retries} times, giving up.") + break + await asyncio.sleep(10) def create_app() -> Sanic: @@ -94,6 +98,7 @@ async def setup_sentry(_: Sanic) -> None: sentry_sdk.init( dsn=dependency_manager.config.sentry.dsn, environment=dependency_manager.config.sentry.environment, + release=dependency_manager.config.sentry.release or None, integrations=[ AsyncioIntegration(), SanicIntegration(unsampled_statuses={404, 403, 401}), @@ -168,12 +173,12 @@ async def setup_rclone_validator(app: Sanic) -> None: validator = RCloneValidator() app.ext.dependency(validator) - @app.main_process_ready + @app.after_server_start async def ready(app: Sanic) -> None: """Application ready event handler.""" if getattr(app.ctx, "solr_reindex", False): - logger.info("Starting solr reindex, as required by migrations.") - app.manager.manage("SolrReindex", solr_reindex, {"app_name": app.name}, transient=True) + logger.info("Creating solr reindex task, as required by migrations.") + app.add_task(solr_reindex(dependency_manager.search_reprovisioning)) @app.before_server_start async def logging_setup1(app: Sanic) -> None: @@ -199,4 +204,8 @@ async def logging_setup2(app: Sanic) -> None: loader = AppLoader(factory=create_app) app = loader.load() app.prepare(**args) - Sanic.serve(primary=app, app_loader=loader) + if os.name == "posix" and args.get("single_process", False): + Sanic.start_method = "fork" + Sanic.serve(primary=app) + else: + Sanic.serve(primary=app, app_loader=loader) diff --git a/bases/renku_data_services/k8s_cache/config.py b/bases/renku_data_services/k8s_cache/config.py index a9e4d7f20..378900083 100644 --- a/bases/renku_data_services/k8s_cache/config.py +++ b/bases/renku_data_services/k8s_cache/config.py @@ -1,10 +1,11 @@ """K8s cache config.""" +from __future__ import annotations + +import os from dataclasses import dataclass from typing import Self -from kubernetes.client.api_client import os - from renku_data_services.db_config.config import DBConfig @@ -31,7 +32,7 @@ class _MetricsConfig: enabled: bool @classmethod - def from_env(cls) -> "_MetricsConfig": + def from_env(cls) -> _MetricsConfig: """Create metrics config from environment variables.""" enabled = os.environ.get("POSTHOG_ENABLED", "false").lower() == "true" return cls(enabled) @@ -44,12 +45,25 @@ class _ImageBuilderConfig: enabled: bool @classmethod - def from_env(cls) -> "_ImageBuilderConfig": + def from_env(cls) -> _ImageBuilderConfig: """Load values from environment variables.""" enabled = os.environ.get("IMAGE_BUILDERS_ENABLED", "false").lower() == "true" return cls(enabled=enabled) +@dataclass +class _V1ServicesConfig: + """Configuration for v1 services.""" + + enabled: bool + + @classmethod + def from_env(cls) -> _V1ServicesConfig: + """Load values from environment variables.""" + enabled = os.environ.get("V1_SERVICES_ENABLED", "false").lower() == "true" + return cls(enabled=enabled) + + @dataclass class Config: """K8s cache config.""" @@ -58,19 +72,20 @@ class Config: k8s: _K8sConfig metrics: _MetricsConfig image_builders: _ImageBuilderConfig + v1_services: _V1ServicesConfig @classmethod - def from_env(cls) -> "Config": + def from_env(cls) -> Config: """Create a config from environment variables.""" db = DBConfig.from_env() k8s = _K8sConfig.from_env() metrics = _MetricsConfig.from_env() - image_builders = 
_ImageBuilderConfig.from_env() - + v1_services = _V1ServicesConfig.from_env() return cls( db=db, k8s=k8s, metrics=metrics, image_builders=image_builders, + v1_services=v1_services, ) diff --git a/bases/renku_data_services/k8s_cache/dependencies.py b/bases/renku_data_services/k8s_cache/dependencies.py index 6f934cf34..f01d706ea 100644 --- a/bases/renku_data_services/k8s_cache/dependencies.py +++ b/bases/renku_data_services/k8s_cache/dependencies.py @@ -4,9 +4,8 @@ from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository from renku_data_services.k8s.clients import DummyCoreClient, DummySchedulingClient -from renku_data_services.k8s.quota import QuotaRepository +from renku_data_services.k8s.db import K8sDbCache, QuotaRepository from renku_data_services.k8s_cache.config import Config -from renku_data_services.k8s_watcher.db import K8sDbCache from renku_data_services.metrics.core import StagingMetricsService from renku_data_services.metrics.db import MetricsRepository diff --git a/bases/renku_data_services/k8s_cache/main.py b/bases/renku_data_services/k8s_cache/main.py index 5ce82e3d8..ffdd43b8f 100644 --- a/bases/renku_data_services/k8s_cache/main.py +++ b/bases/renku_data_services/k8s_cache/main.py @@ -2,12 +2,12 @@ import asyncio -import kr8s - from renku_data_services.app_config import logging -from renku_data_services.k8s.config import get_clusters +from renku_data_services.k8s.clients import K8sClusterClient +from renku_data_services.k8s.config import KubeConfigEnv, get_clusters +from renku_data_services.k8s.constants import ClusterId +from renku_data_services.k8s.watcher import K8sWatcher, k8s_object_handler from renku_data_services.k8s_cache.dependencies import DependencyManager -from renku_data_services.k8s_watcher import K8sWatcher, k8s_object_handler from renku_data_services.notebooks.constants import AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK from renku_data_services.session.constants import BUILD_RUN_GVK, TASK_RUN_GVK @@ -18,22 +18,25 @@ async def main() -> None: """K8s cache entrypoint.""" dm = DependencyManager.from_env() + default_kubeconfig = KubeConfigEnv() - kr8s_api = await kr8s.asyncio.api() - - clusters = await get_clusters( + clusters: dict[ClusterId, K8sClusterClient] = {} + async for client in get_clusters( kube_conf_root_dir=dm.config.k8s.kube_config_root, - namespace=dm.config.k8s.renku_namespace, - api=kr8s_api, - cluster_rp=dm.cluster_repo(), - ) - - kinds = [AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK] + default_kubeconfig=default_kubeconfig, + cluster_repo=dm.cluster_repo(), + ): + clusters[client.get_cluster().id] = client + + kinds = [AMALTHEA_SESSION_GVK] + if dm.config.v1_services.enabled: + kinds.append(JUPYTER_SESSION_GVK) if dm.config.image_builders.enabled: kinds.extend([BUILD_RUN_GVK, TASK_RUN_GVK]) + logger.info(f"Resources: {kinds}") watcher = K8sWatcher( handler=k8s_object_handler(dm.k8s_cache, dm.metrics, rp_repo=dm.rp_repo), - clusters={c.id: c for c in clusters}, + clusters=clusters, kinds=kinds, db_cache=dm.k8s_cache, ) diff --git a/bases/renku_data_services/secrets_storage_api/app.py b/bases/renku_data_services/secrets_storage_api/app.py index 00d44cbcc..019b711ec 100644 --- a/bases/renku_data_services/secrets_storage_api/app.py +++ b/bases/renku_data_services/secrets_storage_api/app.py @@ -19,7 +19,7 @@ def register_all_handlers(app: Sanic, dm: DependencyManager) -> Sanic: authenticator=dm.authenticator, secret_service_private_key=dm.config.secrets.private_key, 
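The `V1_SERVICES_ENABLED` flag read above feeds into the cache watcher's kind list in `k8s_cache/main.py`. A self-contained sketch of that selection logic, with stand-in GVK values (the real constants live in `notebooks.constants` and `session.constants`):

```python
from dataclasses import dataclass


@dataclass(frozen=True)
class GVK:
    """Stand-in for the repo's group/version/kind type; values are illustrative."""

    group: str
    version: str
    kind: str


AMALTHEA_SESSION_GVK = GVK("amalthea.dev", "v1alpha1", "AmaltheaSession")
JUPYTER_SESSION_GVK = GVK("amalthea.dev", "v1alpha1", "JupyterServer")


def watched_kinds(v1_services_enabled: bool) -> list[GVK]:
    kinds = [AMALTHEA_SESSION_GVK]
    if v1_services_enabled:
        # v1 (JupyterServer) sessions are only cached when V1_SERVICES_ENABLED is set.
        kinds.append(JUPYTER_SESSION_GVK)
    return kinds
```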
previous_secret_service_private_key=dm.config.secrets.previous_private_key, - core_client=dm.core_client, + client=dm.secret_client, ) misc = MiscBP(name="misc", url_prefix=url_prefix, apispec=dm.config.spec, version=dm.config.version) app.blueprint([secrets_storage.blueprint(), misc.blueprint()]) diff --git a/bases/renku_data_services/secrets_storage_api/dependencies.py b/bases/renku_data_services/secrets_storage_api/dependencies.py index dd3c684c5..1b67ef453 100644 --- a/bases/renku_data_services/secrets_storage_api/dependencies.py +++ b/bases/renku_data_services/secrets_storage_api/dependencies.py @@ -1,17 +1,23 @@ """Dependencies management of secrets storage.""" -from dataclasses import dataclass, field +from __future__ import annotations -from jwt import PyJWKClient +import os +from dataclasses import dataclass, field -from renku_data_services import base_models, errors +from renku_data_services import base_models from renku_data_services.authn.dummy import DummyAuthenticator from renku_data_services.authn.keycloak import KeycloakAuthenticator -from renku_data_services.k8s.client_interfaces import K8sCoreClientInterface -from renku_data_services.k8s.clients import DummyCoreClient, K8sCoreClient +from renku_data_services.crc.db import ClusterRepository +from renku_data_services.k8s.client_interfaces import SecretClient +from renku_data_services.k8s.clients import ( + DummyCoreClient, + K8sClusterClientsPool, + K8sSecretClient, +) +from renku_data_services.k8s.config import KubeConfigEnv, get_clusters from renku_data_services.secrets.db import LowLevelUserSecretsRepo from renku_data_services.secrets_storage_api.config import Config -from renku_data_services.utils.core import oidc_discovery @dataclass @@ -20,7 +26,7 @@ class DependencyManager: authenticator: base_models.Authenticator config: Config - core_client: K8sCoreClientInterface + secret_client: SecretClient _user_secrets_repo: LowLevelUserSecretsRepo | None = field(default=None, repr=False, init=False) @property @@ -33,32 +39,33 @@ def user_secrets_repo(self) -> LowLevelUserSecretsRepo: return self._user_secrets_repo @classmethod - def from_env(cls) -> "DependencyManager": + def from_env(cls) -> DependencyManager: """Create a config from environment variables.""" authenticator: base_models.Authenticator - core_client: K8sCoreClientInterface + secret_client: SecretClient config = Config.from_env() + cluster_repo = ClusterRepository(session_maker=config.db.async_session_maker) if config.dummy_stores: authenticator = DummyAuthenticator() - core_client = DummyCoreClient({}, {}) + secret_client = DummyCoreClient({}, {}) else: assert config.keycloak is not None - oidc_disc_data = oidc_discovery(config.keycloak.url, config.keycloak.realm) - jwks_url = oidc_disc_data.get("jwks_uri") - if jwks_url is None: - raise errors.ConfigurationError( - message="The JWKS url for Keycloak cannot be found from the OIDC discovery endpoint." 
- ) - jwks = PyJWKClient(jwks_url) - if config.keycloak.algorithms is None: - raise errors.ConfigurationError(message="At least one token signature algorithm is required.") + authenticator = KeycloakAuthenticator.new(config.keycloak) + default_kubeconfig = KubeConfigEnv() - authenticator = KeycloakAuthenticator(jwks=jwks, algorithms=config.keycloak.algorithms) - core_client = K8sCoreClient() + secret_client = K8sSecretClient( + K8sClusterClientsPool( + get_clusters( + kube_conf_root_dir=os.environ.get("K8S_CONFIGS_ROOT", "/secrets/kube_configs"), + default_kubeconfig=default_kubeconfig, + cluster_repo=cluster_repo, + ) + ) + ) return cls( config=config, authenticator=authenticator, - core_client=core_client, + secret_client=secret_client, ) diff --git a/bases/renku_data_services/secrets_storage_api/main.py b/bases/renku_data_services/secrets_storage_api/main.py index 29234f8df..4d5b420da 100644 --- a/bases/renku_data_services/secrets_storage_api/main.py +++ b/bases/renku_data_services/secrets_storage_api/main.py @@ -12,7 +12,6 @@ from renku_data_services.app_config import logging from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId -from renku_data_services.secrets.core import rotate_encryption_keys from renku_data_services.secrets_storage_api.app import register_all_handlers from renku_data_services.secrets_storage_api.dependencies import DependencyManager @@ -56,11 +55,10 @@ async def rotate_encryption_key_listener(app: Sanic) -> None: return # only run rotation on one worker try: - await rotate_encryption_keys( + await dm.user_secrets_repo.rotate_encryption_keys( InternalServiceAdmin(id=ServiceAdminId.secrets_rotation), dm.config.secrets.private_key, dm.config.secrets.previous_private_key, - dm.user_secrets_repo, ) finally: app.shared_ctx.rotation_lock.release() diff --git a/components/renku_data_services/app_config/config.py b/components/renku_data_services/app_config/config.py index 3d31afcc5..4e13bcdb0 100644 --- a/components/renku_data_services/app_config/config.py +++ b/components/renku_data_services/app_config/config.py @@ -9,6 +9,8 @@ instantiated multiple times without creating multiple database connections. 
""" +from __future__ import annotations + import os from dataclasses import dataclass @@ -26,7 +28,7 @@ class KeycloakConfig: algorithms: list[str] | None @classmethod - def from_env(cls) -> "KeycloakConfig": + def from_env(cls) -> KeycloakConfig: """Load config from environment values.""" url = os.environ.get("KEYCLOAK_URL") if url is None: @@ -55,17 +57,19 @@ class SentryConfig: enabled: bool dsn: str environment: str + release: str sample_rate: float = 0.2 @classmethod - def from_env(cls) -> "SentryConfig": + def from_env(cls) -> SentryConfig: """Create a config from environment variables.""" enabled = os.environ.get("SENTRY_ENABLED", "false").lower() == "true" dsn = os.environ.get("SENTRY_DSN", "") environment = os.environ.get("SENTRY_ENVIRONMENT", "") + release = os.environ.get("VERSION", "") sample_rate = float(os.environ.get("SENTRY_SAMPLE_RATE", "0.2")) - return cls(enabled, dsn=dsn, environment=environment, sample_rate=sample_rate) + return cls(enabled, dsn=dsn, environment=environment, release=release, sample_rate=sample_rate) @dataclass @@ -75,7 +79,7 @@ class PosthogConfig: enabled: bool @classmethod - def from_env(cls) -> "PosthogConfig": + def from_env(cls) -> PosthogConfig: """Create posthog config from environment variables.""" enabled = os.environ.get("POSTHOG_ENABLED", "false").lower() == "true" @@ -90,7 +94,7 @@ class TrustedProxiesConfig: real_ip_header: str | None = None @classmethod - def from_env(cls) -> "TrustedProxiesConfig": + def from_env(cls) -> TrustedProxiesConfig: """Create a config from environment variables.""" proxies_count = int(os.environ.get("PROXIES_COUNT") or "0") real_ip_header = os.environ.get("REAL_IP_HEADER") diff --git a/components/renku_data_services/authn/gitlab.py b/components/renku_data_services/authn/gitlab.py index 9c3686f10..0d1bc72ab 100644 --- a/components/renku_data_services/authn/gitlab.py +++ b/components/renku_data_services/authn/gitlab.py @@ -89,3 +89,14 @@ async def _get_gitlab_api_user(self, access_token: str, headers: Header) -> base full_name=full_name, access_token_expires_at=expires_at, ) + + +@dataclass +class EmptyGitlabAuthenticator: + """An empty gitlab authenticator used to decouple gitlab from Renku.""" + + token_field: str = "Not-Applicable" + + async def authenticate(self, _: str, __: Request) -> base_models.APIUser: + """Always return an anonymous user.""" + return base_models.APIUser() diff --git a/components/renku_data_services/authn/keycloak.py b/components/renku_data_services/authn/keycloak.py index 2f082929a..fbf8baec8 100644 --- a/components/renku_data_services/authn/keycloak.py +++ b/components/renku_data_services/authn/keycloak.py @@ -1,5 +1,7 @@ """Keycloak user store.""" +from __future__ import annotations + from contextlib import suppress from dataclasses import dataclass from datetime import datetime @@ -9,10 +11,12 @@ import jwt from jwt import PyJWKClient from sanic import Request +from tenacity import retry, stop_after_attempt, stop_after_delay, wait_fixed from ulid import ULID import renku_data_services.base_models as base_models from renku_data_services import errors +from renku_data_services.app_config.config import KeycloakConfig from renku_data_services.base_models.core import Authenticator from renku_data_services.utils.core import get_ssl_context @@ -53,6 +57,31 @@ def __post_init__(self) -> None: if len(self.algorithms) == 0: raise errors.ConfigurationError(message="At least one algorithm for token validation has to be specified.") + @classmethod + def new(cls, kc_config: KeycloakConfig) -> 
KeycloakAuthenticator: + """Create a new KeycloakAuthenticator instance.""" + + @retry(stop=(stop_after_attempt(20) | stop_after_delay(300)), wait=wait_fixed(2), reraise=True) + def oidc_discovery(kc_config: KeycloakConfig) -> dict[str, Any]: + """Get OIDC configuration.""" + url = f"{kc_config.url}/realms/{kc_config.realm}/.well-known/openid-configuration" + res = httpx.get(url, verify=get_ssl_context(), timeout=5) + if res.status_code == 200: + return cast(dict[str, Any], res.json()) + raise errors.ConfigurationError(message=f"Cannot successfully do OIDC discovery with url {url}.") + + oidc_disc_data = oidc_discovery(kc_config) + jwks_url = oidc_disc_data.get("jwks_uri") + if jwks_url is None: + raise errors.ConfigurationError( + message="The JWKS url for Keycloak cannot be found from the OIDC discovery endpoint." + ) + jwks = PyJWKClient(jwks_url) + if kc_config.algorithms is None: + raise errors.ConfigurationError(message="At least one token signature algorithm is required.") + + return cls(jwks=jwks, algorithms=kc_config.algorithms) + def _validate(self, token: str) -> dict[str, Any]: try: sk = self.jwks.get_signing_key_from_jwt(token) diff --git a/components/renku_data_services/base_api/auth.py b/components/renku_data_services/base_api/auth.py index 16b76b09d..d51b2d4eb 100644 --- a/components/renku_data_services/base_api/auth.py +++ b/components/renku_data_services/base_api/auth.py @@ -156,30 +156,3 @@ async def decorated_function(*args: _P.args, **kwargs: _P.kwargs) -> _T: return response return decorated_function - - -def internal_gitlab_authenticate( - authenticator: Authenticator, -) -> Callable[ - [Callable[Concatenate[Request, APIUser, APIUser, _P], Coroutine[Any, Any, _T]]], - Callable[Concatenate[Request, APIUser, _P], Coroutine[Any, Any, _T]], -]: - """Decorator for a Sanic handler that that adds a user for the internal gitlab user.""" - - def decorator( - f: Callable[Concatenate[Request, APIUser, APIUser, _P], Coroutine[Any, Any, _T]], - ) -> Callable[Concatenate[Request, APIUser, _P], Coroutine[Any, Any, _T]]: - @wraps(f) - async def decorated_function( - request: Request, - user: APIUser, - *args: _P.args, - **kwargs: _P.kwargs, - ) -> _T: - access_token = str(request.headers.get("Gitlab-Access-Token")) - internal_gitlab_user = await authenticator.authenticate(access_token, request) - return await f(request, user, internal_gitlab_user, *args, **kwargs) - - return decorated_function - - return decorator diff --git a/components/renku_data_services/base_models/core.py b/components/renku_data_services/base_models/core.py index 5d45e8cf9..bdbf7f7fd 100644 --- a/components/renku_data_services/base_models/core.py +++ b/components/renku_data_services/base_models/core.py @@ -53,7 +53,7 @@ def get_full_name(self) -> str | None: @dataclass(kw_only=True, frozen=True) class AuthenticatedAPIUser(APIUser): - """The model for a an authenticated user of the API.""" + """The model for an authenticated user of the API.""" id: str email: str diff --git a/components/renku_data_services/connected_services/api.spec.yaml b/components/renku_data_services/connected_services/api.spec.yaml index 7b235b882..d783c68b3 100644 --- a/components/renku_data_services/connected_services/api.spec.yaml +++ b/components/renku_data_services/connected_services/api.spec.yaml @@ -175,6 +175,21 @@ paths: $ref: "#/components/responses/Error" tags: - oauth2 + delete: + summary: Delete an OAuth2 connection + parameters: + - in: path + name: connection_id + required: true + schema: + type: string + responses: + "204": 
+ description: If deleted successfully + default: + $ref: "#/components/responses/Error" + tags: + - oauth2 /oauth2/connections/{connection_id}/account: get: summary: Get the account information for this OAuth2 connection for the currently authenticated user if their account is connected @@ -272,6 +287,10 @@ components: $ref: "#/components/schemas/ProviderUrl" use_pkce: $ref: "#/components/schemas/UsePKCE" + image_registry_url: + $ref: "#/components/schemas/ImageRegistryUrl" + oidc_issuer_url: + $ref: "#/components/schemas/OidcIssuerUrl" required: - id - kind @@ -303,6 +322,10 @@ components: $ref: "#/components/schemas/ProviderUrl" use_pkce: $ref: "#/components/schemas/UsePKCE" + image_registry_url: + $ref: "#/components/schemas/ImageRegistryUrl" + oidc_issuer_url: + $ref: "#/components/schemas/OidcIssuerUrl" required: - id - kind @@ -330,6 +353,10 @@ components: $ref: "#/components/schemas/ProviderUrl" use_pkce: $ref: "#/components/schemas/UsePKCE" + image_registry_url: + $ref: "#/components/schemas/ImageRegistryUrl" + oidc_issuer_url: + $ref: "#/components/schemas/OidcIssuerUrl" ConnectionList: type: array items: @@ -404,6 +431,7 @@ components: - "drive" - "onedrive" - "dropbox" + - "generic_oidc" example: "gitlab" ApplicationSlug: description: | @@ -468,6 +496,17 @@ components: minimum: 1 maximum: 100 default: 20 + ImageRegistryUrl: + type: string + description: | + This should contain no paths, just the domain for the registry and the scheme + (http or https) to access the image registry API. + example: https://registry.gitlab.com + OidcIssuerUrl: + type: string + description: | + The URL for OpenID Connect client discovery. Used for providers of kind 'generic_oidc'. + example: https://renkulab.io/auth/realms/Renku ErrorResponse: type: object properties: diff --git a/components/renku_data_services/connected_services/apispec.py b/components/renku_data_services/connected_services/apispec.py index bac27d378..f6f9c5417 100644 --- a/components/renku_data_services/connected_services/apispec.py +++ b/components/renku_data_services/connected_services/apispec.py @@ -1,6 +1,6 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2025-03-19T10:21:11+00:00 +# timestamp: 2025-09-05T11:16:18+00:00 from __future__ import annotations @@ -34,6 +34,7 @@ class ProviderKind(Enum): drive = "drive" onedrive = "onedrive" dropbox = "dropbox" + generic_oidc = "generic_oidc" class ConnectionStatus(Enum): @@ -120,6 +121,16 @@ class Provider(BaseAPISpec): description="Whether or not to use PKCE during authorization flows.\n", examples=[False], ) + image_registry_url: Optional[str] = Field( + None, + description="This should contain no paths, just the domain for the registry and the scheme\n(http or https) to access the image registry API.\n", + examples=["https://registry.gitlab.com"], + ) + oidc_issuer_url: Optional[str] = Field( + None, + description="The URL for OpenID Connect client discovery. 
Used for providers of kind 'generic_oidc'.\n", + examples=["https://renkulab.io/auth/realms/Renku"], + ) class ProviderPost(BaseAPISpec): @@ -155,10 +166,20 @@ class ProviderPost(BaseAPISpec): examples=["https://example.org"], ) use_pkce: Optional[bool] = Field( - None, + False, description="Whether or not to use PKCE during authorization flows.\n", examples=[False], ) + image_registry_url: Optional[str] = Field( + None, + description="This should contain no paths, just the domain for the registry and the scheme\n(http or https) to access the image registry API.\n", + examples=["https://registry.gitlab.com"], + ) + oidc_issuer_url: Optional[str] = Field( + None, + description="The URL for OpenID Connect client discovery. Used for providers of kind 'generic_oidc'.\n", + examples=["https://renkulab.io/auth/realms/Renku"], + ) class ProviderPatch(BaseAPISpec): @@ -189,10 +210,20 @@ class ProviderPatch(BaseAPISpec): examples=["https://example.org"], ) use_pkce: Optional[bool] = Field( - None, + False, description="Whether or not to use PKCE during authorization flows.\n", examples=[False], ) + image_registry_url: Optional[str] = Field( + None, + description="This should contain no paths, just the domain for the registry and the scheme\n(http or https) to access the image registry API.\n", + examples=["https://registry.gitlab.com"], + ) + oidc_issuer_url: Optional[str] = Field( + None, + description="The URL for OpenID Connect client discovery. Used for providers of kind 'generic_oidc'.\n", + examples=["https://renkulab.io/auth/realms/Renku"], + ) class Connection(BaseAPISpec): diff --git a/components/renku_data_services/connected_services/blueprints.py b/components/renku_data_services/connected_services/blueprints.py index 69f431379..0a33c3225 100644 --- a/components/renku_data_services/connected_services/blueprints.py +++ b/components/renku_data_services/connected_services/blueprints.py @@ -4,7 +4,7 @@ from typing import Any from urllib.parse import unquote, urlparse, urlunparse -from sanic import HTTPResponse, Request, json, redirect +from sanic import HTTPResponse, Request, empty, json, redirect from sanic.response import JSONResponse from sanic_ext import validate from ulid import ULID @@ -18,7 +18,7 @@ from renku_data_services.base_models.validation import validate_and_dump, validated_json from renku_data_services.connected_services import apispec from renku_data_services.connected_services.apispec_base import AuthorizeParams, CallbackParams -from renku_data_services.connected_services.core import validate_oauth2_client_patch +from renku_data_services.connected_services.core import validate_oauth2_client_patch, validate_unsaved_oauth2_client from renku_data_services.connected_services.db import ConnectedServicesRepository logger = logging.getLogger(__name__) @@ -59,7 +59,8 @@ def post(self) -> BlueprintFactoryResponse: @only_admins @validate(json=apispec.ProviderPost) async def _post(_: Request, user: base_models.APIUser, body: apispec.ProviderPost) -> JSONResponse: - client = await self.connected_services_repo.insert_oauth2_client(user=user, new_client=body) + new_client = validate_unsaved_oauth2_client(body) + client = await self.connected_services_repo.insert_oauth2_client(user=user, new_client=new_client) return validated_json(apispec.Provider, client, 201) return "/oauth2/providers", ["POST"], _post @@ -144,7 +145,6 @@ class OAuth2ConnectionsBP(CustomBlueprint): connected_services_repo: ConnectedServicesRepository authenticator: base_models.Authenticator - 
internal_gitlab_authenticator: base_models.Authenticator def get_all(self) -> BlueprintFactoryResponse: """List all OAuth2 connections.""" @@ -168,6 +168,17 @@ async def _get_one(_: Request, user: base_models.APIUser, connection_id: ULID) - return "/oauth2/connections/<connection_id>", ["GET"], _get_one + def delete(self) -> BlueprintFactoryResponse: + """Delete a specific OAuth2 connection.""" + + @authenticate(self.authenticator) + async def _delete_one(_: Request, user: base_models.APIUser, connection_id: ULID) -> HTTPResponse: + result = await self.connected_services_repo.delete_oauth2_connection(user, connection_id) + + return empty(status=204 if result else 404) + + return "/oauth2/connections/<connection_id>", ["DELETE"], _delete_one + def get_account(self) -> BlueprintFactoryResponse: """Get the account information for a specific OAuth2 connection.""" diff --git a/components/renku_data_services/connected_services/core.py b/components/renku_data_services/connected_services/core.py index acc8f7c91..72b0e9b3f 100644 --- a/components/renku_data_services/connected_services/core.py +++ b/components/renku_data_services/connected_services/core.py @@ -1,12 +1,25 @@ """Business logic for connected services.""" +from urllib.parse import urlparse + from renku_data_services.connected_services import apispec, models +from renku_data_services.errors import errors def validate_oauth2_client_patch(patch: apispec.ProviderPatch) -> models.OAuth2ClientPatch: """Validate the update to a OAuth2 Client.""" + if patch.image_registry_url: + validate_image_registry_url(patch.image_registry_url) + kind = models.ProviderKind(patch.kind.value) if patch.kind else None + if kind == models.ProviderKind.generic_oidc: + if not patch.oidc_issuer_url: + raise errors.ValidationError( + message=f"The field 'oidc_issuer_url' is required when kind is set to {models.ProviderKind.generic_oidc.value}.", # noqa E501 + quiet=True, + ) + validate_oidc_issuer_url(patch.oidc_issuer_url) return models.OAuth2ClientPatch( - kind=patch.kind, + kind=kind, app_slug=patch.app_slug, client_id=patch.client_id, client_secret=patch.client_secret, @@ -14,4 +27,70 @@ def validate_oauth2_client_patch(patch: apispec.ProviderPatch) -> models.OAuth2C scope=patch.scope, url=patch.url, use_pkce=patch.use_pkce, + image_registry_url=patch.image_registry_url, + oidc_issuer_url=patch.oidc_issuer_url, + ) + + +def validate_unsaved_oauth2_client(clnt: apispec.ProviderPost) -> models.UnsavedOAuth2Client: + """Validate the creation of a new OAuth2 Client.""" + if clnt.image_registry_url is not None: + validate_image_registry_url(clnt.image_registry_url) + kind = models.ProviderKind(clnt.kind.value) + if clnt.oidc_issuer_url and kind != models.ProviderKind.generic_oidc: + raise errors.ValidationError( + message=f"The field 'oidc_issuer_url' can only be set when kind is set to {models.ProviderKind.generic_oidc.value}.", # noqa E501 + quiet=True, + ) + if kind == models.ProviderKind.generic_oidc: + if not clnt.oidc_issuer_url: + raise errors.ValidationError( + message=f"The field 'oidc_issuer_url' is required when kind is set to {models.ProviderKind.generic_oidc.value}.", # noqa E501 + quiet=True, + ) + validate_oidc_issuer_url(clnt.oidc_issuer_url) + return models.UnsavedOAuth2Client( + id=clnt.id, + kind=kind, + app_slug=clnt.app_slug or "", + client_id=clnt.client_id, + client_secret=clnt.client_secret, + display_name=clnt.display_name, + scope=clnt.scope, + url=clnt.url, + use_pkce=clnt.use_pkce or False, + image_registry_url=clnt.image_registry_url, + 
oidc_issuer_url=clnt.oidc_issuer_url, ) + + +def validate_image_registry_url(url: str) -> None: + """Validate an image registry url.""" + parsed = urlparse(url) + if not parsed.netloc: + raise errors.ValidationError( + message=f"The image registry url {url} is not valid, expected a valid url starting with the scheme.", + quiet=True, + ) + accepted_schemes = ["https"] + if parsed.scheme not in accepted_schemes: + raise errors.ValidationError( + message=f"The scheme for the image registry url {url} is not valid, expected one of {accepted_schemes}", + quiet=True, + ) + + +def validate_oidc_issuer_url(url: str) -> None: + """Validate an OpenID Connect Issuer URL.""" + parsed = urlparse(url) + if not parsed.netloc: + raise errors.ValidationError( + message=f"The host for the 'oidc_issuer_url' {url} is not valid, expected a non-empty value.", + quiet=True, + ) + accepted_schemes = ["https"] + if parsed.scheme not in accepted_schemes: + raise errors.ValidationError( + message=f"The scheme for the 'oidc_issuer_url' {url} is not valid, expected one of {accepted_schemes}", + quiet=True, + ) diff --git a/components/renku_data_services/connected_services/db.py b/components/renku_data_services/connected_services/db.py index c123ab6a8..8bd80f710 100644 --- a/components/renku_data_services/connected_services/db.py +++ b/components/renku_data_services/connected_services/db.py @@ -4,11 +4,11 @@ from collections.abc import AsyncGenerator, Callable from contextlib import asynccontextmanager from typing import Any -from urllib.parse import urljoin +from urllib.parse import urljoin, urlparse from authlib.integrations.base_client import InvalidTokenError from authlib.integrations.httpx_client import AsyncOAuth2Client, OAuthError -from sqlalchemy import select +from sqlalchemy import and_, select from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import selectinload from ulid import ULID @@ -17,15 +17,20 @@ from renku_data_services import errors from renku_data_services.app_config import logging from renku_data_services.base_api.pagination import PaginationRequest -from renku_data_services.connected_services import apispec, models +from renku_data_services.connected_services import models from renku_data_services.connected_services import orm as schemas -from renku_data_services.connected_services.apispec import ConnectionStatus, ProviderKind from renku_data_services.connected_services.provider_adapters import ( GitHubAdapter, ProviderAdapter, get_provider_adapter, ) -from renku_data_services.connected_services.utils import generate_code_verifier +from renku_data_services.connected_services.utils import ( + GitHubProviderType, + generate_code_verifier, + get_github_provider_type, +) +from renku_data_services.notebooks.api.classes.image import Image, ImageRepoDockerAPI +from renku_data_services.users.db import APIUser from renku_data_services.utils.cryptography import decrypt_string, encrypt_string logger = logging.getLogger(__name__) @@ -39,12 +44,11 @@ def __init__( session_maker: Callable[..., AsyncSession], encryption_key: bytes, async_oauth2_client_class: type[AsyncOAuth2Client], - internal_gitlab_url: str | None, ): self.session_maker = session_maker self.encryption_key = encryption_key self.async_oauth2_client_class = async_oauth2_client_class - self.internal_gitlab_url = internal_gitlab_url.rstrip("/") if internal_gitlab_url else None + self.supported_image_registry_providers = {models.ProviderKind.gitlab, models.ProviderKind.github} async def get_oauth2_clients( self, @@ -70,9 +74,7 @@ 
async def get_oauth2_client(self, provider_id: str, user: base_models.APIUser) - return client.dump(user_is_admin=user.is_admin) async def insert_oauth2_client( - self, - user: base_models.APIUser, - new_client: apispec.ProviderPost, + self, user: base_models.APIUser, new_client: models.UnsavedOAuth2Client ) -> models.OAuth2Client: """Insert a new OAuth2 Client environment.""" if user.id is None: @@ -95,6 +97,8 @@ async def insert_oauth2_client( url=new_client.url, use_pkce=new_client.use_pkce or False, created_by_id=user.id, + image_registry_url=new_client.image_registry_url, + oidc_issuer_url=new_client.oidc_issuer_url or None, ) async with self.session_maker() as session, session.begin(): @@ -146,6 +150,19 @@ async def update_oauth2_client( client.url = patch.url if patch.use_pkce is not None: client.use_pkce = patch.use_pkce + if patch.image_registry_url: + # Patching with a string of at least length 1 updates the value + client.image_registry_url = patch.image_registry_url + elif patch.image_registry_url == "": + # Patching with "", removes the value + client.image_registry_url = None + if patch.oidc_issuer_url: + client.oidc_issuer_url = patch.oidc_issuer_url + elif patch.oidc_issuer_url == "": + client.oidc_issuer_url = None + # Unset oidc_issuer_url when the kind has been changed to a value other than 'generic_oidc' + if client.kind != models.ProviderKind.generic_oidc: + client.oidc_issuer_url = None await session.flush() await session.refresh(client) @@ -218,14 +235,14 @@ async def authorize_client( client_id=client.id, token=None, state=state, - status=schemas.ConnectionStatus.pending, + status=models.ConnectionStatus.pending, code_verifier=code_verifier, next_url=next_url, ) session.add(connection) else: connection.state = state - connection.status = schemas.ConnectionStatus.pending + connection.status = models.ConnectionStatus.pending connection.code_verifier = code_verifier connection.next_url = next_url @@ -274,17 +291,36 @@ async def authorize_callback(self, state: str, raw_url: str, callback_url: str) adapter.token_endpoint_url, authorization_response=raw_url, code_verifier=code_verifier ) - logger.info(f"Token for client {client.id} has keys: {", ".join(token.keys())}") + logger.info(f"Token for client {client.id} has keys: {', '.join(token.keys())}") next_url = connection.next_url connection.token = self._encrypt_token_set(token=token, user_id=connection.user_id) connection.state = None - connection.status = schemas.ConnectionStatus.connected + connection.status = models.ConnectionStatus.connected connection.next_url = None return next_url + async def delete_oauth2_connection(self, user: base_models.APIUser, connection_id: ULID) -> bool: + """Delete one connection of the given user.""" + if not user.is_authenticated or user.id is None: + return False + + async with self.session_maker() as session, session.begin(): + result = await session.scalars( + select(schemas.OAuth2ConnectionORM) + .where(schemas.OAuth2ConnectionORM.id == connection_id) + .where(schemas.OAuth2ConnectionORM.user_id == user.id) + ) + conn = result.one_or_none() + + if conn is None: + return False + + await session.delete(conn) + return True + async def get_oauth2_connections( self, user: base_models.APIUser, @@ -300,8 +336,10 @@ async def get_oauth2_connections( connections = result.all() return [c.dump() for c in connections] - async def get_oauth2_connection(self, connection_id: ULID, user: base_models.APIUser) -> models.OAuth2Connection: - """Get one OAuth2 connection from the database.""" + async 
def get_oauth2_connection_or_none( + self, connection_id: ULID, user: base_models.APIUser + ) -> models.OAuth2Connection | None: + """Get one OAuth2 connection from the database. Throw if the user is not authenticated.""" if not user.is_authenticated or user.id is None: raise errors.MissingResourceError( message=f"OAuth2 connection with id '{connection_id}' does not exist or you do not have access to it." @@ -314,11 +352,23 @@ async def get_oauth2_connection(self, connection_id: ULID, user: base_models.API .where(schemas.OAuth2ConnectionORM.user_id == user.id) ) connection = result.one_or_none() - if connection is None: - raise errors.MissingResourceError( - message=f"OAuth2 connection with id '{connection_id}' does not exist or you do not have access to it." # noqa: E501 - ) - return connection.dump() + if connection: + return connection.dump() + else: + return None + + async def get_oauth2_connection(self, connection_id: ULID, user: base_models.APIUser) -> models.OAuth2Connection: + """Get one OAuth2 connection from the database. + + Throw if the connection doesn't exist or the user is not authenticated. + """ + connection = await self.get_oauth2_connection_or_none(connection_id, user) + if connection is None: + raise errors.MissingResourceError( + message=f"OAuth2 connection with id '{connection_id}' does not exist or you do not have access to it." + ) + + return connection async def get_oauth2_connected_account( self, connection_id: ULID, user: base_models.APIUser @@ -358,6 +408,54 @@ async def get_oauth2_connection_token( token_model = models.OAuth2TokenSet.from_dict(oauth2_client.token) return token_model + async def get_provider_for_image(self, user: APIUser, image: Image) -> models.ImageProvider | None: + """Find a provider supporting the given an image.""" + registry_urls = [f"http://{image.hostname}", f"https://{image.hostname}"] + async with self.session_maker() as session: + stmt = ( + select(schemas.OAuth2ClientORM, schemas.OAuth2ConnectionORM) + .join( + schemas.OAuth2ConnectionORM, + and_( + schemas.OAuth2ConnectionORM.client_id == schemas.OAuth2ClientORM.id, + schemas.OAuth2ConnectionORM.user_id == user.id, + ), + isouter=True, # isouter makes it a left-join, not an outer join + ) + .where(schemas.OAuth2ClientORM.image_registry_url.in_(registry_urls)) + .where(schemas.OAuth2ClientORM.kind.in_(self.supported_image_registry_providers)) + # there could be multiple matching - just take the first arbitrary 🤷 + .order_by(schemas.OAuth2ConnectionORM.updated_at.desc()) + .limit(1) + ) + result = await session.execute(stmt) + row = result.one_or_none() + if row is None or row.OAuth2ClientORM is None: + return None + else: + return models.ImageProvider( + row.OAuth2ClientORM.dump(), + models.ConnectedUser(row.OAuth2ConnectionORM.dump(), user) + if row.OAuth2ConnectionORM is not None + else None, + str(row.OAuth2ClientORM.image_registry_url), # above query makes it non-nil + ) + + async def get_image_repo_client(self, image_provider: models.ImageProvider) -> ImageRepoDockerAPI: + """Create a image repository client for the given user and image provider.""" + url = urlparse(image_provider.registry_url) + repo_api = ImageRepoDockerAPI(hostname=url.netloc, scheme=url.scheme) + if image_provider.is_connected(): + assert image_provider.connected_user is not None + user = image_provider.connected_user.user + conn = image_provider.connected_user.connection + token_set = await self.get_oauth2_connection_token(conn.id, user) + access_token = token_set.access_token + if access_token: + 
logger.debug(f"Use connection {conn.id} to {image_provider.provider.id} for user {user.id}") + repo_api = repo_api.with_oauth2_token(access_token) + return repo_api + async def get_oauth2_app_installations( self, connection_id: ULID, user: base_models.APIUser, pagination: PaginationRequest ) -> models.AppInstallationList: @@ -367,13 +465,18 @@ async def get_oauth2_app_installations( connection, adapter, ): - # NOTE: App installations are only available from GitHub - if connection.client.kind == ProviderKind.github and isinstance(adapter, GitHubAdapter): + # NOTE: App installations are only available from GitHub when using a "GitHub App" + if ( + connection.client.kind == models.ProviderKind.github + and get_github_provider_type(connection.client) == GitHubProviderType.standard_app + and isinstance(adapter, GitHubAdapter) + ): request_url = urljoin(adapter.api_url, "user/installations") params = dict(page=pagination.page, per_page=pagination.per_page) try: response = await oauth2_client.get(request_url, params=params, headers=adapter.api_common_headers) except OAuthError as err: + logger.warning(f"Error getting installations at {request_url}: {err}") if err.error == "bad_refresh_token": raise errors.InvalidTokenError( message="The refresh token for the connected service has expired or is invalid.", @@ -383,6 +486,9 @@ async def get_oauth2_app_installations( raise if response.status_code > 200: + logger.warning( + f"Could not get installations at {request_url}: {response.status_code} - {response.text}" + ) raise errors.UnauthorizedError(message="Could not get installation information.") return adapter.api_validate_app_installations_response(response) @@ -412,7 +518,7 @@ async def get_async_oauth2_client( message=f"OAuth2 connection with id '{connection_id}' does not exist or you do not have access to it." 
# noqa: E501
             )
 
-        if connection.status != ConnectionStatus.connected or connection.token is None:
+        if connection.status != models.ConnectionStatus.connected or connection.token is None:
             raise errors.UnauthorizedError(message=f"OAuth2 connection with id '{connection_id}' is not valid.")
 
         client = connection.client
diff --git a/components/renku_data_services/connected_services/dummy_async_oauth2_client.py b/components/renku_data_services/connected_services/dummy_async_oauth2_client.py
index f944bd94a..3b36fe09c 100644
--- a/components/renku_data_services/connected_services/dummy_async_oauth2_client.py
+++ b/components/renku_data_services/connected_services/dummy_async_oauth2_client.py
@@ -26,10 +26,10 @@ async def get(self, url: str, *args: list, **kwargs: dict) -> Response:
         if parsed.path == "/api/v4/projects/username%2Fmy_repo":
             return self._get_repository_response()
 
-        if parsed.path == "/user/installations":
+        if parsed.path == "/api/v3/user/installations" or parsed.path == "/user/installations":
            return self._get_installations_response()
 
-        return Response(500, json=dict())
+        return Response(500, json={"error": f"path is not expected: {parsed.path}"})
 
     @staticmethod
     def _get_account_response() -> Response:
diff --git a/components/renku_data_services/connected_services/external_models.py b/components/renku_data_services/connected_services/external_models.py
index fd704470b..19cb51b5a 100644
--- a/components/renku_data_services/connected_services/external_models.py
+++ b/components/renku_data_services/connected_services/external_models.py
@@ -5,7 +5,6 @@
 from pydantic import BaseModel
 
 from renku_data_services.connected_services import models
-from renku_data_services.connected_services.apispec import RepositorySelection
 
 
 class GitLabConnectedAccount(BaseModel):
@@ -35,7 +34,7 @@ class GitHubAppInstallation(BaseModel):
 
     id: int
     account: GitHubConnectedAccount
-    repository_selection: RepositorySelection
+    repository_selection: models.RepositorySelection
     suspended_at: datetime | None = None
 
     def to_app_installation(self) -> models.AppInstallation:
@@ -101,3 +100,23 @@ def to_connected_account(self) -> models.ConnectedAccount:
         return models.ConnectedAccount(
             username=" ".join(filter(None, [self.given_name, self.family_name])), web_url=f"mailto:{self.email}"
         )
+
+
+class GenericOIDCConnectedAccount(BaseModel):
+    """OAuth2 connected account model for generic OpenID Connect."""
+
+    # Reference: https://openid.net/specs/openid-connect-core-1_0.html#StandardClaims
+    sub: str
+    name: str | None
+    preferred_username: str | None
+
+    def to_connected_account(self) -> models.ConnectedAccount:
+        """Returns the corresponding ConnectedAccount object."""
+
+        return models.ConnectedAccount(
+            username=self._get_username(),
+            web_url="",
+        )
+
+    def _get_username(self) -> str:
+        return self.preferred_username or self.name or self.sub
diff --git a/components/renku_data_services/connected_services/models.py b/components/renku_data_services/connected_services/models.py
index 5d43e57f9..acd9db813 100644
--- a/components/renku_data_services/connected_services/models.py
+++ b/components/renku_data_services/connected_services/models.py
@@ -2,15 +2,41 @@
 
 from dataclasses import dataclass
 from datetime import UTC, datetime
+from enum import StrEnum
 from typing import Any
 
 from ulid import ULID
 
-from renku_data_services.connected_services.apispec import ConnectionStatus, ProviderKind, RepositorySelection
+from renku_data_services.users.db import APIUser
+
+
+class ProviderKind(StrEnum):
+    """The kind of platform we connect
to.""" + + gitlab = "gitlab" + github = "github" + drive = "drive" + onedrive = "onedrive" + dropbox = "dropbox" + generic_oidc = "generic_oidc" + + +class ConnectionStatus(StrEnum): + """The status of a connection.""" + + connected = "connected" + pending = "pending" + + +class RepositorySelection(StrEnum): + """The repository selection for GitHub applications.""" + + all = "all" + selected = "selected" @dataclass(frozen=True, eq=True, kw_only=True) -class OAuth2Client: +class UnsavedOAuth2Client: """OAuth2 Client model.""" id: str @@ -22,6 +48,14 @@ class OAuth2Client: scope: str url: str use_pkce: bool + image_registry_url: str | None = None + oidc_issuer_url: str | None = None + + +@dataclass(frozen=True, eq=True, kw_only=True) +class OAuth2Client(UnsavedOAuth2Client): + """OAuth2 Client model.""" + created_by_id: str creation_date: datetime updated_at: datetime @@ -39,6 +73,8 @@ class OAuth2ClientPatch: scope: str | None url: str | None use_pkce: bool | None + image_registry_url: str | None + oidc_issuer_url: str | None @dataclass(frozen=True, eq=True, kw_only=True) @@ -49,6 +85,10 @@ class OAuth2Connection: provider_id: str status: ConnectionStatus + def is_connected(self) -> bool: + """Returns whether this connection is in status 'connected'.""" + return self.status == ConnectionStatus.connected + @dataclass(frozen=True, eq=True, kw_only=True) class ConnectedAccount: @@ -115,3 +155,40 @@ class AppInstallationList: total_count: int installations: list[AppInstallation] + + +@dataclass(frozen=True, eq=True) +class ConnectedUser: + """A user and the corresponding oauth2 connection.""" + + connection: OAuth2Connection + user: APIUser + + def is_connected(self) -> bool: + """Returns whether the connection is in status 'connected'.""" + return self.connection.is_connected() + + +@dataclass(frozen=True, eq=True) +class ImageProvider: + """Result when retrieving provider information for an image.""" + + provider: OAuth2Client + connected_user: ConnectedUser | None + registry_url: str + + def is_connected(self) -> bool: + """Returns whether the connection exists and is in status 'connected'.""" + return self.connected_user is not None and self.connected_user.is_connected() + + @property + def connection(self) -> OAuth2Connection | None: + """Return the connection if present.""" + if self.connected_user: + return self.connected_user.connection + else: + return None + + def __str__(self) -> str: + conn = f"connection={self.connection.id}" if self.connection else "connection=None" + return f"ImageProvider(provider={self.provider.id}/{self.provider.kind}, {conn})" diff --git a/components/renku_data_services/connected_services/orm.py b/components/renku_data_services/connected_services/orm.py index 4757a1fab..5db8d2584 100644 --- a/components/renku_data_services/connected_services/orm.py +++ b/components/renku_data_services/connected_services/orm.py @@ -10,7 +10,6 @@ from ulid import ULID from renku_data_services.connected_services import models -from renku_data_services.connected_services.apispec import ConnectionStatus, ProviderKind from renku_data_services.utils.sqlalchemy import ULIDType JSONVariant = JSON().with_variant(JSONB(), "postgresql") @@ -32,7 +31,7 @@ class OAuth2ClientORM(BaseORM): client_id: Mapped[str] = mapped_column("client_id", String(500), repr=False) display_name: Mapped[str] = mapped_column("display_name", String(99)) created_by_id: Mapped[str] = mapped_column("created_by_id", String()) - kind: Mapped[ProviderKind] + kind: Mapped[models.ProviderKind] scope: Mapped[str] = 
mapped_column("scope", String()) url: Mapped[str] = mapped_column("url", String()) use_pkce: Mapped[bool] = mapped_column("use_pkce", Boolean(), server_default=false()) @@ -49,6 +48,8 @@ class OAuth2ClientORM(BaseORM): onupdate=func.now(), nullable=False, ) + image_registry_url: Mapped[str | None] = mapped_column(default=None, nullable=True, server_default=None) + oidc_issuer_url: Mapped[str | None] = mapped_column(default=None, nullable=True, server_default=None) def dump(self, user_is_admin: bool = False) -> models.OAuth2Client: """Create an OAuth2 Client model from the OAuth2ClientORM. @@ -68,6 +69,8 @@ def dump(self, user_is_admin: bool = False) -> models.OAuth2Client: created_by_id=self.created_by_id, creation_date=self.creation_date, updated_at=self.updated_at, + image_registry_url=self.image_registry_url, + oidc_issuer_url=self.oidc_issuer_url, ) @@ -81,7 +84,7 @@ class OAuth2ConnectionORM(BaseORM): client: Mapped[OAuth2ClientORM] = relationship(init=False, repr=False) token: Mapped[dict[str, Any] | None] = mapped_column("token", JSONVariant) state: Mapped[str | None] = mapped_column("state", String(), index=True, unique=True) - status: Mapped[ConnectionStatus] + status: Mapped[models.ConnectionStatus] code_verifier: Mapped[str | None] = mapped_column("code_verifier", String()) next_url: Mapped[str | None] = mapped_column("next_url", String()) creation_date: Mapped[datetime] = mapped_column( diff --git a/components/renku_data_services/connected_services/provider_adapters.py b/components/renku_data_services/connected_services/provider_adapters.py index 98826c011..b49fea2a2 100644 --- a/components/renku_data_services/connected_services/provider_adapters.py +++ b/components/renku_data_services/connected_services/provider_adapters.py @@ -1,14 +1,17 @@ """Adapters for each kind of OAuth2 client.""" +import logging from abc import ABC, abstractmethod +from typing import Any from urllib.parse import urljoin, urlparse, urlunparse -from httpx import Response +from httpx import Client, Response from renku_data_services import errors from renku_data_services.connected_services import external_models, models from renku_data_services.connected_services import orm as schemas -from renku_data_services.connected_services.apispec import ProviderKind + +logger = logging.getLogger(__name__) class ProviderAdapter(ABC): @@ -17,7 +20,7 @@ class ProviderAdapter(ABC): user_info_endpoint = "user" user_info_method = "GET" - def __init__(self, client_url: str) -> None: + def __init__(self, client_url: str, **kwargs: Any) -> None: self.client_url = client_url @property @@ -78,7 +81,7 @@ def api_validate_account_response(self, response: Response) -> models.ConnectedA class GitHubAdapter(ProviderAdapter): - """Adapter for GitLab OAuth2 clients.""" + """Adapter for GitHub OAuth2 clients.""" @property def authorization_url(self) -> str: @@ -94,6 +97,9 @@ def token_endpoint_url(self) -> str: def api_url(self) -> str: """The URL used for API calls on the Resource Server.""" url = urlparse(self.client_url) + # See: https://docs.github.com/en/apps/sharing-github-apps/making-your-github-app-available-for-github-enterprise-server#the-app-code-must-use-the-correct-urls + if url.netloc != "github.com": + return urljoin(self.client_url, "api/v3/") url = url._replace(netloc=f"api.{url.netloc}") return urlunparse(url) @@ -229,12 +235,91 @@ def api_validate_account_response(self, response: Response) -> models.ConnectedA return external_models.DropboxConnectedAccount.model_validate(response.json()).to_connected_account() 
-_adapter_map: dict[ProviderKind, type[ProviderAdapter]] = { - ProviderKind.gitlab: GitLabAdapter, - ProviderKind.github: GitHubAdapter, - ProviderKind.drive: GoogleDriveAdapter, - ProviderKind.onedrive: OneDriveAdapter, - ProviderKind.dropbox: DropboxAdapter, +class GenericOidcAdapter(ProviderAdapter): + """Adapter for generic OpenID Connect clients.""" + + _httpx_client: Client | None = None + _configurations: dict[str, dict[str, str]] = dict() + + def __init__(self, client_url: str, oidc_issuer_url: str, **kwargs: Any) -> None: + super().__init__(client_url, **kwargs) + self.oidc_issuer_url = oidc_issuer_url + + @property + def authorization_url(self) -> str: + """The authorization URL for the OAuth2 protocol.""" + return self.__get_configuration()["authorization_endpoint"] + + @property + def token_endpoint_url(self) -> str: + """The token endpoint URL for the OAuth2 protocol.""" + return self.__get_configuration()["token_endpoint"] + + @property + def api_url(self) -> str: + """The URL used for API calls on the Resource Server.""" + return self.client_url + + @property + def user_info_endpoint(self) -> str: + """The URL of the user info endpoint.""" + return self.__get_configuration()["userinfo_endpoint"] + + @user_info_endpoint.setter + def user_info_endpoint(self, value: str) -> None: + """Setter for user_info_endpoint.""" + raise errors.ProgrammingError(message="Cannot set user_info_endpoint.") + + def api_validate_account_response(self, response: Response) -> models.ConnectedAccount: + """Validates and returns the connected account response from the Resource Server.""" + return external_models.GenericOIDCConnectedAccount.model_validate(response.json()).to_connected_account() + + def __get_configuration(self) -> dict[str, str]: + config = self.__get_configurations().get(self.oidc_issuer_url, None) + if config is None: + config = self.__discover() + self.__get_configurations()[self.oidc_issuer_url] = config + return config + + def __discover(self) -> dict[str, str]: + """Performs OpenID Connect discovery from the issuer URL. 
+ + Reference: https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderConfig + See section 4: "Obtaining OpenID Provider Configuration Information" + """ + issuer_url = self.oidc_issuer_url + if not issuer_url: + raise errors.ValidationError(message="Issuer URL not configured for generic OIDC client.") + issuer_url = issuer_url if issuer_url.endswith("/") else issuer_url + "/" + request_url = urljoin(issuer_url, ".well-known/openid-configuration") + res = self.__get_httpx_client().get(request_url, headers=[("Accept", "application/json")]) + if res.status_code != 200: + raise errors.ProgrammingError(message=f"Could not read OIDC issuer configuration from {request_url}") + res_json = res.json() + return { + "authorization_endpoint": res_json["authorization_endpoint"], + "token_endpoint": res_json["token_endpoint"], + "userinfo_endpoint": res_json.get("userinfo_endpoint", ""), + } + + @classmethod + def __get_configurations(cls) -> dict[str, dict[str, str]]: + return cls._configurations + + @classmethod + def __get_httpx_client(cls) -> Client: + if cls._httpx_client is None: + cls._httpx_client = Client() + return cls._httpx_client + + +_adapter_map: dict[models.ProviderKind, type[ProviderAdapter]] = { + models.ProviderKind.gitlab: GitLabAdapter, + models.ProviderKind.github: GitHubAdapter, + models.ProviderKind.drive: GoogleDriveAdapter, + models.ProviderKind.onedrive: OneDriveAdapter, + models.ProviderKind.dropbox: DropboxAdapter, + models.ProviderKind.generic_oidc: GenericOidcAdapter, } @@ -246,4 +331,4 @@ def get_provider_adapter(client: schemas.OAuth2ClientORM) -> ProviderAdapter: raise errors.ValidationError(message=f"URL not defined for provider {client.id}.") adapter_class = _adapter_map[client.kind] - return adapter_class(client_url=client.url) + return adapter_class(client_url=client.url, oidc_issuer_url=client.oidc_issuer_url) diff --git a/components/renku_data_services/connected_services/utils.py b/components/renku_data_services/connected_services/utils.py index e2c3c854d..7080b1a87 100644 --- a/components/renku_data_services/connected_services/utils.py +++ b/components/renku_data_services/connected_services/utils.py @@ -2,9 +2,42 @@ import base64 import random +from enum import StrEnum + +from renku_data_services.app_config import logging +from renku_data_services.connected_services.apispec import Provider +from renku_data_services.connected_services.apispec import ProviderKind as ApiProviderKind +from renku_data_services.connected_services.models import OAuth2Client, ProviderKind +from renku_data_services.connected_services.orm import OAuth2ClientORM + +logger = logging.getLogger(__name__) def generate_code_verifier(size: int = 48) -> str: """Returns a randomly generated code for use in PKCE.""" rand = random.SystemRandom() return base64.b64encode(rand.randbytes(size)).decode() + + +class GitHubProviderType(StrEnum): + """Distinguish between the two possible authentication features at GitHub.""" + + oauth_app = "oauth_app" + standard_app = "standard_app" + + +# TODO: add a new type of integration 'github_oauth' instead of running this logic +def get_github_provider_type(c: OAuth2Client | OAuth2ClientORM | Provider) -> GitHubProviderType | None: + """GitHub may use two different auth features: "oauth app" and "github app". + + Currently these two are defined as ProviderKind.github and can be + distinguished by looking at the `image_registry_url`. If this url + is set, it is the "oauth app" type and otherwise the standard + "github app". 
+ """ + if c.kind == ProviderKind.github or c.kind == ApiProviderKind.github: + result = GitHubProviderType.oauth_app if c.image_registry_url else GitHubProviderType.standard_app + logger.debug(f"Using github provider type: {result} for {c.kind}/{c.image_registry_url}") + return result + else: + return None diff --git a/components/renku_data_services/crc/api.spec.yaml b/components/renku_data_services/crc/api.spec.yaml index d0cc060f2..7a38b92e3 100644 --- a/components/renku_data_services/crc/api.spec.yaml +++ b/components/renku_data_services/crc/api.spec.yaml @@ -1280,6 +1280,8 @@ components: $ref: "#/components/schemas/PublicFlag" default: $ref: "#/components/schemas/DefaultFlag" + remote: + $ref: "#/components/schemas/RemoteConfiguration" idle_threshold: $ref: "#/components/schemas/IdleThreshold" hibernation_threshold: @@ -1294,6 +1296,7 @@ components: gpu: 10 public: true default: false + remote: false classes: - name: "resource class 1" cpu: 1.5 @@ -1325,6 +1328,8 @@ components: $ref: "#/components/schemas/PublicFlag" default: $ref: "#/components/schemas/DefaultFlag" + remote: + $ref: "#/components/schemas/RemoteConfigurationPatch" idle_threshold: $ref: "#/components/schemas/IdleThreshold" hibernation_threshold: @@ -1355,6 +1360,8 @@ components: $ref: "#/components/schemas/PublicFlag" default: $ref: "#/components/schemas/DefaultFlag" + remote: + $ref: "#/components/schemas/RemoteConfiguration" idle_threshold: $ref: "#/components/schemas/IdleThreshold" hibernation_threshold: @@ -1370,6 +1377,7 @@ components: id: 518c7d27-b5db-4aee-855f-f4638aded2d4 public: true default: false + remote: false classes: - name: "resource class 1" cpu: 1.5 @@ -1403,6 +1411,8 @@ components: $ref: "#/components/schemas/PublicFlag" default: $ref: "#/components/schemas/DefaultFlag" + remote: + $ref: "#/components/schemas/RemoteConfiguration" idle_threshold: $ref: "#/components/schemas/IdleThreshold" hibernation_threshold: @@ -1422,6 +1432,7 @@ components: id: 518c7d27-b5db-4aee-855f-f4638aded2d4 public: false default: false + remote: false classes: - name: "resource class 1" cpu: 1.5 @@ -1456,6 +1467,8 @@ components: $ref: "#/components/schemas/PublicFlag" default: $ref: "#/components/schemas/DefaultFlag" + remote: + $ref: "#/components/schemas/RemoteConfiguration" idle_threshold: $ref: "#/components/schemas/IdleThreshold" hibernation_threshold: @@ -1471,6 +1484,7 @@ components: id: 518c7d27-b5db-4aee-855f-f4638aded2d4 public: false default: false + remote: false classes: - name: "resource class 1" cpu: 1.5 @@ -1681,6 +1695,87 @@ components: description: A resource pool whose classes can be accessed by anyone default: false example: false + RemoteConfiguration: + type: object + description: | + The configuration used by Renku to start sessions remotely. + If this field is present, the corresponding resource pool starts remote sessions. 
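As an aside before the oneOf variants that follow: a concrete firecrest value for the new remote field, assembled from the field examples in this spec (a hypothetical payload fragment; the model changes in crc/models.py below additionally forbid remote pools from being public or default):

    # Hypothetical "remote" value for a resource pool using the firecrest kind.
    remote = {
        "kind": "firecrest",  # required; the only supported kind so far
        "provider_id": "my-provider",  # optional oauth2 provider for seamless authentication
        "api_url": "https://api.cscs.ch/hpc/firecrest/v2",  # required
        "system_name": "eiger",  # required
        "partition": "normal",  # optional; partition used when submitting jobs
    }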
+      # NOTE: Using oneOf here allows describing more types of remote sessions
+      oneOf:
+        - $ref: "#/components/schemas/RemoteConfigurationFirecrest"
+    RemoteConfigurationFirecrest:
+      type: object
+      description: |
+        The configuration for starting sessions remotely using
+        the FirecREST API
+      additionalProperties: false
+      properties:
+        kind:
+          type: string
+          enum: [ "firecrest" ]
+          description: Kind of remote resource pool
+          example: "firecrest"
+        provider_id:
+          $ref: "#/components/schemas/RemoteConfigurationFirecrestProviderId"
+        api_url:
+          $ref: "#/components/schemas/RemoteConfigurationFirecrestApiUrl"
+        system_name:
+          $ref: "#/components/schemas/RemoteConfigurationFirecrestSystemName"
+        partition:
+          $ref: "#/components/schemas/RemoteConfigurationFirecrestPartition"
+      required:
+        - kind
+        - api_url
+        - system_name
+    RemoteConfigurationFirecrestProviderId:
+      type: string
+      description: |
+        The ID of a provider (see oauth2 section).
+        This is used to allow seamless authentication using Renku integrations.
+      example: "my-provider"
+    RemoteConfigurationFirecrestApiUrl:
+      type: string
+      description: The base URL of the FirecREST API
+      example: "https://api.cscs.ch/hpc/firecrest/v2"
+    RemoteConfigurationFirecrestSystemName:
+      type: string
+      description: The name of the system to use with the FirecREST API
+      example: "eiger"
+    RemoteConfigurationFirecrestPartition:
+      type: string
+      description: The partition to use when submitting jobs
+      example: "normal"
+    RemoteConfigurationPatch:
+      type: object
+      description: |
+        Patch for the configuration used by Renku to start sessions remotely
+      oneOf:
+        - $ref: "#/components/schemas/RemoteConfigurationPatchReset"
+        - $ref: "#/components/schemas/RemoteConfigurationFirecrestPatch"
+    RemoteConfigurationPatchReset:
+      type: object
+      description: Value used to unset the remote field (empty object).
+ additionalProperties: false + RemoteConfigurationFirecrestPatch: + type: object + description: | + The configuration for starting sessions remotely using + the FirecREST API + additionalProperties: false + properties: + kind: + type: string + enum: [ "firecrest" ] + description: Kind of remote resource pool + example: "firecrest" + provider_id: + $ref: "#/components/schemas/RemoteConfigurationFirecrestProviderId" + api_url: + $ref: "#/components/schemas/RemoteConfigurationFirecrestApiUrl" + system_name: + $ref: "#/components/schemas/RemoteConfigurationFirecrestSystemName" + partition: + $ref: "#/components/schemas/RemoteConfigurationFirecrestPartition" IdleThreshold: type: integer description: A threshold in seconds after which a session gets hibernated (0 means no threshold) diff --git a/components/renku_data_services/crc/apispec.py b/components/renku_data_services/crc/apispec.py index b6b73859a..eefb45e44 100644 --- a/components/renku_data_services/crc/apispec.py +++ b/components/renku_data_services/crc/apispec.py @@ -1,11 +1,11 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2025-07-23T11:50:42+00:00 +# timestamp: 2025-09-24T13:34:22+00:00 from __future__ import annotations from enum import Enum -from typing import List, Optional +from typing import List, Optional, Union from pydantic import ConfigDict, Field, RootModel from renku_data_services.crc.apispec_base import BaseAPISpec @@ -48,6 +48,46 @@ class IntegerIds(RootModel[List[IntegerId]]): root: List[IntegerId] = Field(..., examples=[[1, 3, 5]], min_length=1) +class Kind(Enum): + firecrest = "firecrest" + + +class RemoteConfigurationPatchReset(BaseAPISpec): + pass + model_config = ConfigDict( + extra="forbid", + ) + + +class RemoteConfigurationFirecrestPatch(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + kind: Optional[Kind] = Field( + None, description="Kind of remote resource pool", examples=["firecrest"] + ) + provider_id: Optional[str] = Field( + None, + description="The ID of a provider (see oauth2 section).\nThis is used to allow seamless authentication using Renku integrations.\n", + examples=["my-provider"], + ) + api_url: Optional[str] = Field( + None, + description="The base URL of the FirecREST API", + examples=["https://api.cscs.ch/hpc/firecrest/v2"], + ) + system_name: Optional[str] = Field( + None, + description="The name of the system to use with the FirecREST API", + examples=["eiger"], + ) + partition: Optional[str] = Field( + None, + description="The partition to use when submitting jobs", + examples=["normal"], + ) + + class K8sLabel(RootModel[str]): root: str = Field( ..., @@ -373,6 +413,35 @@ class QuotaWithOptionalId(BaseAPISpec): ) +class RemoteConfigurationFirecrest(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + kind: Kind = Field( + ..., description="Kind of remote resource pool", examples=["firecrest"] + ) + provider_id: Optional[str] = Field( + None, + description="The ID of a provider (see oauth2 section).\nThis is used to allow seamless authentication using Renku integrations.\n", + examples=["my-provider"], + ) + api_url: str = Field( + ..., + description="The base URL of the FirecREST API", + examples=["https://api.cscs.ch/hpc/firecrest/v2"], + ) + system_name: str = Field( + ..., + description="The name of the system to use with the FirecREST API", + examples=["eiger"], + ) + partition: Optional[str] = Field( + None, + description="The partition to use when submitting jobs", + examples=["normal"], + ) + + class 
ResourceClass(BaseAPISpec): model_config = ConfigDict( extra="forbid", @@ -696,28 +765,43 @@ class ResourceClassesWithIdResponse(RootModel[List[ResourceClassWithId]]): ) -class ResourcePool(BaseAPISpec): +class ResourcePoolPatch(BaseAPISpec): model_config = ConfigDict( extra="forbid", ) - quota: Optional[QuotaWithOptionalId] = None - classes: List[ResourceClass] - name: str = Field( - ..., + quota: Optional[QuotaPatch] = None + classes: Optional[List[ResourceClassPatchWithId]] = Field( + None, + examples=[ + [ + {"name": "resource class 1", "id": 1}, + {"cpu": 4.5, "max_storage": 10000, "id": 2}, + ] + ], + min_length=1, + ) + name: Optional[str] = Field( + None, description="A name for a specific resource", examples=["the name of a resource"], min_length=5, ) - public: bool = Field( - ..., + public: Optional[bool] = Field( + False, description="A resource pool whose classes can be accessed by anyone", examples=[False], ) - default: bool = Field( - ..., + default: Optional[bool] = Field( + False, description="A default selection for resource classes or resource pools", examples=[False], ) + remote: Optional[ + Union[RemoteConfigurationPatchReset, RemoteConfigurationFirecrestPatch] + ] = Field( + None, + description="Patch for the configuration used by to start sessions remotely\n", + ) idle_threshold: Optional[int] = Field( None, description="A threshold in seconds after which a session gets hibernated (0 means no threshold)", @@ -739,37 +823,32 @@ class ResourcePool(BaseAPISpec): ) -class ResourcePoolPatch(BaseAPISpec): +class ResourcePool(BaseAPISpec): model_config = ConfigDict( extra="forbid", ) - quota: Optional[QuotaPatch] = None - classes: Optional[List[ResourceClassPatchWithId]] = Field( - None, - examples=[ - [ - {"name": "resource class 1", "id": 1}, - {"cpu": 4.5, "max_storage": 10000, "id": 2}, - ] - ], - min_length=1, - ) - name: Optional[str] = Field( - None, + quota: Optional[QuotaWithOptionalId] = None + classes: List[ResourceClass] + name: str = Field( + ..., description="A name for a specific resource", examples=["the name of a resource"], min_length=5, ) - public: Optional[bool] = Field( - False, + public: bool = Field( + ..., description="A resource pool whose classes can be accessed by anyone", examples=[False], ) - default: Optional[bool] = Field( - False, + default: bool = Field( + ..., description="A default selection for resource classes or resource pools", examples=[False], ) + remote: Optional[RemoteConfigurationFirecrest] = Field( + None, + description="The configuration used by Renku to start sessions remotely.\nIf this field is present, the corresponding resource pool starts remote sessions.\n", + ) idle_threshold: Optional[int] = Field( None, description="A threshold in seconds after which a session gets hibernated (0 means no threshold)", @@ -839,6 +918,10 @@ class ResourcePoolPut(BaseAPISpec): description="A default selection for resource classes or resource pools", examples=[False], ) + remote: Optional[RemoteConfigurationFirecrest] = Field( + None, + description="The configuration used by Renku to start sessions remotely.\nIf this field is present, the corresponding resource pool starts remote sessions.\n", + ) idle_threshold: Optional[int] = Field( None, description="A threshold in seconds after which a session gets hibernated (0 means no threshold)", @@ -888,6 +971,10 @@ class ResourcePoolWithId(BaseAPISpec): description="A default selection for resource classes or resource pools", examples=[False], ) + remote: Optional[RemoteConfigurationFirecrest] = 
Field( + None, + description="The configuration used by Renku to start sessions remotely.\nIf this field is present, the corresponding resource pool starts remote sessions.\n", + ) idle_threshold: Optional[int] = Field( None, description="A threshold in seconds after which a session gets hibernated (0 means no threshold)", @@ -931,6 +1018,10 @@ class ResourcePoolWithIdFiltered(BaseAPISpec): description="A default selection for resource classes or resource pools", examples=[False], ) + remote: Optional[RemoteConfigurationFirecrest] = Field( + None, + description="The configuration used by Renku to start sessions remotely.\nIf this field is present, the corresponding resource pool starts remote sessions.\n", + ) idle_threshold: Optional[int] = Field( None, description="A threshold in seconds after which a session gets hibernated (0 means no threshold)", diff --git a/components/renku_data_services/crc/blueprints.py b/components/renku_data_services/crc/blueprints.py index 96ad1f7d5..48514277c 100644 --- a/components/renku_data_services/crc/blueprints.py +++ b/components/renku_data_services/crc/blueprints.py @@ -14,9 +14,15 @@ from renku_data_services.base_api.misc import validate_body_root_model, validate_db_ids, validate_query from renku_data_services.base_models.validation import validated_json from renku_data_services.crc import apispec, models -from renku_data_services.crc.core import validate_cluster, validate_cluster_patch +from renku_data_services.crc.core import ( + validate_cluster, + validate_cluster_patch, + validate_remote, + validate_remote_patch, + validate_remote_put, +) from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository, UserRepository -from renku_data_services.k8s.quota import QuotaRepository +from renku_data_services.k8s.db import QuotaRepository from renku_data_services.users.db import UserRepo as KcUserRepo from renku_data_services.users.models import UserInfo @@ -52,9 +58,15 @@ def post(self) -> BlueprintFactoryResponse: async def _post(_: Request, user: base_models.APIUser, body: apispec.ResourcePool) -> HTTPResponse: cluster = None if body.cluster_id is not None: - cluster = await self.cluster_repo.select(api_user=user, cluster_id=ULID.from_str(body.cluster_id)) - rp = models.ResourcePool.from_dict({**body.model_dump(exclude_none=True), "cluster": cluster}) - + cluster = await self.cluster_repo.select(ULID.from_str(body.cluster_id)) + remote = None + if body.remote: + validate_remote(body=body.remote) + remote = body.remote.model_dump(exclude_none=True, mode="json") + body.remote = None + rp = models.ResourcePool.from_dict( + {**body.model_dump(exclude_none=True), "cluster": cluster, "remote": remote} + ) res = await self.rp_repo.insert_resource_pool(api_user=user, resource_pool=rp) return validated_json(apispec.ResourcePoolWithId, res, status=201) @@ -102,9 +114,14 @@ def put(self) -> BlueprintFactoryResponse: async def _put( _: Request, user: base_models.APIUser, resource_pool_id: int, body: apispec.ResourcePoolPut ) -> HTTPResponse: + # We need to manually set remote to a RemoteConfigurationPatch object + remote = validate_remote_put(body=body.remote) + body.remote = None + res = await self.rp_repo.update_resource_pool( api_user=user, id=resource_pool_id, + remote=remote, **body.model_dump(exclude_none=True), ) if res is None: @@ -125,9 +142,15 @@ def patch(self) -> BlueprintFactoryResponse: async def _patch( _: Request, user: base_models.APIUser, resource_pool_id: int, body: apispec.ResourcePoolPatch ) -> HTTPResponse: + remote = None + 
if body.remote: + remote = validate_remote_patch(body=body.remote) + body.remote = None + res = await self.rp_repo.update_resource_pool( api_user=user, id=resource_pool_id, + remote=remote, **body.model_dump(exclude_none=True), ) if res is None: @@ -614,7 +637,7 @@ def get(self) -> BlueprintFactoryResponse: @authenticate(self.authenticator) @only_admins async def _handler(_request: Request, user: base_models.APIUser, cluster_id: ULID) -> HTTPResponse: - cluster = await self.repo.select(user, cluster_id) + cluster = await self.repo.select(cluster_id) return validated_json(apispec.ClusterWithId, cluster, status=200) diff --git a/components/renku_data_services/crc/core.py b/components/renku_data_services/crc/core.py index 6b40f79aa..ed9a61861 100644 --- a/components/renku_data_services/crc/core.py +++ b/components/renku_data_services/crc/core.py @@ -1,14 +1,18 @@ """crc modules converters and validators.""" +from urllib.parse import urlparse + +from renku_data_services.base_models import RESET from renku_data_services.crc import apispec, models +from renku_data_services.errors import errors -def validate_cluster(body: apispec.Cluster) -> models.Cluster: +def validate_cluster(body: apispec.Cluster) -> models.ClusterSettings: """Convert a REST API Cluster object to a model Cluster object.""" - return models.Cluster( + return models.ClusterSettings( name=body.name, config_name=body.config_name, - session_protocol=body.session_protocol, + session_protocol=models.SessionProtocol(body.session_protocol.value), session_host=body.session_host, session_port=body.session_port, session_path=body.session_path, @@ -25,7 +29,9 @@ def validate_cluster_patch(patch: apispec.ClusterPatch) -> models.ClusterPatch: return models.ClusterPatch( name=patch.name, config_name=patch.config_name, - session_protocol=patch.session_protocol, + session_protocol=models.SessionProtocol(patch.session_protocol.value) + if patch.session_protocol is not None + else None, session_host=patch.session_host, session_port=patch.session_port, session_path=patch.session_path, @@ -36,3 +42,70 @@ def validate_cluster_patch(patch: apispec.ClusterPatch) -> models.ClusterPatch: session_storage_class=patch.session_storage_class, service_account_name=patch.service_account_name, ) + + +def validate_remote(body: apispec.RemoteConfigurationFirecrest) -> models.RemoteConfigurationFirecrest: + """Validate a remote configuration object.""" + kind = models.RemoteConfigurationKind(body.kind.value) + if kind != models.RemoteConfigurationKind.firecrest: + raise errors.ValidationError(message=f"The kind '{kind}' of remote configuration is not supported.", quiet=True) + validate_firecrest_api_url(body.api_url) + return models.RemoteConfigurationFirecrest( + kind=kind, + provider_id=body.provider_id, + api_url=body.api_url, + system_name=body.system_name, + partition=body.partition, + ) + + +def validate_remote_put( + body: apispec.RemoteConfigurationFirecrest | None, +) -> models.RemoteConfigurationPatch: + """Validate the PUT update to a remote configuration object.""" + if body is None: + return RESET + remote = validate_remote(body=body) + return models.RemoteConfigurationFirecrestPatch( + kind=remote.kind, + provider_id=remote.provider_id, + api_url=remote.api_url, + system_name=remote.system_name, + partition=remote.partition, + ) + + +def validate_remote_patch( + body: apispec.RemoteConfigurationPatchReset | apispec.RemoteConfigurationFirecrestPatch, +) -> models.RemoteConfigurationPatch: + """Validate the patch to a remote configuration object.""" + 
if isinstance(body, apispec.RemoteConfigurationPatchReset): + return RESET + kind = models.RemoteConfigurationKind(body.kind.value) if body.kind else None + if kind and kind != models.RemoteConfigurationKind.firecrest: + raise errors.ValidationError(message=f"The kind '{kind}' of remote configuration is not supported.", quiet=True) + if body.api_url: + validate_firecrest_api_url(body.api_url) + return models.RemoteConfigurationFirecrestPatch( + kind=kind, + provider_id=body.provider_id, + api_url=body.api_url, + system_name=body.system_name, + partition=body.partition, + ) + + +def validate_firecrest_api_url(url: str) -> None: + """Validate the URL to the FirecREST API.""" + parsed = urlparse(url) + if not parsed.netloc: + raise errors.ValidationError( + message=f"The host for the firecrest api url {url} is not valid, expected a non-empty value.", + quiet=True, + ) + accepted_schemes = ["https"] + if parsed.scheme not in accepted_schemes: + raise errors.ValidationError( + message=f"The scheme for the firecrest api url {url} is not valid, expected one of {accepted_schemes}", + quiet=True, + ) diff --git a/components/renku_data_services/crc/db.py b/components/renku_data_services/crc/db.py index a390fb696..0cfbfd0f5 100644 --- a/components/renku_data_services/crc/db.py +++ b/components/renku_data_services/crc/db.py @@ -20,12 +20,12 @@ import renku_data_services.base_models as base_models from renku_data_services import errors +from renku_data_services.base_models import RESET from renku_data_services.crc import models from renku_data_services.crc import orm as schemas -from renku_data_services.crc.apispec import Protocol as CrcProtocol -from renku_data_services.crc.models import Cluster, ClusterPatch, SavedCluster +from renku_data_services.crc.models import ClusterPatch, ClusterSettings, SavedClusterSettings, SessionProtocol from renku_data_services.crc.orm import ClusterORM -from renku_data_services.k8s.quota import QuotaRepository +from renku_data_services.k8s.db import QuotaRepository from renku_data_services.users.db import UserRepo @@ -47,15 +47,13 @@ def _resource_pool_access_control( api_user_has_default_pool_access = not_( # NOTE: The only way to check that a user is allowed to access the default pool is that such a # record does NOT EXIST in the database - select(schemas.RPUserORM.no_default_access) - .where( - and_(schemas.RPUserORM.keycloak_id == api_user.id, schemas.RPUserORM.no_default_access == true()) - ) + select(schemas.UserORM.no_default_access) + .where(and_(schemas.UserORM.keycloak_id == api_user.id, schemas.UserORM.no_default_access == true())) .exists() ) - output = output.join(schemas.RPUserORM, schemas.ResourcePoolORM.users, isouter=True).where( + output = output.join(schemas.UserORM, schemas.ResourcePoolORM.users, isouter=True).where( or_( - schemas.RPUserORM.keycloak_id == api_user.id, # the user is part of the pool + schemas.UserORM.keycloak_id == api_user.id, # the user is part of the pool and_( # the pool is not default but is public schemas.ResourcePoolORM.default != true(), schemas.ResourcePoolORM.public == true() ), @@ -86,15 +84,13 @@ def _classes_user_access_control( api_user_has_default_pool_access = not_( # NOTE: The only way to check that a user is allowed to access the default pool is that such a # record does NOT EXIST in the database - select(schemas.RPUserORM.no_default_access) - .where( - and_(schemas.RPUserORM.keycloak_id == api_user.id, schemas.RPUserORM.no_default_access == true()) - ) + select(schemas.UserORM.no_default_access) + 
.where(and_(schemas.UserORM.keycloak_id == api_user.id, schemas.UserORM.no_default_access == true())) .exists() ) - output = output.join(schemas.RPUserORM, schemas.ResourcePoolORM.users, isouter=True).where( + output = output.join(schemas.UserORM, schemas.ResourcePoolORM.users, isouter=True).where( or_( - schemas.RPUserORM.keycloak_id == api_user.id, # the user is part of the pool + schemas.UserORM.keycloak_id == api_user.id, # the user is part of the pool and_( # the pool is not default but is public schemas.ResourcePoolORM.default != true(), schemas.ResourcePoolORM.public == true() ), @@ -109,7 +105,7 @@ def _classes_user_access_control( pass case False, _: # The user is not logged in, they can see only the classes from public resource pools - output = output.join(schemas.RPUserORM, schemas.ResourcePoolORM.users, isouter=True).where( + output = output.join(schemas.UserORM, schemas.ResourcePoolORM.users, isouter=True).where( schemas.ResourcePoolORM.public == true(), ) return output @@ -311,6 +307,8 @@ async def insert_resource_pool( raise errors.ValidationError( message="There can only be one default resource pool and one already exists." ) + if not orm.remote_provider_id: + orm.remote_provider_id = None session.add(orm) await session.flush() @@ -423,9 +421,7 @@ async def update_resource_pool(self, api_user: base_models.APIUser, id: int, **k cluster = None if cluster_id is not None: - cluster = await self.__cluster_repo.select( - api_user=api_user, cluster_id=ULID.from_str(cluster_id) - ) + cluster = await self.__cluster_repo.select(ULID.from_str(cluster_id)) rp.cluster_id = cluster_id new_rp_model = new_rp_model.update(cluster=cluster) @@ -475,6 +471,22 @@ async def update_resource_pool(self, api_user: base_models.APIUser, id: int, **k api_user, resource_pool_id=id, resource_class_id=class_id, **cls ) ) + case "remote": + if val is None: + continue + if val is RESET: + rp.remote_provider_id = None + rp.remote_json = None + new_rp_model = new_rp_model.update(remote=None) + continue + if isinstance(val, models.RemoteConfigurationFirecrestPatch): + assert new_rp_model.remote is not None + rp.remote_provider_id = val.provider_id + remote_json = new_rp_model.remote.to_dict() + del remote_json["provider_id"] + rp.remote_json = remote_json + continue + raise errors.ProgrammingError(message=f"Unexpected update value for field remote: {val}") case _: pass new_classes = await gather(*new_classes_coroutines) @@ -653,7 +665,7 @@ async def delete_affinities(self, api_user: base_models.APIUser, resource_pool_i @dataclass -class RespositoryUsers: +class Respository2Users: """Information about which users can access a specific resource pool.""" resource_pool_id: int @@ -677,7 +689,7 @@ async def get_resource_pool_users( api_user: base_models.APIUser, resource_pool_id: int, keycloak_id: Optional[str] = None, - ) -> RespositoryUsers: + ) -> Respository2Users: """Get users of a specific resource pool from the database.""" async with self.session_maker() as session, session.begin(): stmt = ( @@ -688,7 +700,7 @@ async def get_resource_pool_users( if keycloak_id is not None: stmt = stmt.join(schemas.ResourcePoolORM.users, isouter=True).where( or_( - schemas.RPUserORM.keycloak_id == keycloak_id, + schemas.UserORM.keycloak_id == keycloak_id, schemas.ResourcePoolORM.public == true(), schemas.ResourceClassORM.default == true(), ) @@ -700,15 +712,15 @@ async def get_resource_pool_users( specific_user: base_models.User | None = None if keycloak_id: specific_user_res = ( - await 
session.execute(select(schemas.RPUserORM).where(schemas.RPUserORM.keycloak_id == keycloak_id)) + await session.execute(select(schemas.UserORM).where(schemas.UserORM.keycloak_id == keycloak_id)) ).scalar_one_or_none() specific_user = None if not specific_user_res else specific_user_res.dump() allowed: list[base_models.User] = [] disallowed: list[base_models.User] = [] if rp.default: - disallowed_stmt = select(schemas.RPUserORM).where(schemas.RPUserORM.no_default_access == true()) + disallowed_stmt = select(schemas.UserORM).where(schemas.UserORM.no_default_access == true()) if keycloak_id: - disallowed_stmt = disallowed_stmt.where(schemas.RPUserORM.keycloak_id == keycloak_id) + disallowed_stmt = disallowed_stmt.where(schemas.UserORM.keycloak_id == keycloak_id) disallowed_res = await session.execute(disallowed_stmt) disallowed = [user.dump() for user in disallowed_res.scalars().all()] if specific_user and specific_user not in disallowed: @@ -718,7 +730,7 @@ async def get_resource_pool_users( allowed = [specific_user] elif not rp.public and not rp.default: allowed = [user.dump() for user in rp.users] - return RespositoryUsers(rp.id, allowed, disallowed) + return Respository2Users(rp.id, allowed, disallowed) async def get_user_resource_pools( self, @@ -738,7 +750,7 @@ async def get_user_resource_pools( stmt = stmt.where( or_( schemas.ResourcePoolORM.public == true(), - schemas.ResourcePoolORM.users.any(schemas.RPUserORM.keycloak_id == keycloak_id), + schemas.ResourcePoolORM.users.any(schemas.UserORM.keycloak_id == keycloak_id), ) ) if resource_pool_name is not None: @@ -765,14 +777,14 @@ async def update_user_resource_pools( if kc_user is None: raise errors.MissingResourceError(message=f"The user with ID {keycloak_id} does not exist") stmt = ( - select(schemas.RPUserORM) - .where(schemas.RPUserORM.keycloak_id == keycloak_id) - .options(selectinload(schemas.RPUserORM.resource_pools)) + select(schemas.UserORM) + .where(schemas.UserORM.keycloak_id == keycloak_id) + .options(selectinload(schemas.UserORM.resource_pools)) ) res = await session.execute(stmt) user = res.scalars().first() if user is None: - user = schemas.RPUserORM(keycloak_id=keycloak_id) + user = schemas.UserORM(keycloak_id=keycloak_id) session.add(user) stmt_rp = ( select(schemas.ResourcePoolORM) @@ -816,9 +828,9 @@ async def delete_resource_pool_user( """Remove a user from a specific resource pool.""" async with self.session_maker() as session, session.begin(): sub = ( - select(schemas.RPUserORM.id) - .join(schemas.ResourcePoolORM, schemas.RPUserORM.resource_pools) - .where(schemas.RPUserORM.keycloak_id == keycloak_id) + select(schemas.UserORM.id) + .join(schemas.ResourcePoolORM, schemas.UserORM.resource_pools) + .where(schemas.UserORM.keycloak_id == keycloak_id) .where(schemas.ResourcePoolORM.id == resource_pool_id) ) stmt = delete(schemas.resource_pools_users).where(schemas.resource_pools_users.c.user_id.in_(sub)) @@ -859,12 +871,12 @@ async def update_resource_pool_users( for no_default_user in users_to_modify ] ) - stmt_usr = select(schemas.RPUserORM).where(schemas.RPUserORM.keycloak_id.in_(user_ids)) + stmt_usr = select(schemas.UserORM).where(schemas.UserORM.keycloak_id.in_(user_ids)) res_usr = await session.execute(stmt_usr) users_to_add_exist = res_usr.scalars().all() user_ids_to_add_exist = [i.keycloak_id for i in users_to_add_exist] users_to_add_missing = [ - schemas.RPUserORM(keycloak_id=user_id) for user_id in user_ids if user_id not in user_ids_to_add_exist + schemas.UserORM(keycloak_id=user_id) for user_id in user_ids 
if user_id not in user_ids_to_add_exist ] if append: rp_user_ids = {rp.id for rp in rp.users} @@ -878,13 +890,13 @@ async def update_resource_pool_users( async def update_user(self, api_user: base_models.APIUser, keycloak_id: str, **kwargs: Any) -> base_models.User: """Update a specific user.""" async with self.session_maker() as session, session.begin(): - stmt = select(schemas.RPUserORM).where(schemas.RPUserORM.keycloak_id == keycloak_id) + stmt = select(schemas.UserORM).where(schemas.UserORM.keycloak_id == keycloak_id) res = await session.execute(stmt) - user: Optional[schemas.RPUserORM] = res.scalars().first() + user: Optional[schemas.UserORM] = res.scalars().first() if not user: - user = schemas.RPUserORM(keycloak_id=keycloak_id) + user = schemas.UserORM(keycloak_id=keycloak_id) session.add(user) - allowed_updates = set(["no_default_access"]) + allowed_updates = {"no_default_access"} if not set(kwargs.keys()).issubset(allowed_updates): raise errors.ValidationError( message=f"Only the following fields {allowed_updates} can be updated for a resource pool user.." @@ -900,26 +912,26 @@ class ClusterRepository: session_maker: Callable[..., AsyncSession] - async def select_all(self) -> AsyncGenerator[SavedCluster, Any]: + async def select_all(self, cluster_id: ULID | None = None) -> AsyncGenerator[SavedClusterSettings, Any]: """Get cluster configurations from the database.""" async with self.session_maker() as session: - clusters = await session.stream_scalars(select(ClusterORM)) + query = select(ClusterORM) + if cluster_id is not None: + query = query.where(ClusterORM.id == cluster_id) + + clusters = await session.stream_scalars(query) async for cluster in clusters: yield cluster.dump() - async def select(self, api_user: base_models.APIUser, cluster_id: ULID) -> SavedCluster: + async def select(self, cluster_id: ULID) -> SavedClusterSettings: """Get cluster configurations from the database.""" + async for cluster in self.select_all(cluster_id): + return cluster - async with self.session_maker() as session: - r = await session.scalars(select(ClusterORM).where(ClusterORM.id == cluster_id)) - cluster = r.one_or_none() - if cluster is None: - raise errors.MissingResourceError(message=f"Cluster definition id='{cluster_id}' does not exist.") - - return cluster.dump() + raise errors.MissingResourceError(message=f"Cluster definition id='{cluster_id}' does not exist.") @_only_admins - async def insert(self, api_user: base_models.APIUser, cluster: Cluster) -> Cluster: + async def insert(self, api_user: base_models.APIUser, cluster: ClusterSettings) -> ClusterSettings: """Creates a new cluster configuration.""" cluster_orm = ClusterORM.load(cluster) @@ -931,7 +943,7 @@ async def insert(self, api_user: base_models.APIUser, cluster: Cluster) -> Clust return cluster_orm.dump() @_only_admins - async def update(self, api_user: base_models.APIUser, cluster: ClusterPatch, cluster_id: ULID) -> Cluster: + async def update(self, api_user: base_models.APIUser, cluster: ClusterPatch, cluster_id: ULID) -> ClusterSettings: """Updates a cluster configuration.""" async with self.session_maker() as session, session.begin(): @@ -941,7 +953,7 @@ async def update(self, api_user: base_models.APIUser, cluster: ClusterPatch, clu for key, value in asdict(cluster).items(): match key, value: - case "session_protocol", CrcProtocol(): + case "session_protocol", SessionProtocol(): setattr(saved_cluster, key, value.value) case "session_storage_class", "": # If we received an empty string in the storage class, reset it to the 
default storage class by diff --git a/components/renku_data_services/crc/models.py b/components/renku_data_services/crc/models.py index efb91e21d..44ed4fad2 100644 --- a/components/renku_data_services/crc/models.py +++ b/components/renku_data_services/crc/models.py @@ -6,16 +6,14 @@ from copy import deepcopy from dataclasses import asdict, dataclass, field from enum import StrEnum -from typing import TYPE_CHECKING, Any, Optional, Protocol +from typing import Any, Optional, Protocol, Self from uuid import uuid4 -from ulid import ULID - from renku_data_services import errors +from renku_data_services.base_models import RESET, ResetType from renku_data_services.errors import ValidationError - -if TYPE_CHECKING: - from renku_data_services.crc.apispec import Protocol as CrcApiProtocol +from renku_data_services.k8s.constants import ClusterId +from renku_data_services.notebooks.cr_amalthea_session import TlsSecret class ResourcesProtocol(Protocol): @@ -188,13 +186,20 @@ def is_resource_class_compatible(self, rc: ResourceClass) -> bool: return rc <= self +class SessionProtocol(StrEnum): + """Valid Session protocol values.""" + + HTTP = "http" + HTTPS = "https" + + @dataclass(frozen=True, eq=True, kw_only=True) class ClusterPatch: """K8s Cluster settings patch.""" name: str | None config_name: str | None - session_protocol: CrcApiProtocol | None + session_protocol: SessionProtocol | None session_host: str | None session_port: int | None session_path: str | None @@ -205,12 +210,12 @@ class ClusterPatch: @dataclass(frozen=True, eq=True, kw_only=True) -class Cluster: +class ClusterSettings: """K8s Cluster settings.""" name: str config_name: str - session_protocol: CrcApiProtocol + session_protocol: SessionProtocol session_host: str session_port: int session_path: str @@ -235,12 +240,39 @@ def to_cluster_patch(self) -> ClusterPatch: service_account_name=self.service_account_name, ) + def get_storage_class(self) -> str | None: + """Get the default storage class for the cluster.""" + + return self.session_storage_class + + def get_ingress_parameters(self, server_name: str) -> tuple[str, str, str, str, TlsSecret | None, dict[str, str]]: + """Returns the ingress parameters of the cluster.""" + + host = self.session_host + base_server_path = f"{self.session_path}/{server_name}" + if self.session_port in [80, 443]: + # No need to specify the port in these cases. If we specify the port on https or http + # when it is the usual port then the URL callbacks for authentication do not work. + # I.e. if the callback is registered as https://some.host/link it will not work when a redirect + # like https://some.host:443/link is used. 
+ base_server_url = f"{self.session_protocol.value}://{host}{base_server_path}" + else: + base_server_url = f"{self.session_protocol.value}://{host}:{self.session_port}{base_server_path}" + base_server_https_url = base_server_url + ingress_annotations = self.session_ingress_annotations + + tls_secret = ( + None if self.session_tls_secret_name is None else TlsSecret(adopt=False, name=self.session_tls_secret_name) + ) + + return base_server_path, base_server_url, base_server_https_url, host, tls_secret, ingress_annotations + @dataclass(frozen=True, eq=True, kw_only=True) -class SavedCluster(Cluster): - """K8s Cluster settings from the DB.""" +class SavedClusterSettings(ClusterSettings): + """Saved K8s Cluster settings.""" - id: ULID + id: ClusterId @dataclass(frozen=True, eq=True, kw_only=True) @@ -255,7 +287,8 @@ class ResourcePool: hibernation_threshold: int | None = None default: bool = False public: bool = False - cluster: SavedCluster | None = None + remote: RemoteConfigurationFirecrest | None = None + cluster: SavedClusterSettings | None = None def __post_init__(self) -> None: """Validate the resource pool after initialization.""" @@ -265,6 +298,10 @@ def __post_init__(self) -> None: raise ValidationError(message="The default resource pool has to be public.") if self.default and self.quota is not None: raise ValidationError(message="A default resource pool cannot have a quota.") + if self.remote and self.default: + raise ValidationError(message="The default resource pool cannot start remote sessions.") + if self.remote and self.public: + raise ValidationError(message="A resource pool which starts remote sessions cannot be public.") if (self.idle_threshold and self.idle_threshold < 0) or ( self.hibernation_threshold and self.hibernation_threshold < 0 ): @@ -299,14 +336,21 @@ def update(self, **kwargs: Any) -> ResourcePool: """Determine if an update to a resource pool is valid and if valid create new updated resource pool.""" if self.default and "default" in kwargs and not kwargs["default"]: raise ValidationError(message="A default resource pool cannot be made non-default.") + if "remote" in kwargs and kwargs["remote"] is RESET: + kwargs["remote"] = None + if "remote" in kwargs and isinstance(kwargs["remote"], RemoteConfigurationFirecrestPatch): + remote_dict: dict[str, Any] = self.remote.to_dict() if self.remote else dict() + remote_dict.update(kwargs["remote"].to_dict()) + kwargs["remote"] = remote_dict return ResourcePool.from_dict({**asdict(self), **kwargs}) @classmethod def from_dict(cls, data: dict) -> ResourcePool: """Create the model from a plain dictionary.""" - cluster: SavedCluster | None = None + cluster: SavedClusterSettings | None = None quota: Quota | None = None classes: list[ResourceClass] = [] + remote: RemoteConfigurationFirecrest | None = None if "quota" in data and isinstance(data["quota"], dict): quota = Quota.from_dict(data["quota"]) @@ -319,11 +363,11 @@ def from_dict(cls, data: dict) -> ResourcePool: classes = [ResourceClass.from_dict(c) if isinstance(c, dict) else c for c in data["classes"]] match tmp := data.get("cluster"): - case SavedCluster(): + case SavedClusterSettings(): # This has to be before the dict() case, as this is also an instance of dict. 
cluster = tmp case dict(): - cluster = SavedCluster( + cluster = SavedClusterSettings( name=tmp["name"], config_name=tmp["config_name"], session_protocol=tmp["session_protocol"], @@ -341,6 +385,16 @@ def from_dict(cls, data: dict) -> ResourcePool: case unknown: raise errors.ValidationError(message=f"Got unexpected cluster data {unknown} when creating model") + match tmp := data.get("remote"): + case RemoteConfigurationFirecrest(): + remote = tmp + case dict(): + remote = RemoteConfigurationFirecrest.from_dict(tmp) + case None: + remote = None + case unknown: + raise errors.ValidationError(message=f"Got unexpected remote data {unknown} when creating model") + return cls( name=data["name"], id=data.get("id"), @@ -348,6 +402,7 @@ def from_dict(cls, data: dict) -> ResourcePool: quota=quota, default=data.get("default", False), public=data.get("public", False), + remote=remote, idle_threshold=data.get("idle_threshold"), hibernation_threshold=data.get("hibernation_threshold"), cluster=cluster, @@ -366,3 +421,61 @@ def get_default_resource_class(self) -> ResourceClass | None: if rc.default: return rc return None + + +class RemoteConfigurationKind(StrEnum): + """Remote resource pool kinds.""" + + firecrest = "firecrest" + + +@dataclass(frozen=True, eq=True, kw_only=True) +class RemoteConfigurationFirecrest: + """Model for remote configurations using the FirecREST API.""" + + kind: RemoteConfigurationKind = RemoteConfigurationKind.firecrest + provider_id: str | None = None + api_url: str + system_name: str + partition: str | None = None + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> Self: + """Convert a dict object into a RemoteConfiguration instance.""" + kind = data.get("kind") + if kind == RemoteConfigurationKind.firecrest.value: + return cls( + kind=RemoteConfigurationKind.firecrest, + provider_id=data.get("provider_id") or None, + api_url=data["api_url"], + system_name=data["system_name"], + partition=data.get("partition") or None, + ) + raise errors.ValidationError(message=f"Invalid kind for remote configuration: '{kind}'") + + def to_dict(self) -> dict[str, Any]: + """Convert this instance of RemoteConfiguration into a dictionary.""" + res = asdict(self) + res["kind"] = self.kind.value + return res + + +@dataclass(frozen=True, eq=True, kw_only=True) +class RemoteConfigurationFirecrestPatch: + """Model for remote configurations using the FirecREST API.""" + + kind: RemoteConfigurationKind | None = None + provider_id: str | None = None + api_url: str | None = None + system_name: str | None = None + partition: str | None = None + + def to_dict(self) -> dict[str, Any]: + """Convert this instance of RemoteConfigurationPatch into a dictionary.""" + res = asdict(self) + if self.kind: + res["kind"] = self.kind.value + return res + + +RemoteConfigurationPatch = ResetType | RemoteConfigurationFirecrestPatch diff --git a/components/renku_data_services/crc/orm.py b/components/renku_data_services/crc/orm.py index 3467666af..595786bfc 100644 --- a/components/renku_data_services/crc/orm.py +++ b/components/renku_data_services/crc/orm.py @@ -1,8 +1,19 @@ """SQLAlchemy schemas for the CRC database.""" -from typing import Optional - -from sqlalchemy import JSON, BigInteger, Column, Identity, Integer, MetaData, String, Table +from __future__ import annotations + +from typing import Any, Optional + +from sqlalchemy import ( + JSON, + BigInteger, + Column, + Identity, + Integer, + MetaData, + String, + Table, +) from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import 
DeclarativeBase, Mapped, MappedAsDataclass, mapped_column, relationship from sqlalchemy.schema import ForeignKey @@ -10,9 +21,11 @@ import renku_data_services.base_models as base_models from renku_data_services.app_config import logging +from renku_data_services.connected_services import orm as cs_schemas from renku_data_services.crc import models -from renku_data_services.crc.apispec import Protocol as CrcApiProtocol +from renku_data_services.crc.models import ClusterSettings, SavedClusterSettings, SessionProtocol from renku_data_services.errors import errors +from renku_data_services.k8s.constants import ClusterId from renku_data_services.utils.sqlalchemy import ULIDType logger = logging.getLogger(__name__) @@ -37,7 +50,7 @@ class BaseORM(MappedAsDataclass, DeclarativeBase): ) -class RPUserORM(BaseORM): +class UserORM(BaseORM): """Stores the Keycloak user ID for controlling user access to resource pools. Used in combination with the `resource_pool_users` table this table provides information @@ -50,7 +63,7 @@ class RPUserORM(BaseORM): __tablename__ = "users" keycloak_id: Mapped[str] = mapped_column(String(50), unique=True, index=True) no_default_access: Mapped[bool] = mapped_column(default=False, insert_default=False) - resource_pools: Mapped[list["ResourcePoolORM"]] = relationship( + resource_pools: Mapped[list[ResourcePoolORM]] = relationship( secondary=resource_pools_users, back_populates="users", default_factory=list, @@ -59,7 +72,7 @@ class RPUserORM(BaseORM): id: Mapped[int] = mapped_column(Integer, Identity(always=True), primary_key=True, init=False) @classmethod - def load(cls, user: base_models.User) -> "RPUserORM": + def load(cls, user: base_models.User) -> UserORM: """Create an ORM object from a user model.""" return cls(keycloak_id=user.keycloak_id, no_default_access=user.no_default_access) @@ -82,17 +95,17 @@ class ResourceClassORM(BaseORM): resource_pool_id: Mapped[Optional[int]] = mapped_column( ForeignKey("resource_pools.id", ondelete="CASCADE"), default=None, index=True ) - resource_pool: Mapped[Optional["ResourcePoolORM"]] = relationship( + resource_pool: Mapped[Optional[ResourcePoolORM]] = relationship( back_populates="classes", default=None, lazy="joined" ) id: Mapped[int] = mapped_column(Integer, Identity(always=True), primary_key=True, default=None, init=False) - tolerations: Mapped[list["TolerationORM"]] = relationship( + tolerations: Mapped[list[TolerationORM]] = relationship( back_populates="resource_class", default_factory=list, cascade="save-update, merge, delete", lazy="selectin", ) - node_affinities: Mapped[list["NodeAffintyORM"]] = relationship( + node_affinities: Mapped[list[NodeAffintyORM]] = relationship( back_populates="resource_class", default_factory=list, cascade="save-update, merge, delete", @@ -100,7 +113,7 @@ class ResourceClassORM(BaseORM): ) @classmethod - def load(cls, resource_class: models.ResourceClass) -> "ResourceClassORM": + def load(cls, resource_class: models.ResourceClass) -> ResourceClassORM: """Create a ORM object from the resource class model.""" return cls( name=resource_class.name, @@ -158,13 +171,12 @@ class ClusterORM(BaseORM): # in the cluster in the namespace where the sessions will be launched. 
service_account_name: Mapped[str | None] = mapped_column(String(256), default=None, nullable=True) - def dump(self) -> models.SavedCluster: + def dump(self) -> SavedClusterSettings: """Create a cluster model from the ORM object.""" - return models.SavedCluster( - id=self.id, + return SavedClusterSettings( name=self.name, config_name=self.config_name, - session_protocol=CrcApiProtocol[self.session_protocol], + session_protocol=SessionProtocol(self.session_protocol), session_host=self.session_host, session_port=self.session_port, session_path=self.session_path, @@ -172,10 +184,11 @@ def dump(self) -> models.SavedCluster: session_tls_secret_name=self.session_tls_secret_name, session_storage_class=self.session_storage_class, service_account_name=self.service_account_name, + id=ClusterId(self.id), ) @classmethod - def load(cls, cluster: models.Cluster) -> "ClusterORM": + def load(cls, cluster: ClusterSettings) -> ClusterORM: """Create an ORM object from the cluster model.""" return ClusterORM( name=cluster.name, @@ -197,13 +210,13 @@ class ResourcePoolORM(BaseORM): __tablename__ = "resource_pools" name: Mapped[str] = mapped_column(String(40), index=True) quota: Mapped[Optional[str]] = mapped_column(String(63), index=True, default=None) - users: Mapped[list["RPUserORM"]] = relationship( + users: Mapped[list[UserORM]] = relationship( secondary=resource_pools_users, back_populates="resource_pools", default_factory=list, repr=False, ) - classes: Mapped[list["ResourceClassORM"]] = relationship( + classes: Mapped[list[ResourceClassORM]] = relationship( back_populates="resource_pool", default_factory=list, cascade="save-update, merge, delete", @@ -217,6 +230,16 @@ class ResourcePoolORM(BaseORM): hibernation_threshold: Mapped[Optional[int]] = mapped_column(default=None) default: Mapped[bool] = mapped_column(default=False, index=True) public: Mapped[bool] = mapped_column(default=False, index=True) + remote_provider_id: Mapped[str | None] = mapped_column( + ForeignKey(cs_schemas.OAuth2ClientORM.id, ondelete="RESTRICT", name="resource_pools_remote_provider_id_fk"), + default=None, + server_default=None, + nullable=True, + index=True, + ) + remote_json: Mapped[dict[str, Any] | None] = mapped_column( + JSONVariant, default=None, server_default=None, nullable=True + ) id: Mapped[int] = mapped_column("id", Integer, Identity(always=True), primary_key=True, default=None, init=False) cluster_id: Mapped[Optional[ULID]] = mapped_column( ForeignKey(ClusterORM.id, ondelete="SET NULL"), default=None, index=True @@ -224,7 +247,7 @@ class ResourcePoolORM(BaseORM): cluster: Mapped[Optional[ClusterORM]] = relationship(viewonly=True, default=None, lazy="selectin", init=False) @classmethod - def load(cls, resource_pool: models.ResourcePool) -> "ResourcePoolORM": + def load(cls, resource_pool: models.ResourcePool) -> ResourcePoolORM: """Create an ORM object from the resource pool model.""" quota = None if resource_pool.quota is not None: @@ -234,6 +257,13 @@ def load(cls, resource_pool: models.ResourcePool) -> "ResourcePoolORM": if resource_pool.cluster is not None: cluster_id = resource_pool.cluster.id + remote_provider_id = None + remote_json = None + if resource_pool.remote: + remote_provider_id = resource_pool.remote.provider_id + remote_json = resource_pool.remote.to_dict() + del remote_json["provider_id"] + return cls( name=resource_pool.name, quota=quota, @@ -242,6 +272,8 @@ def load(cls, resource_pool: models.ResourcePool) -> "ResourcePoolORM": hibernation_threshold=resource_pool.hibernation_threshold, 
public=resource_pool.public, default=resource_pool.default, + remote_provider_id=remote_provider_id, + remote_json=remote_json, cluster_id=cluster_id, ) @@ -262,6 +294,7 @@ def dump( f"Using the quota {quota} in the response." ) cluster = None if self.cluster is None else self.cluster.dump() + remote = self._dump_remote() return models.ResourcePool( id=self.id, name=self.name, @@ -271,16 +304,25 @@ def dump( hibernation_threshold=self.hibernation_threshold, public=self.public, default=self.default, + remote=remote, cluster=cluster, ) + def _dump_remote(self) -> models.RemoteConfigurationFirecrest | None: + """Create a remote_configuration model from the corresponding column of the ORM object.""" + if self.remote_json is None: + return None + return models.RemoteConfigurationFirecrest.from_dict( + {**self.remote_json, "provider_id": self.remote_provider_id} + ) + class TolerationORM(BaseORM): """The key for a K8s toleration used to schedule loads on tainted nodes.""" __tablename__ = "tolerations" key: Mapped[str] = mapped_column(String(63), index=True) - resource_class: Mapped[Optional["ResourceClassORM"]] = relationship( + resource_class: Mapped[Optional[ResourceClassORM]] = relationship( back_populates="tolerations", default=None, lazy="selectin" ) resource_class_id: Mapped[Optional[int]] = mapped_column( @@ -294,7 +336,7 @@ class NodeAffintyORM(BaseORM): __tablename__ = "node_affinities" key: Mapped[str] = mapped_column(String(63), index=True) - resource_class: Mapped[Optional["ResourceClassORM"]] = relationship( + resource_class: Mapped[Optional[ResourceClassORM]] = relationship( back_populates="node_affinities", default=None, lazy="selectin" ) resource_class_id: Mapped[Optional[int]] = mapped_column( @@ -304,7 +346,7 @@ class NodeAffintyORM(BaseORM): id: Mapped[int] = mapped_column("id", Integer, Identity(always=True), primary_key=True, default=None, init=False) @classmethod - def load(cls, affinity: models.NodeAffinity) -> "NodeAffintyORM": + def load(cls, affinity: models.NodeAffinity) -> NodeAffintyORM: """Create an ORM object from the node affinity model.""" return cls( key=affinity.key, diff --git a/components/renku_data_services/data_connectors/core.py b/components/renku_data_services/data_connectors/core.py index 193a883af..fc0860419 100644 --- a/components/renku_data_services/data_connectors/core.py +++ b/components/renku_data_services/data_connectors/core.py @@ -156,10 +156,6 @@ async def validate_unsaved_global_data_connector( # Fetch DOI metadata rclone_metadata = await validator.get_doi_metadata(configuration=data_connector.storage.configuration) - if rclone_metadata is None: - raise errors.ValidationError( - message=f"Could not resolve DOI {data_connector.storage.configuration.get("doi", "")}" - ) metadata = await get_dataset_metadata(rclone_metadata=rclone_metadata) name = data_connector.name diff --git a/components/renku_data_services/data_connectors/db.py b/components/renku_data_services/data_connectors/db.py index 96a1a30f0..6effa62b3 100644 --- a/components/renku_data_services/data_connectors/db.py +++ b/components/renku_data_services/data_connectors/db.py @@ -39,7 +39,6 @@ from renku_data_services.search.db import SearchUpdatesRepo from renku_data_services.search.decorators import update_search_document from renku_data_services.secrets import orm as secrets_schemas -from renku_data_services.secrets.core import encrypt_user_secret from renku_data_services.secrets.models import SecretKind from renku_data_services.storage.rclone import RCloneValidator from 
renku_data_services.users.db import UserRepo @@ -933,8 +932,7 @@ async def patch_data_connector_secrets( del existing_secrets_as_dict[name] continue - encrypted_value, encrypted_key = await encrypt_user_secret( - user_repo=self.user_repo, + encrypted_value, encrypted_key = await self.user_repo.encrypt_user_secret( requested_by=user, secret_service_public_key=self.secret_service_public_key, secret_value=value, diff --git a/components/renku_data_services/git/gitlab.py b/components/renku_data_services/git/gitlab.py index aa15b6689..138e8103f 100644 --- a/components/renku_data_services/git/gitlab.py +++ b/components/renku_data_services/git/gitlab.py @@ -115,6 +115,15 @@ def _process_projects( return result +@dataclass(kw_only=True) +class EmptyGitlabAPI: + """An empty gitlab API used to decouple gitlab from Renku.""" + + async def filter_projects_by_access_level(self, _: APIUser, __: list[str], ___: GitlabAccessLevel) -> list[str]: + """Always return an empty list.""" + return [] + + @dataclass(kw_only=True) class DummyGitlabAPI: """Dummy gitlab API. diff --git a/components/renku_data_services/k8s/client_interfaces.py b/components/renku_data_services/k8s/client_interfaces.py index 43cdc41d0..666d3eb9c 100644 --- a/components/renku_data_services/k8s/client_interfaces.py +++ b/components/renku_data_services/k8s/client_interfaces.py @@ -1,67 +1,95 @@ """Required interfaces for k8s clients.""" -from abc import ABC, abstractmethod -from typing import Any +from __future__ import annotations +from collections.abc import AsyncIterable +from typing import Any, Protocol -class K8sCoreClientInterface(ABC): - """Defines what functionality is required for the core k8s client.""" +from kubernetes_asyncio.client import V1DeleteOptions, V1PriorityClass, V1ResourceQuota - @abstractmethod - def read_namespaced_resource_quota(self, name: Any, namespace: Any, **kwargs: Any) -> Any: - """Get a resource quota.""" - ... +from renku_data_services.k8s.models import K8sObject, K8sObjectFilter, K8sObjectMeta, K8sSecret + + +class ResourceQuotaClient(Protocol): + """Methods to manipulate ResourceQuota kubernetes resources.""" - @abstractmethod - def list_namespaced_resource_quota(self, namespace: Any, **kwargs: Any) -> Any: + def list_resource_quota(self, namespace: str, label_selector: str) -> list[V1ResourceQuota]: """List resource quotas.""" ... - @abstractmethod - def create_namespaced_resource_quota(self, namespace: Any, body: Any, **kwargs: Any) -> Any: + def read_resource_quota(self, name: str, namespace: str) -> V1ResourceQuota: + """Get a resource quota.""" + ... + + def create_resource_quota(self, namespace: str, body: V1ResourceQuota) -> None: """Create a resource quota.""" ... - @abstractmethod - def delete_namespaced_resource_quota(self, name: Any, namespace: Any, **kwargs: Any) -> Any: + def delete_resource_quota(self, name: str, namespace: str) -> None: """Delete a resource quota.""" ... - @abstractmethod - def patch_namespaced_resource_quota(self, name: Any, namespace: Any, body: Any, **kwargs: Any) -> Any: + def patch_resource_quota(self, name: str, namespace: str, body: V1ResourceQuota) -> None: """Update a resource quota.""" ... - @abstractmethod - def delete_namespaced_secret(self, name: Any, namespace: Any, **kwargs: Any) -> Any: - """Delete a secret.""" - ... 
- @abstractmethod - def create_namespaced_secret(self, namespace: Any, body: Any, **kwargs: Any) -> Any: +class SecretClient(Protocol): + """Methods to manipulate Secret kubernetes resources.""" + + async def create_secret(self, secret: K8sSecret) -> K8sSecret: """Create a secret.""" ... - @abstractmethod - def patch_namespaced_secret(self, name: Any, namespace: Any, body: Any, **kwargs: Any) -> Any: + async def patch_secret(self, secret: K8sObjectMeta, patch: dict[str, Any] | list[dict[str, Any]]) -> K8sObject: """Patch an existing secret.""" ... + async def delete_secret(self, secret: K8sObjectMeta) -> None: + """Delete a secret.""" + ... + -class K8sSchedudlingClientInterface(ABC): - """Defines what functionality is required for the scheduling k8s client.""" +class PriorityClassClient(Protocol): + """Methods to manipulate kubernetes Priority Class resources.""" - @abstractmethod - def create_priority_class(self, body: Any, **kwargs: Any) -> Any: + def create_priority_class(self, body: V1PriorityClass) -> V1PriorityClass: """Create a priority class.""" ... - @abstractmethod - def delete_priority_class(self, name: Any, **kwargs: Any) -> Any: + def read_priority_class(self, name: str) -> V1PriorityClass | None: + """Retrieve a priority class.""" + ... + + def delete_priority_class(self, name: str, body: V1DeleteOptions) -> None: """Delete a priority class.""" ... - @abstractmethod - def get_priority_class(self, name: Any, **kwargs: Any) -> Any: - """Retrieve a priority class.""" + +class K8sClient(Protocol): + """Methods to manipulate resources on a Kubernetes cluster.""" + + async def create(self, obj: K8sObject, refresh: bool) -> K8sObject: + """Create the k8s object.""" + ... + + async def patch(self, meta: K8sObjectMeta, patch: dict[str, Any] | list[dict[str, Any]]) -> K8sObject: + """Patch a k8s object. + + If the patch is a list we assume that we have a rfc6902 json patch like + `[{ "op": "add", "path": "/a/b/c", "value": [ "foo", "bar" ] }]`. + If the patch is a dictionary then it is considered to be a rfc7386 json merge patch. + """ + ... + + async def delete(self, meta: K8sObjectMeta) -> None: + """Delete a k8s object.""" + ... + + async def get(self, meta: K8sObjectMeta) -> K8sObject | None: + """Get a specific k8s object, None is returned if the object does not exist.""" + ... + + def list(self, _filter: K8sObjectFilter) -> AsyncIterable[K8sObject]: + """List all k8s objects.""" ... 
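As a usage note for the `patch` contract above, here is a minimal sketch of the two payload shapes it distinguishes (not part of this patch; `client` stands for any `K8sClient` implementation, `meta` for a `K8sObjectMeta`, and the label names are only examples):

    # RFC 6902 JSON patch: a list of operations; a "/" inside a key is escaped as "~1".
    json_patch = [
        {"op": "add", "path": "/metadata/labels/renku.io~1safe-username", "value": "jane"},
        {"op": "remove", "path": "/metadata/labels/renku.io~1old-label"},
    ]

    # RFC 7386 JSON merge patch: a dict merged into the manifest; a None value deletes the key.
    merge_patch = {"metadata": {"labels": {"renku.io/safe-username": "jane", "renku.io/old-label": None}}}

    # Both shapes go through the same method:
    #     await client.patch(meta, json_patch)
    #     await client.patch(meta, merge_patch)
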
diff --git a/components/renku_data_services/k8s/clients.py b/components/renku_data_services/k8s/clients.py
index 7080ab421..1535a66aa 100644
--- a/components/renku_data_services/k8s/clients.py
+++ b/components/renku_data_services/k8s/clients.py
@@ -2,14 +2,13 @@
 
 from __future__ import annotations
 
-import asyncio
 import contextlib
 import multiprocessing.synchronize
-from collections.abc import AsyncIterable, Coroutine
+from collections.abc import AsyncIterable
 from copy import deepcopy
 from multiprocessing import Lock
 from multiprocessing.synchronize import Lock as LockType
-from typing import TYPE_CHECKING, Any
+from typing import Any
 from uuid import uuid4
 
 import kr8s
@@ -18,21 +17,21 @@
 from kubernetes.config.incluster_config import SERVICE_CERT_FILENAME, SERVICE_TOKEN_FILENAME, InClusterConfigLoader
 
 from renku_data_services.errors import errors
-from renku_data_services.k8s.client_interfaces import K8sCoreClientInterface, K8sSchedudlingClientInterface
-from renku_data_services.k8s.models import APIObjectInCluster, K8sObjectFilter
-
-if TYPE_CHECKING:
-    from renku_data_services.k8s.constants import ClusterId
-    from renku_data_services.k8s.models import (
-        GVK,
-        Cluster,
-        K8sObject,
-        K8sObjectMeta,
-    )
-    from renku_data_services.k8s_watcher import K8sDbCache
-
-
-class K8sCoreClient(K8sCoreClientInterface):  # pragma:nocover
+from renku_data_services.k8s.client_interfaces import K8sClient, PriorityClassClient, ResourceQuotaClient, SecretClient
+from renku_data_services.k8s.constants import ClusterId
+from renku_data_services.k8s.db import K8sDbCache
+from renku_data_services.k8s.models import (
+    GVK,
+    APIObjectInCluster,
+    ClusterConnection,
+    K8sObject,
+    K8sObjectFilter,
+    K8sObjectMeta,
+    K8sSecret,
+)
+
+
+class K8sCoreClient(ResourceQuotaClient):
     """Real k8s core API client that exposes the required functions."""
 
     def __init__(self) -> None:
@@ -45,40 +44,56 @@ def __init__(self) -> None:
         config.load_config()
         self.client = client.CoreV1Api()
 
-    def read_namespaced_resource_quota(self, name: str, namespace: str, **kwargs: dict) -> Any:
+    def read_resource_quota(self, name: str, namespace: str) -> client.V1ResourceQuota:
         """Get a resource quota."""
-        return self.client.read_namespaced_resource_quota(name, namespace, **kwargs)
+        return self.client.read_namespaced_resource_quota(name, namespace)
 
-    def list_namespaced_resource_quota(self, namespace: str, **kwargs: dict) -> Any:
+    def list_resource_quota(self, namespace: str, label_selector: str) -> list[client.V1ResourceQuota]:
         """List resource quotas."""
-        return self.client.list_namespaced_resource_quota(namespace, **kwargs)
+        return list(self.client.list_namespaced_resource_quota(namespace, label_selector=label_selector).items)
 
-    def create_namespaced_resource_quota(self, namespace: str, body: dict, **kwargs: dict) -> Any:
+    def create_resource_quota(self, namespace: str, body: client.V1ResourceQuota) -> None:
         """Create a resource quota."""
-        return self.client.create_namespaced_resource_quota(namespace, body, **kwargs)
+        self.client.create_namespaced_resource_quota(namespace, body)
 
-    def delete_namespaced_resource_quota(self, name: str, namespace: str, **kwargs: dict) -> Any:
+    def delete_resource_quota(self, name: str, namespace: str) -> None:
         """Delete a resource quota."""
-        return self.client.delete_namespaced_resource_quota(name, namespace, **kwargs)
+        try:
+            self.client.delete_namespaced_resource_quota(name, namespace)
+        except client.ApiException as e:
+            if e.status == 404:
+                # If the thing we are trying to delete is not
there, we have the desired state so we can just go on. + return None + raise - def patch_namespaced_resource_quota(self, name: str, namespace: str, body: dict, **kwargs: dict) -> Any: + def patch_resource_quota(self, name: str, namespace: str, body: client.V1ResourceQuota) -> None: """Update a resource quota.""" - return self.client.patch_namespaced_resource_quota(name, namespace, body, **kwargs) + self.client.patch_namespaced_resource_quota(name, namespace, body) - def delete_namespaced_secret(self, name: str, namespace: str, **kwargs: dict) -> Any: - """Delete a secret.""" - return self.client.delete_namespaced_secret(name, namespace, **kwargs) - def create_namespaced_secret(self, namespace: str, body: dict, **kwargs: dict) -> Any: +class K8sSecretClient(SecretClient): + """A wrapper around a kr8s k8s client, acts on Secrets.""" + + def __init__(self, client: K8sClient) -> None: + self.__client = client + + async def create_secret(self, secret: K8sSecret) -> K8sSecret: """Create a secret.""" - return self.client.create_namespaced_secret(namespace, body, **kwargs) - def patch_namespaced_secret(self, name: str, namespace: str, body: dict, **kwargs: dict) -> Any: + return K8sSecret.from_k8s_object(await self.__client.create(secret, False)) + + async def patch_secret(self, secret: K8sObjectMeta, patch: dict[str, Any] | list[dict[str, Any]]) -> K8sObject: """Patch a secret.""" - return self.client.patch_namespaced_secret(name, namespace, body, **kwargs) + return await self.__client.patch(secret, patch) + + async def delete_secret(self, secret: K8sObjectMeta) -> None: + """Delete a secret.""" -class K8sSchedulingClient(K8sSchedudlingClientInterface): # pragma:nocover + await self.__client.delete(secret) + + +class K8sSchedulingClient(PriorityClassClient): """Real k8s scheduling API client that exposes the required functions.""" def __init__(self) -> None: @@ -91,26 +106,40 @@ def __init__(self) -> None: config.load_config() self.client = client.SchedulingV1Api() - def create_priority_class(self, body: Any, **kwargs: Any) -> Any: + def create_priority_class(self, body: client.V1PriorityClass) -> client.V1PriorityClass: """Create a priority class.""" - return self.client.create_priority_class(body, **kwargs) + return self.client.create_priority_class(body) - def delete_priority_class(self, name: Any, **kwargs: Any) -> Any: + def delete_priority_class(self, name: str, body: client.V1DeleteOptions) -> None: """Delete a priority class.""" - return self.client.delete_priority_class(name, **kwargs) + try: + self.client.delete_priority_class(name, body=body) + except client.ApiException as e: + if e.status != 404: + # NOTE: The priorityclass is an owner of the resource quota so when the priority class is deleted the + # resource quota is also deleted. Also, we don't care if the thing we are trying to delete is not there + # we have the desired state so we can just go on. + raise + + def read_priority_class(self, name: str) -> client.V1PriorityClass | None: + """Get a priority class.""" + pc = None + with contextlib.suppress(client.ApiException): + pc = self.client.read_priority_class(name) + return pc def get_priority_class(self, name: Any, **kwargs: Any) -> Any: """Get a priority class.""" return self.client.read_priority_class(name, **kwargs) -class DummyCoreClient(K8sCoreClientInterface): +class DummyCoreClient(ResourceQuotaClient, SecretClient): """Dummy k8s core API client that does not require a k8s cluster. Not suitable for production - to be used only for testing and development. 
""" - def __init__(self, quotas: dict[str, client.V1ResourceQuota], secrets: dict[str, client.V1Secret]) -> None: + def __init__(self, quotas: dict[str, client.V1ResourceQuota], secrets: dict[str, K8sSecret]) -> None: self.quotas = quotas self.secrets = secrets self.__lock: LockType | None = None @@ -125,7 +154,7 @@ def _lock(self) -> multiprocessing.synchronize.Lock: self.__lock = Lock() return self.__lock - def read_namespaced_resource_quota(self, name: Any, namespace: Any, **kwargs: Any) -> Any: + def read_resource_quota(self, name: str, namespace: str) -> client.V1ResourceQuota: """Get a resource quota.""" with self._lock: quota = self.quotas.get(name) @@ -133,12 +162,12 @@ def read_namespaced_resource_quota(self, name: Any, namespace: Any, **kwargs: An raise client.ApiException(status=404) return quota - def list_namespaced_resource_quota(self, namespace: Any, **kwargs: Any) -> Any: + def list_resource_quota(self, namespace: str, label_selector: str) -> list[client.V1ResourceQuota]: """List resource quotas.""" with self._lock: - return client.V1ResourceQuotaList(items=list(self.quotas.values())) + return list(self.quotas.values()) - def create_namespaced_resource_quota(self, namespace: Any, body: Any, **kwargs: Any) -> Any: + def create_resource_quota(self, namespace: str, body: client.V1ResourceQuota) -> None: """Create a resource quota.""" with self._lock: if isinstance(body.metadata, dict): @@ -147,17 +176,13 @@ def create_namespaced_resource_quota(self, namespace: Any, body: Any, **kwargs: body.api_version = "v1" body.kind = "ResourceQuota" self.quotas[body.metadata.name] = body - return body - def delete_namespaced_resource_quota(self, name: Any, namespace: Any, **kwargs: Any) -> Any: + def delete_resource_quota(self, name: str, namespace: str) -> None: """Delete a resource quota.""" with self._lock: - removed_quota = self.quotas.pop(name, None) - if removed_quota is None: - raise client.ApiException(status=404) - return removed_quota + self.quotas.pop(name, None) - def patch_namespaced_resource_quota(self, name: Any, namespace: Any, body: Any, **kwargs: Any) -> Any: + def patch_resource_quota(self, name: str, namespace: str, body: client.V1ResourceQuota) -> None: """Update a resource quota.""" with self._lock: old_quota = self.quotas.get(name) @@ -169,36 +194,27 @@ def patch_namespaced_resource_quota(self, name: Any, namespace: Any, body: Any, if isinstance(body, dict): new_quota.spec = client.V1ResourceQuota(**body).spec self.quotas[name] = new_quota - return new_quota - def create_namespaced_secret(self, namespace: Any, body: Any, **kwargs: Any) -> Any: + async def create_secret(self, secret: K8sSecret) -> K8sSecret: """Create a secret.""" with self._lock: - if isinstance(body.metadata, dict): - body.metadata = client.V1ObjectMeta(**body.metadata) - body.metadata.uid = uuid4() - body.api_version = "v1" - body.kind = "Secret" - self.secrets[body.metadata.name] = body - return body + secret.manifest.metadata.uid = uuid4() + self.secrets[secret.name] = secret + return secret - def patch_namespaced_secret(self, name: Any, namespace: Any, body: Any, **kwargs: Any) -> Any: + async def patch_secret(self, secret: K8sObjectMeta, patch: dict[str, Any] | list[dict[str, Any]]) -> K8sObject: """Patch a secret.""" # NOTE: This is only needed if the create_namespaced_secret can raise a conflict 409 status code # error when it tries to create a secret that already exists. But the dummy client never raises # this so we don't need to implement it (for now). 
raise NotImplementedError()
 
-    def delete_namespaced_secret(self, name: Any, namespace: Any, **kwargs: Any) -> Any:
+    async def delete_secret(self, secret: K8sObjectMeta) -> None:
         """Delete a secret."""
-        with self._lock:
-            removed_secret = self.secrets.pop(name, None)
-            if removed_secret is None:
-                raise client.ApiException(status=404)
-            return removed_secret
+        raise NotImplementedError()
 
 
-class DummySchedulingClient(K8sSchedudlingClientInterface):
+class DummySchedulingClient(PriorityClassClient):
     """Dummy k8s scheduling API client that does not require a k8s cluster.
 
     Not suitable for production - to be used only for testing and development.
@@ -218,7 +234,7 @@ def _lock(self) -> multiprocessing.synchronize.Lock:
             self.__lock = Lock()
         return self.__lock
 
-    def create_priority_class(self, body: Any, **kwargs: Any) -> Any:
+    def create_priority_class(self, body: client.V1PriorityClass) -> client.V1PriorityClass:
         """Create a priority class."""
         with self._lock:
             if isinstance(body.metadata, dict):
@@ -229,28 +245,29 @@ def create_priority_class(self, body: Any, **kwargs: Any) -> Any:
             self.pcs[body.metadata.name] = body
         return body
 
-    def delete_priority_class(self, name: Any, **kwargs: Any) -> Any:
-        """Delete a priority class."""
-        with self._lock:
-            removed_pc = self.pcs.pop(name, None)
-            if removed_pc is None:
-                raise client.ApiException(status=404)
-            return removed_pc
-
-    def get_priority_class(self, name: Any, **kwargs: Any) -> Any:
+    def read_priority_class(self, name: str) -> client.V1PriorityClass | None:
         """Get a priority class."""
         with self._lock:
             return self.pcs.get(name, None)
 
+    def delete_priority_class(self, name: str, body: client.V1DeleteOptions) -> None:
+        """Delete a priority class."""
+        with self._lock:
+            self.pcs.pop(name, None)
+
 
-class K8sClusterClient:
+class K8sClusterClient(K8sClient):
     """A wrapper around a kr8s k8s client, acts on all resources of a cluster."""
 
-    def __init__(self, cluster: Cluster) -> None:
+    def __init__(self, cluster: ClusterConnection) -> None:
         self.__cluster = cluster
         assert self.__cluster.api is not None
 
-    def get_cluster(self) -> Cluster:
+    def __lt__(self, other: K8sClusterClient) -> bool:
+        """Allows for sorting."""
+        return (self.__cluster.id, self.__cluster.namespace) < (other.__cluster.id, other.__cluster.namespace)
+
+    def get_cluster(self) -> ClusterConnection:
         """Return a cluster object."""
         return self.__cluster
 
@@ -278,13 +295,17 @@ async def __list(self, _filter: K8sObjectFilter) -> AsyncIterable[APIObjectInClu
 
     async def __get_api_object(self, meta: K8sObjectFilter) -> APIObjectInCluster | None:
         return await anext(aiter(self.__list(meta)), None)
 
-    async def create(self, obj: K8sObject) -> K8sObject:
+    async def create(self, obj: K8sObject, refresh: bool) -> K8sObject:
         """Create the k8s object."""
         api_obj = obj.to_api_object(self.__cluster.api)
         await api_obj.create()
-        # if refresh isn't called, status and timestamp will be blank
-        await api_obj.refresh()
+
+        # In some cases the service account does not have read rights, in which case we cannot call get() or refresh()
+        if refresh:
+            # if refresh isn't called, status and timestamp will be blank
+            await api_obj.refresh()
+
         return obj.with_manifest(api_obj.to_dict())
 
     async def patch(self, meta: K8sObjectMeta, patch: dict[str, Any] | list[dict[str, Any]]) -> K8sObject:
@@ -324,23 +345,23 @@ async def list(self, _filter: K8sObjectFilter) -> AsyncIterable[K8sObject]:
             yield r.to_k8s_object()
 
 
-class K8SCachedClusterClient(K8sClusterClient):
+class K8sCachedClusterClient(K8sClusterClient):
    """A
wrapper around a kr8s k8s client. Provides access to a cache for listing and reading resources but fallback to the cluster for other operations. """ - def __init__(self, cluster: Cluster, cache: K8sDbCache, kinds_to_cache: list[GVK]) -> None: + def __init__(self, cluster: ClusterConnection, cache: K8sDbCache, kinds_to_cache: list[GVK]) -> None: super().__init__(cluster) self.__cache = cache self.__kinds_to_cache = set(kinds_to_cache) - async def create(self, obj: K8sObject) -> K8sObject: + async def create(self, obj: K8sObject, refresh: bool) -> K8sObject: """Create the k8s object.""" if obj.gvk in self.__kinds_to_cache: await self.__cache.upsert(obj) try: - obj = await super().create(obj) + obj = await super().create(obj, refresh) except: # if there was an error creating the k8s object, we delete it from the db again to not have ghost entries if obj.gvk in self.__kinds_to_cache: @@ -390,39 +411,22 @@ async def list(self, _filter: K8sObjectFilter) -> AsyncIterable[K8sObject]: yield res -class K8sClusterClientsPool: - """A wrapper around a kr8s k8s client, acts on all resources over many clusters.""" +class K8sClusterClientsPool(K8sClient): + """A wrapper around a pool of kr8s k8s clients.""" - def __init__( - self, cache: K8sDbCache, kinds_to_cache: list[GVK], get_clusters: Coroutine[Any, Any, list[Cluster]] - ) -> None: - self.__clients: dict[ClusterId, K8sClusterClient] | None = None - self.__cache = cache - self.__kinds_to_cache = kinds_to_cache - self.__get_clusters = get_clusters - self.__lock = asyncio.Lock() + def __init__(self, clusters: AsyncIterable[K8sClusterClient]) -> None: + self.__clusters = clusters + self.__clients: dict[ClusterId, K8sClusterClient] = {} - async def __load(self) -> None: - # Avoid trying to take a lock when we have loaded the dictionary (99% of the time) - if self.__clients is not None: + async def __init_clients_if_needed(self) -> None: + if len(self.__clients) > 0: return - - async with self.__lock: - # We know it was none before getting the lock, but we might have been preempted by another coroutine which - # could have done the job by now, so check again, if still not set, load the value, otherwise we are done - if self.__clients is None: - clusters: list[Cluster] = await self.__get_clusters - self.__clients = { - c.id: K8SCachedClusterClient(c, self.__cache, self.__kinds_to_cache) for c in clusters - } + async for cluster in self.__clusters: + self.__clients[cluster.get_cluster().id] = cluster async def __get_client_or_die(self, cluster_id: ClusterId) -> K8sClusterClient: - cluster_client = None - if self.__clients is None: - await self.__load() - - if self.__clients is not None: - cluster_client = self.__clients.get(cluster_id) + await self.__init_clients_if_needed() + cluster_client = self.__clients.get(cluster_id) if cluster_client is None: raise errors.MissingResourceError( @@ -430,42 +434,35 @@ async def __get_client_or_die(self, cluster_id: ClusterId) -> K8sClusterClient: ) return cluster_client - def cluster_by_id(self, cluster_id: ClusterId) -> Cluster: + async def cluster_by_id(self, cluster_id: ClusterId) -> ClusterConnection: """Return a cluster by its id.""" - _client = None - if self.__clients is not None: - _client = self.__clients.get(cluster_id) - - if _client is not None: - return _client.get_cluster() - - raise errors.MissingResourceError( - message=f"Could not find cluster with id {cluster_id} in the list of clusters." 
- ) + client = await self.__get_client_or_die(cluster_id) + return client.get_cluster() - async def create(self, obj: K8sObject) -> K8sObject: + async def create(self, obj: K8sObject, refresh: bool = True) -> K8sObject: """Create the k8s object.""" - return await (await self.__get_client_or_die(obj.cluster)).create(obj) + client = await self.__get_client_or_die(obj.cluster) + return await client.create(obj, refresh) async def patch(self, meta: K8sObjectMeta, patch: dict[str, Any] | list[dict[str, Any]]) -> K8sObject: """Patch a k8s object.""" - return await (await self.__get_client_or_die(meta.cluster)).patch(meta, patch) + client = await self.__get_client_or_die(meta.cluster) + return await client.patch(meta, patch) async def delete(self, meta: K8sObjectMeta) -> None: """Delete a k8s object.""" - await (await self.__get_client_or_die(meta.cluster)).delete(meta) + client = await self.__get_client_or_die(meta.cluster) + await client.delete(meta) async def get(self, meta: K8sObjectMeta) -> K8sObject | None: """Get a specific k8s object, None is returned if the object does not exist.""" - return await (await self.__get_client_or_die(meta.cluster)).get(meta) + client = await self.__get_client_or_die(meta.cluster) + return await client.get(meta) async def list(self, _filter: K8sObjectFilter) -> AsyncIterable[K8sObject]: """List all k8s objects.""" - if self.__clients is None: - await self.__load() - - if self.__clients is not None: - cluster_clients = [v for v in self.__clients.values()] - for c in cluster_clients: - async for r in c.list(_filter): - yield r + await self.__init_clients_if_needed() + cluster_clients = sorted(list(self.__clients.values())) + for c in cluster_clients: + async for r in c.list(_filter): + yield r diff --git a/components/renku_data_services/k8s/config.py b/components/renku_data_services/k8s/config.py index 0e522f073..43afd9ab4 100644 --- a/components/renku_data_services/k8s/config.py +++ b/components/renku_data_services/k8s/config.py @@ -1,15 +1,19 @@ """Base config for k8s.""" import os +from collections.abc import AsyncIterable, Awaitable +import aiofiles import kr8s import yaml -import renku_data_services.k8s.constants from renku_data_services.app_config import logging from renku_data_services.crc.db import ClusterRepository +from renku_data_services.errors import errors from renku_data_services.k8s import models as k8s_models +from renku_data_services.k8s.clients import K8sCachedClusterClient, K8sClusterClient from renku_data_services.k8s.constants import DEFAULT_K8S_CLUSTER +from renku_data_services.k8s.db import K8sDbCache logger = logging.getLogger(__name__) @@ -31,20 +35,21 @@ def __init__( self._sa = sa self._url = url - def _sync_api(self) -> kr8s.Api | kr8s._AsyncApi: + def sync_api(self) -> kr8s.Api: + """Instantiate the sync Kr8s Api object based on the configuration.""" return kr8s.api( kubeconfig=self._kubeconfig, namespace=self._ns, context=self._current_context_name, ) - def _async_api(self) -> kr8s.asyncio.Api: + def _async_api(self) -> Awaitable[kr8s.asyncio.Api]: """Create an async api client from sync code. Kr8s cannot return an AsyncAPI instance from sync code, and we can't easily make all our config code async, so this method is a direct copy of the kr8s sync client code, just that it returns an async client. 
""" - ret = kr8s._async_utils.run_sync(kr8s.asyncio.api)( + return kr8s.asyncio.api( url=self._url, kubeconfig=self._kubeconfig, serviceaccount=self._sa, @@ -52,15 +57,10 @@ def _async_api(self) -> kr8s.asyncio.Api: context=self._current_context_name, _asyncio=True, # This is the only line that is different from kr8s code ) - assert isinstance(ret, kr8s.asyncio.Api) - return ret - def api(self, _async: bool = True) -> kr8s.Api | kr8s._AsyncApi: - """Instantiate the Kr8s Api object based on the configuration.""" - if _async: - return self._async_api() - else: - return self._sync_api() + def api(self) -> Awaitable[kr8s.asyncio.Api]: + """Instantiate the async Kr8s Api object based on the configuration.""" + return self._async_api() class KubeConfigEnv(KubeConfig): @@ -70,48 +70,74 @@ def __init__(self) -> None: super().__init__(ns=os.environ.get("K8S_NAMESPACE", "default")) -class KubeConfigYaml(KubeConfig): - """Get a kube config from a yaml file.""" +async def from_kubeconfig_file(kubeconfig_path: str) -> KubeConfig: + """Generate a config from a kubeconfig file.""" - def __init__(self, kubeconfig: str) -> None: - super().__init__(kubeconfig=kubeconfig) + async with aiofiles.open(kubeconfig_path) as stream: + kubeconfig_contents = await stream.read() - with open(kubeconfig) as stream: - _conf = yaml.safe_load(stream) + conf = yaml.safe_load(kubeconfig_contents) + if not isinstance(conf, dict): + raise errors.ConfigurationError(message=f"The kubeconfig {kubeconfig_path} is empty or has a bad format.") - self._current_context_name = _conf.get("current-context", None) - if self._current_context_name is not None: - for context in _conf.get("contexts", []): - name = context.get("name", None) - inner = context.get("context", None) - if inner is not None and name is not None and name == self._current_context_name: - self._ns = inner.get("namespace", None) - break + current_context_name = conf.get("current-context", None) + ns = None + if current_context_name is not None: + for context in conf.get("contexts", []): + if not isinstance(context, dict): + continue + name = context.get("name", None) + inner = context.get("context", None) + if inner is not None and name == current_context_name: + ns = inner.get("namespace", None) + break + + return KubeConfig(kubeconfig_path, current_context_name=current_context_name, ns=ns) async def get_clusters( - kube_conf_root_dir: str, namespace: str, api: kr8s.asyncio.Api, cluster_rp: ClusterRepository -) -> list[k8s_models.Cluster]: + kube_conf_root_dir: str, + default_kubeconfig: KubeConfig, + cluster_repo: ClusterRepository, + cache: K8sDbCache | None = None, + kinds_to_cache: list[k8s_models.GVK] | None = None, +) -> AsyncIterable[K8sClusterClient]: """Get all clusters accessible to the application.""" - - clusters = [k8s_models.Cluster(id=DEFAULT_K8S_CLUSTER, namespace=namespace, api=api)] + default_api = await default_kubeconfig.api() + cluster_connection = k8s_models.ClusterConnection( + id=DEFAULT_K8S_CLUSTER, namespace=default_api.namespace, api=default_api + ) + if cache is None or kinds_to_cache is None: + yield K8sClusterClient(cluster_connection) + else: + yield K8sCachedClusterClient(cluster_connection, cache, kinds_to_cache) if not os.path.exists(kube_conf_root_dir): logger.warning(f"Cannot open directory '{kube_conf_root_dir}', ignoring kube configs...") - return clusters + return - async for db_cluster in cluster_rp.select_all(): - filename = db_cluster.config_name + async for cluster_db in cluster_repo.select_all(): + filename = 
cluster_db.config_name
+        logger.info(f"Trying to load Kubernetes config: '{kube_conf_root_dir}/{filename}'")
         try:
-            kube_config = KubeConfigYaml(f"{kube_conf_root_dir}/{filename}")
-            cluster = k8s_models.Cluster(
-                id=renku_data_services.k8s.constants.ClusterId(str(db_cluster.id)),
-                namespace=kube_config.api().namespace,
-                api=kube_config.api(),
+            logger.info(f"Reading: '{kube_conf_root_dir}/{filename}'")
+            kube_config = await from_kubeconfig_file(f"{kube_conf_root_dir}/{filename}")
+            logger.info(f"Creating API for '{kube_conf_root_dir}/{filename}'")
+            k8s_api = await kube_config.api()
+            logger.info(f"Creating cluster connection for '{kube_conf_root_dir}/{filename}'")
+            cluster_connection = k8s_models.ClusterConnection(
+                id=cluster_db.id,
+                namespace=k8s_api.namespace,
+                api=k8s_api,
             )
-            clusters.append(cluster)
+            if cache is None or kinds_to_cache is None:
+                logger.info(f"Creating k8s client for '{kube_conf_root_dir}/{filename}'")
+                cluster = K8sClusterClient(cluster_connection)
+            else:
+                logger.info(f"Creating cached k8s client for '{kube_conf_root_dir}/{filename}'")
+                cluster = K8sCachedClusterClient(cluster_connection, cache, kinds_to_cache)
+            logger.info(f"Successfully loaded Kubernetes config: '{kube_conf_root_dir}/{filename}'")
+            yield cluster
         except Exception as e:
             logger.warning(f"Failed while loading '{kube_conf_root_dir}/{filename}', ignoring kube config. Error: {e}")
-
-    return clusters
diff --git a/components/renku_data_services/k8s/constants.py b/components/renku_data_services/k8s/constants.py
index 50c06e0e8..1215e2fc5 100644
--- a/components/renku_data_services/k8s/constants.py
+++ b/components/renku_data_services/k8s/constants.py
@@ -4,10 +4,13 @@
 
 from typing import Final, NewType
 
-# LSA Not enough time: Adapt this to be an alias to ULID
-ClusterId = NewType("ClusterId", str)
+from ulid import ULID
 
-DEFAULT_K8S_CLUSTER: Final[ClusterId] = ClusterId("0RENK1RENK2RENK3RENK4RENK5")  # This has to be a valid ULID
+ClusterId = NewType("ClusterId", ULID)
+
+DEFAULT_K8S_CLUSTER: Final[ClusterId] = ClusterId(
+    ULID.from_str("0RENK1RENK2RENK3RENK4RENK5")
+)  # This has to be a valid ULID
 
 DUMMY_TASK_RUN_USER_ID: Final[str] = "DummyTaskRunUser"
 """The user id to use for TaskRuns in the k8s cache.
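For orientation, a sketch of how the pieces introduced above compose (illustrative only: the kubeconfig mount path and the `ClusterRepository` constructor are assumptions, and the AmaltheaSession GVK is just an example of a kind to cache):

    from collections.abc import Callable

    from sqlalchemy.ext.asyncio import AsyncSession

    from renku_data_services.crc.db import ClusterRepository
    from renku_data_services.k8s.clients import K8sClusterClientsPool
    from renku_data_services.k8s.config import KubeConfigEnv, get_clusters
    from renku_data_services.k8s.db import K8sDbCache
    from renku_data_services.k8s.models import GVK

    def make_pool(session_maker: Callable[..., AsyncSession]) -> K8sClusterClientsPool:
        # get_clusters yields the default in-cluster client first, then one client
        # per kubeconfig registered in the DB; the pool consumes the stream lazily
        # on first use.
        clusters = get_clusters(
            kube_conf_root_dir="/secrets/kube_configs",  # hypothetical mount path
            default_kubeconfig=KubeConfigEnv(),
            cluster_repo=ClusterRepository(session_maker),  # assumed constructor
            cache=K8sDbCache(session_maker),
            kinds_to_cache=[GVK(group="amalthea.dev", version="v1alpha1", kind="AmaltheaSession")],
        )
        return K8sClusterClientsPool(clusters)
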
diff --git a/components/renku_data_services/k8s/db.py b/components/renku_data_services/k8s/db.py new file mode 100644 index 000000000..1c78e0585 --- /dev/null +++ b/components/renku_data_services/k8s/db.py @@ -0,0 +1,231 @@ +"""K8s watcher database and k8s wrappers.""" + +from __future__ import annotations + +from collections.abc import AsyncIterable, Callable +from dataclasses import dataclass, field +from typing import Optional + +import sqlalchemy +from kubernetes import client +from kubernetes.utils import parse_quantity +from sqlalchemy import Select, bindparam, select +from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.ext.asyncio import AsyncSession + +from renku_data_services.crc import models +from renku_data_services.errors import errors +from renku_data_services.k8s.client_interfaces import PriorityClassClient, ResourceQuotaClient +from renku_data_services.k8s.models import K8sObject, K8sObjectFilter, K8sObjectMeta +from renku_data_services.k8s.orm import K8sObjectORM + + +class K8sDbCache: + """Caching k8s objects in postgres.""" + + def __init__(self, session_maker: Callable[..., AsyncSession]) -> None: + self.__session_maker = session_maker + + @staticmethod + def __get_where_clauses(_filter: K8sObjectFilter) -> Select[tuple[K8sObjectORM]]: + stmt = select(K8sObjectORM) + if _filter.name is not None: + stmt = stmt.where(K8sObjectORM.name == _filter.name) + if _filter.namespace is not None: + stmt = stmt.where(K8sObjectORM.namespace == _filter.namespace) + if _filter.cluster is not None: + stmt = stmt.where(K8sObjectORM.cluster == str(_filter.cluster)) + if _filter.gvk is not None: + stmt = stmt.where(K8sObjectORM.kind_insensitive == _filter.gvk.kind) + stmt = stmt.where(K8sObjectORM.version_insensitive == _filter.gvk.version) + if _filter.gvk.group is None: + stmt = stmt.where(K8sObjectORM.group.is_(None)) + else: + stmt = stmt.where(K8sObjectORM.group_insensitive == _filter.gvk.group) + if _filter.user_id is not None: + stmt = stmt.where(K8sObjectORM.user_id == _filter.user_id) + if _filter.label_selector is not None: + stmt = stmt.where( + # K8sObjectORM.manifest.comparator.contains({"metadata": {"labels": filter.label_selector}}) + sqlalchemy.text("manifest -> 'metadata' -> 'labels' @> :labels").bindparams( + bindparam("labels", _filter.label_selector, type_=JSONB) + ) + ) + return stmt + + async def __get(self, meta: K8sObjectMeta, session: AsyncSession) -> K8sObjectORM | None: + stmt = self.__get_where_clauses(meta.to_filter()) + obj_orm = await session.scalar(stmt) + return obj_orm + + async def upsert(self, obj: K8sObject) -> None: + """Insert or update an object in the cache.""" + if obj.user_id is None: + raise errors.ValidationError(message="user_id is required to upsert k8s object.") + async with self.__session_maker() as session, session.begin(): + obj_orm = await self.__get(obj, session) + if obj_orm is not None: + obj_orm.manifest = obj.manifest + await session.commit() + await session.flush() + return + obj_orm = K8sObjectORM( + name=obj.name, + namespace=obj.namespace or "default", + group=obj.gvk.group, + kind=obj.gvk.kind, + version=obj.gvk.version, + manifest=obj.manifest.to_dict(), + cluster=obj.cluster, + user_id=obj.user_id, + ) + session.add(obj_orm) + await session.commit() + await session.flush() + return + + async def delete(self, meta: K8sObjectMeta) -> None: + """Delete an object from the cache.""" + async with self.__session_maker() as session, session.begin(): + obj_orm = await self.__get(meta, session) + if obj_orm is not None: + 
await session.delete(obj_orm) + + async def get(self, meta: K8sObjectMeta) -> K8sObject | None: + """Get a single object from the cache.""" + async with self.__session_maker() as session, session.begin(): + obj_orm = await self.__get(meta, session) + if obj_orm is not None: + return meta.with_manifest(obj_orm.manifest) + + return None + + async def list(self, _filter: K8sObjectFilter) -> AsyncIterable[K8sObject]: + """List objects from the cache.""" + async with self.__session_maker() as session, session.begin(): + stmt = self.__get_where_clauses(_filter) + async for res in await session.stream_scalars(stmt): + yield res.dump() + + +@dataclass +class QuotaRepository: + """Adapter for CRUD operations on resource quotas and priority classes in k8s.""" + + rq_client: ResourceQuotaClient + pc_client: PriorityClassClient + namespace: str = "default" + _label_name: str = field(init=False, default="app") + _label_value: str = field(init=False, default="renku") + + def _quota_from_manifest(self, manifest: client.V1ResourceQuota) -> models.Quota: + gpu = 0 + gpu_kind = models.GpuKind.NVIDIA + for igpu_kind in models.GpuKind: + key = f"requests.{igpu_kind}/gpu" + if key in manifest.spec.hard: + gpu = int(manifest.spec.hard.get(key)) + gpu_kind = igpu_kind + break + memory_raw = manifest.spec.hard.get("requests.memory") + if memory_raw is None: + raise errors.ValidationError( + message="Kubernetes resource quota with missing hard.requests.memory is not supported" + ) + cpu_raw = manifest.spec.hard.get("requests.cpu") + if cpu_raw is None: + raise errors.ValidationError( + message="Kubernetes resource quota with missing hard.requests.cpu is not supported" + ) + return models.Quota( + cpu=float(parse_quantity(cpu_raw)), + memory=round(parse_quantity(memory_raw) / 1_000_000_000), + gpu=gpu, + gpu_kind=gpu_kind, + id=manifest.metadata.name, + ) + + def _quota_to_manifest(self, quota: models.Quota) -> client.V1ResourceQuota: + if quota.id is None: + raise errors.ValidationError(message="The id of a quota has to be set when it is created.") + return client.V1ResourceQuota( + metadata=client.V1ObjectMeta(labels={self._label_name: self._label_value}, name=quota.id), + spec=client.V1ResourceQuotaSpec( + hard={ + "requests.cpu": quota.cpu, + "requests.memory": str(quota.memory * 1_000_000_000), + f"requests.{quota.gpu_kind}/gpu": quota.gpu, + }, + scope_selector=client.V1ScopeSelector( + match_expressions=[{"operator": "In", "scopeName": "PriorityClass", "values": [quota.id]}] + ), + ), + ) + + def get_quota(self, name: str | None) -> Optional[models.Quota]: + """Get a specific quota by name.""" + if not name: + return None + try: + res_quota = self.rq_client.read_resource_quota(name=name, namespace=self.namespace) + except client.ApiException as e: + if e.status == 404: + return None + raise + return self._quota_from_manifest(res_quota) + + def get_quotas(self, name: Optional[str] = None) -> list[models.Quota]: + """Get a specific resource quota.""" + if name is not None: + quota = self.get_quota(name) + return [quota] if quota is not None else [] + quotas = self.rq_client.list_resource_quota( + namespace=self.namespace, label_selector=f"{self._label_name}={self._label_value}" + ) + return [self._quota_from_manifest(q) for q in quotas] + + def create_quota(self, quota: models.Quota) -> models.Quota: + """Create a resource quota and priority class.""" + + metadata = {"labels": {self._label_name: self._label_value}, "name": quota.id} + quota_manifest = self._quota_to_manifest(quota) + + # Check if we have a 
priority class with the given name, return it or create one otherwise.
+        pc = self.pc_client.read_priority_class(quota.id)
+        if pc is None:
+            pc = self.pc_client.create_priority_class(
+                client.V1PriorityClass(
+                    global_default=False,
+                    value=100,
+                    preemption_policy="Never",
+                    description="Renku resource quota priority class",
+                    metadata=client.V1ObjectMeta(**metadata),
+                ),
+            )
+
+        # NOTE: The priority class is cluster-scoped and a namespace-scoped resource cannot be an owner
+        # of a cluster-scoped resource. That is why the priority class is an owner of the quota.
+        quota_manifest.metadata.owner_references = [
+            client.V1OwnerReference(
+                api_version=pc.api_version,
+                block_owner_deletion=True,
+                controller=False,
+                kind=pc.kind,
+                name=pc.metadata.name,
+                uid=pc.metadata.uid,
+            )
+        ]
+        self.rq_client.create_resource_quota(self.namespace, quota_manifest)
+        return quota
+
+    def delete_quota(self, name: str) -> None:
+        """Delete a resource quota and priority class."""
+        self.pc_client.delete_priority_class(name=name, body=client.V1DeleteOptions(propagation_policy="Foreground"))
+        self.rq_client.delete_resource_quota(name=name, namespace=self.namespace)
+
+    def update_quota(self, quota: models.Quota) -> models.Quota:
+        """Update a specific resource quota."""
+
+        quota_manifest = self._quota_to_manifest(quota)
+        self.rq_client.patch_resource_quota(name=quota.id, namespace=self.namespace, body=quota_manifest)
+        return quota
diff --git a/components/renku_data_services/k8s/models.py b/components/renku_data_services/k8s/models.py
index 07f583e82..9fade6dbf 100644
--- a/components/renku_data_services/k8s/models.py
+++ b/components/renku_data_services/k8s/models.py
@@ -3,21 +3,19 @@
 
 from __future__ import annotations
 
 from dataclasses import dataclass
-from typing import TYPE_CHECKING, Any, Self, cast
+from typing import Any, Self, cast
 
+import kubernetes
 from box import Box
 from kr8s._api import Api
 from kr8s.asyncio.objects import APIObject
-from ulid import ULID
+from kr8s.objects import Secret
+from kubernetes_asyncio.client import V1Secret
 
-from renku_data_services.base_models import APIUser
-from renku_data_services.errors import MissingResourceError, errors
+from renku_data_services.errors import errors
 from renku_data_services.k8s.constants import DUMMY_TASK_RUN_USER_ID, ClusterId
-from renku_data_services.notebooks.cr_amalthea_session import TlsSecret
 
-if TYPE_CHECKING:
-    from renku_data_services.crc.db import ClusterRepository
-    from renku_data_services.notebooks.config.dynamic import _SessionIngress
+sanitizer = kubernetes.client.ApiClient().sanitize_for_serialization
 
 
 class K8sObjectMeta:
@@ -63,7 +61,7 @@ def to_filter(self) -> K8sObjectFilter:
 
     def __repr__(self) -> str:
         return (
-            f"K8sObject(name={self.name}, namespace={self.namespace}, cluster={self.cluster}, "
+            f"{self.__class__.__name__}(name={self.name}, namespace={self.namespace}, cluster={self.cluster}, "
             f"gvk={self.gvk}, user_id={self.user_id})"
         )
 
@@ -85,7 +83,10 @@ def __init__(
         self.manifest = manifest
 
     def __repr__(self) -> str:
-        return super().__repr__()
+        return (
+            f"{self.__class__.__name__}(name={self.name}, namespace={self.namespace}, cluster={self.cluster}, "
+            f"gvk={self.gvk}, manifest={self.manifest}, user_id={self.user_id})"
+        )
 
     def to_api_object(self, api: Api) -> APIObject:
         """Convert a regular k8s object to an api object for kr8s."""
@@ -105,6 +106,62 @@ class _APIObj(APIObject):
         return _APIObj(resource=self.manifest, namespace=self.namespace, api=api)
 
 
+class K8sSecret(K8sObject):
+    """Represents a secret in k8s."""
+
+    def __init__(
+        self,
+        name: str,
+        namespace: str,
+        cluster: ClusterId,
+        gvk: GVK,
+        manifest: Box,
+        user_id: str | None = None,
+        namespaced: bool = True,
+    ) -> None:
+        super().__init__(name, namespace, cluster, gvk, manifest, user_id, namespaced)
+
+    def __repr__(self) -> str:
+        # We hide the manifest to prevent leaking secrets
+        return (
+            f"{self.__class__.__name__}(name={self.name}, namespace={self.namespace}, cluster={self.cluster}, "
+            f"gvk={self.gvk}, user_id={self.user_id})"
+        )
+
+    @classmethod
+    def from_k8s_object(cls, k8s_object: K8sObject) -> K8sSecret:
+        """Convert a k8s object to a K8sSecret object."""
+        return K8sSecret(
+            name=k8s_object.name,
+            namespace=k8s_object.namespace,
+            cluster=k8s_object.cluster,
+            gvk=k8s_object.gvk,
+            manifest=k8s_object.manifest,
+        )
+
+    @classmethod
+    def from_v1_secret(cls, secret: V1Secret, cluster: ClusterConnection) -> K8sSecret:
+        """Convert a V1Secret object to a K8sSecret object."""
+        assert secret.metadata is not None
+
+        return K8sSecret(
+            name=secret.metadata.name,
+            namespace=cluster.namespace,
+            cluster=cluster.id,
+            gvk=GVK(group="core", version=Secret.version, kind="Secret"),
+            manifest=Box(sanitizer(secret)),
+        )
+
+    def to_v1_secret(self) -> V1Secret:
+        """Convert a K8sSecret to a V1Secret object."""
+        return V1Secret(
+            metadata=self.manifest.metadata,
+            data=self.manifest.get("data", {}),
+            string_data=self.manifest.get("stringData", {}),
+            type=self.manifest.get("type"),
+        )
+
+
 @dataclass
 class K8sObjectFilter:
     """Parameters used when filtering resources from the cache or k8s."""
@@ -117,9 +174,9 @@ class K8sObjectFilter:
     user_id: str | None = None
 
 
-@dataclass(eq=True, frozen=True)
-class Cluster:
-    """Representation of a k8s cluster."""
+@dataclass(frozen=True, eq=True, kw_only=True)
+class ClusterConnection:
+    """K8s Cluster wrapper."""
 
     id: ClusterId
     namespace: str
@@ -129,48 +186,6 @@ def with_api_object(self, obj: APIObject) -> APIObjectInCluster:
         """Create an API object associated with the cluster."""
         return APIObjectInCluster(obj, self.id)
 
-    async def get_storage_class(
-        self, user: APIUser, cluster_repo: ClusterRepository, default_storage_class: str | None
-    ) -> str | None:
-        """Get the default storage class for the cluster."""
-        try:
-            cluster = await cluster_repo.select(user, ULID.from_str(self.id))
-            storage_class = cluster.session_storage_class
-        except (MissingResourceError, ValueError) as _e:
-            storage_class = default_storage_class
-
-        return storage_class
-
-    async def get_ingress_parameters(
-        self, user: APIUser, cluster_repo: ClusterRepository, main_ingress: _SessionIngress, server_name: str
-    ) -> tuple[str, str, str, str, TlsSecret | None, dict[str, str]]:
-        """Returns the ingress parameters of the cluster."""
-        tls_name = None
-
-        try:
-            cluster = await cluster_repo.select(user, ULID.from_str(self.id))
-
-            host = cluster.session_host
-            base_server_path = f"{cluster.session_path}/{server_name}"
-            base_server_url = f"{cluster.session_protocol.value}://{host}:{cluster.session_port}{base_server_path}"
-            base_server_https_url = base_server_url
-            tls_name = cluster.session_tls_secret_name
-            ingress_annotations = cluster.session_ingress_annotations
-        except (MissingResourceError, ValueError) as _e:
-            # Fallback to global, main cluster parameters
-            host = main_ingress.host
-            base_server_path = main_ingress.base_path(server_name)
-            base_server_url = main_ingress.base_url(server_name)
-            base_server_https_url = main_ingress.base_url(server_name, force_https=True)
-            ingress_annotations = main_ingress.annotations
-
-        if main_ingress.tls_secret is not None:
-            tls_name = main_ingress.tls_secret
-
-        tls_secret = None if tls_name is None else TlsSecret(adopt=False, name=tls_name)
-
-        return base_server_path, base_server_url, base_server_https_url, host, tls_secret, ingress_annotations
 
 
 @dataclass(kw_only=True, frozen=True)
 class GVK:
@@ -228,14 +243,14 @@ class APIObjectInCluster:
     @property
     def user_id(self) -> str | None:
         """Extract the user id from annotations."""
+        labels = cast(dict[str, str], self.obj.metadata.get("labels", {}))
         match self.obj.singular:
             case "jupyterserver":
-                return cast(str, self.obj.metadata.labels["renku.io/userId"])
+                return labels.get("renku.io/userId", None)
             case "amaltheasession":
-                return cast(str, self.obj.metadata.labels["renku.io/safe-username"])
+                return labels.get("renku.io/safe-username", None)
             case "buildrun":
-                return cast(str, self.obj.metadata.labels["renku.io/safe-username"])
-
+                return labels.get("renku.io/safe-username", None)
             case "taskrun":
                 return DUMMY_TASK_RUN_USER_ID
             case _:
diff --git a/components/renku_data_services/k8s_watcher/orm.py b/components/renku_data_services/k8s/orm.py
similarity index 92%
rename from components/renku_data_services/k8s_watcher/orm.py
rename to components/renku_data_services/k8s/orm.py
index 5ff94c390..9e7725744 100644
--- a/components/renku_data_services/k8s_watcher/orm.py
+++ b/components/renku_data_services/k8s/orm.py
@@ -6,7 +6,7 @@
 from typing import Any
 
 from box import Box
-from sqlalchemy import ColumnElement, DateTime, MetaData, String, func, text
+from sqlalchemy import ColumnElement, DateTime, MetaData, String, UniqueConstraint, func, text
 from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.ext.hybrid import Comparator, hybrid_property
 from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column
@@ -47,6 +47,17 @@ class K8sObjectORM(BaseORM):
     """Representation of a k8s resource."""
 
     __tablename__ = "k8s_objects"
+    __table_args__ = (
+        UniqueConstraint(
+            "group",
+            "version",
+            "kind",
+            "cluster",
+            "namespace",
+            "name",
+            name="_unique_common_k8s_objects_gvk_cluster_namespace_name",
+        ),
+    )
 
     id: Mapped[ULID] = mapped_column(
         "id",
@@ -56,7 +67,7 @@ class K8sObjectORM(BaseORM):
         default_factory=lambda: str(ULID()),
         server_default=text("generate_ulid()"),
     )
-    name: Mapped[str] = mapped_column("name", String(), index=True, unique=True)
+    name: Mapped[str] = mapped_column("name", String(), index=True)
     namespace: Mapped[str] = mapped_column("namespace", String(), index=True)
     creation_date: Mapped[datetime] = mapped_column(
         "creation_date",
@@ -78,7 +89,7 @@ class K8sObjectORM(BaseORM):
     group: Mapped[str | None] = mapped_column(index=True, nullable=True)
     version: Mapped[str] = mapped_column(index=True)
     kind: Mapped[str] = mapped_column(index=True)
-    cluster: Mapped[str] = mapped_column(index=True)
+    cluster: Mapped[ULID] = mapped_column(ULIDType, index=True)
     user_id: Mapped[str] = mapped_column(String(), index=True)
 
     @hybrid_property
diff --git a/components/renku_data_services/k8s/quota.py b/components/renku_data_services/k8s/quota.py
deleted file mode 100644
index e12abda8b..000000000
--- a/components/renku_data_services/k8s/quota.py
+++ /dev/null
@@ -1,149 +0,0 @@
-"""The adapter used to create/delete/update/get resource quotas and priority classes in k8s."""
-
-from contextlib import suppress
-from dataclasses import dataclass, field
-from typing import Optional
-
-from kubernetes import client
-from kubernetes.utils.quantity import parse_quantity
-
-from renku_data_services import errors
-from renku_data_services.crc import models
-from renku_data_services.k8s.client_interfaces import K8sCoreClientInterface, K8sSchedudlingClientInterface
-
-
-@dataclass
-class QuotaRepository:
-    """Adapter for CRUD operations on resource quotas and priority classes in k8s."""
-
-    core_client: K8sCoreClientInterface
-    scheduling_client: K8sSchedudlingClientInterface
-    namespace: str = "default"
-    _label_name: str = field(init=False, default="app")
-    _label_value: str = field(init=False, default="renku")
-
-    def _quota_from_manifest(self, manifest: client.V1ResourceQuota) -> models.Quota:
-        gpu = 0
-        gpu_kind = models.GpuKind.NVIDIA
-        for igpu_kind in models.GpuKind:
-            key = f"requests.{igpu_kind}/gpu"
-            if key in manifest.spec.hard:
-                gpu = int(manifest.spec.hard.get(key))
-                gpu_kind = igpu_kind
-                break
-        memory_raw = manifest.spec.hard.get("requests.memory")
-        if memory_raw is None:
-            raise errors.ValidationError(
-                message="Kubernetes resource quota with missing hard.requests.memory is not supported"
-            )
-        cpu_raw = manifest.spec.hard.get("requests.cpu")
-        if cpu_raw is None:
-            raise errors.ValidationError(
-                message="Kubernetes resource quota with missing hard.requests.cpu is not supported"
-            )
-        return models.Quota(
-            cpu=float(parse_quantity(cpu_raw)),
-            memory=round(parse_quantity(memory_raw) / 1_000_000_000),
-            gpu=gpu,
-            gpu_kind=gpu_kind,
-            id=manifest.metadata.name,
-        )
-
-    def _quota_to_manifest(self, quota: models.Quota) -> client.V1ResourceQuota:
-        if quota.id is None:
-            raise errors.ValidationError(message="The id of a quota has to be set when it is created.")
-        return client.V1ResourceQuota(
-            metadata=client.V1ObjectMeta(labels={self._label_name: self._label_value}, name=quota.id),
-            spec=client.V1ResourceQuotaSpec(
-                hard={
-                    "requests.cpu": quota.cpu,
-                    "requests.memory": str(quota.memory * 1_000_000_000),
-                    f"requests.{quota.gpu_kind}/gpu": quota.gpu,
-                },
-                scope_selector=client.V1ScopeSelector(
-                    match_expressions=[{"operator": "In", "scopeName": "PriorityClass", "values": [quota.id]}]
-                ),
-            ),
-        )
-
-    def get_quota(self, name: str | None) -> Optional[models.Quota]:
-        """Get a specific quota by name."""
-        if not name:
-            return None
-        try:
-            res_quota: client.V1ResourceQuota = self.core_client.read_namespaced_resource_quota(
-                name=name, namespace=self.namespace
-            )
-        except client.ApiException as e:
-            if e.status == 404:
-                return None
-            raise
-        return self._quota_from_manifest(res_quota)
-
-    def get_quotas(self, name: Optional[str] = None) -> list[models.Quota]:
-        """Get a specific resource quota."""
-        if name is not None:
-            quota = self.get_quota(name)
-            return [quota] if quota is not None else []
-        quotas = self.core_client.list_namespaced_resource_quota(
-            namespace=self.namespace, label_selector=f"{self._label_name}={self._label_value}"
-        )
-        return [self._quota_from_manifest(q) for q in quotas.items]
-
-    def create_quota(self, quota: models.Quota) -> models.Quota:
-        """Create a resource quota and priority class."""
-
-        metadata = {"labels": {self._label_name: self._label_value}, "name": quota.id}
-        quota_manifest = self._quota_to_manifest(quota)
-
-        # LSA Check if we have a priority class with the given name, return it or create one otherwise.
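A note on the quantity conversion in the `_quota_from_manifest` helper above: `parse_quantity` turns Kubernetes quantity strings into `Decimal` values, and the repository stores memory as whole gigabytes. A minimal sketch of the round trip, assuming only that the `kubernetes` package is installed (variable names are illustrative):

    from kubernetes.utils.quantity import parse_quantity

    # "8Gi" is 8 * 2**30 = 8589934592 bytes; dividing by 1_000_000_000 and
    # rounding gives the whole-gigabyte value stored on models.Quota.memory.
    memory_gb = round(parse_quantity("8Gi") / 1_000_000_000)  # -> 9

    # _quota_to_manifest goes the other way and writes plain bytes:
    memory_bytes = str(memory_gb * 1_000_000_000)  # -> "9000000000"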
-        pc: client.V1PriorityClass | None = None
-        with suppress(client.ApiException):
-            pc = self.scheduling_client.get_priority_class(quota.id)
-        if pc is None:
-            pc = self.scheduling_client.create_priority_class(
-                client.V1PriorityClass(
-                    global_default=False,
-                    value=100,
-                    preemption_policy="Never",
-                    description="Renku resource quota priority class",
-                    metadata=client.V1ObjectMeta(**metadata),
-                ),
-            )
-
-        # NOTE: The priority class is cluster-scoped and a namespace-scoped resource cannot be an owner
-        # of a cluster-scoped resource. That is why the priority class is an owner of the quota.
-        quota_manifest.owner_references = [
-            client.V1OwnerReference(
-                api_version=pc.api_version,
-                block_owner_deletion=True,
-                controller=False,
-                kind=pc.kind,
-                name=pc.metadata.name,
-                uid=pc.metadata.uid,
-            )
-        ]
-        self.core_client.create_namespaced_resource_quota(self.namespace, quota_manifest)
-        return quota
-
-    def delete_quota(self, name: str) -> None:
-        """Delete a resource quota and priority class."""
-        try:
-            self.scheduling_client.delete_priority_class(
-                name=name, body=client.V1DeleteOptions(propagation_policy="Foreground")
-            )
-            self.core_client.delete_namespaced_resource_quota(name=name, namespace=self.namespace)
-        except client.ApiException as e:
-            if e.status == 404:
-                # NOTE: The priorityclass is an owner of the resource quota so when the priority class is deleted the
-                # resource quota is also deleted. Also, we don't care if the thing we are trying to delete is not there
-                # we have the desired state so we can just go on.
-                return
-            raise
-
-    def update_quota(self, quota: models.Quota) -> models.Quota:
-        """Update a specific resource quota."""
-
-        quota_manifest = self._quota_to_manifest(quota)
-        self.core_client.patch_namespaced_resource_quota(name=quota.id, namespace=self.namespace, body=quota_manifest)
-        return quota
diff --git a/components/renku_data_services/k8s/watcher/__init__.py b/components/renku_data_services/k8s/watcher/__init__.py
new file mode 100644
index 000000000..2d5bbf210
--- /dev/null
+++ b/components/renku_data_services/k8s/watcher/__init__.py
@@ -0,0 +1,5 @@
+"""K8s watcher."""
+
+from renku_data_services.k8s.watcher.core import K8sWatcher, k8s_object_handler
+
+__all__ = ["K8sWatcher", "k8s_object_handler"]
diff --git a/components/renku_data_services/k8s_watcher/core.py b/components/renku_data_services/k8s/watcher/core.py
similarity index 68%
rename from components/renku_data_services/k8s_watcher/core.py
rename to components/renku_data_services/k8s/watcher/core.py
index c9af1c1d0..925701e3d 100644
--- a/components/renku_data_services/k8s_watcher/core.py
+++ b/components/renku_data_services/k8s/watcher/core.py
@@ -7,22 +7,19 @@
 from asyncio import CancelledError, Task
 from collections.abc import Awaitable, Callable
 from datetime import datetime, timedelta
-from typing import TYPE_CHECKING
 
 from renku_data_services.app_config import logging
 from renku_data_services.base_models.core import APIUser, InternalServiceAdmin, ServiceAdminId
 from renku_data_services.base_models.metrics import MetricsService
 from renku_data_services.crc.db import ResourcePoolRepository
 from renku_data_services.k8s.clients import K8sClusterClient
-from renku_data_services.k8s.models import GVK, K8sObject, K8sObjectFilter
-from renku_data_services.k8s_watcher.db import K8sDbCache
+from renku_data_services.k8s.constants import DEFAULT_K8S_CLUSTER, ClusterId
+from renku_data_services.k8s.db import K8sDbCache
+from renku_data_services.k8s.models import GVK, APIObjectInCluster, K8sObject, K8sObjectFilter
 from renku_data_services.notebooks.crs import State
 
 logger = logging.getLogger(__name__)
 
-if TYPE_CHECKING:
-    from renku_data_services.k8s.constants import ClusterId
-    from renku_data_services.k8s.models import APIObjectInCluster, Cluster
 
 type EventHandler = Callable[[APIObjectInCluster, str], Awaitable[None]]
 type SyncFunc = Callable[[], Awaitable[None]]
@@ -36,7 +33,7 @@ class K8sWatcher:
     def __init__(
         self,
         handler: EventHandler,
-        clusters: dict[ClusterId, Cluster],
+        clusters: dict[ClusterId, K8sClusterClient],
         kinds: list[GVK],
        db_cache: K8sDbCache,
     ) -> None:
@@ -50,77 +47,99 @@ def __init__(
         self.__sync_period_seconds = 600
         self.__cache = db_cache
 
-    async def __sync(self, cluster: Cluster, kind: GVK) -> None:
+    async def __sync(self, client: K8sClusterClient, kind: GVK, raise_exceptions: bool = False) -> None:
         """Upsert K8s objects in the cache and remove deleted objects from the cache."""
-        clnt = K8sClusterClient(cluster)
-        fltr = K8sObjectFilter(gvk=kind, cluster=cluster.id, namespace=cluster.namespace)
+
+        fltr = K8sObjectFilter(gvk=kind, cluster=client.get_cluster().id, namespace=client.get_cluster().namespace)
         # Upsert new / updated objects
         objects_in_k8s: dict[str, K8sObject] = {}
-        async for obj in clnt.list(fltr):
-            objects_in_k8s[obj.name] = obj
-            await self.__cache.upsert(obj)
-        # Remove objects that have been deleted from k8s but are still in cache
-        async for cache_obj in self.__cache.list(fltr):
-            cache_obj_is_in_k8s = objects_in_k8s.get(cache_obj.name) is not None
-            if cache_obj_is_in_k8s:
-                continue
-            await self.__cache.delete(cache_obj)
-
-    async def __full_sync(self, cluster: Cluster) -> None:
+        obj_iter = aiter(client.list(fltr))
+        while True:
+            try:
+                obj = await anext(obj_iter)
+            except StopAsyncIteration:
+                break  # No more items to list
+            except Exception as e:
+                logger.error(f"Failed to list objects: {e}")
+                if raise_exceptions:
+                    raise e
+            else:
+                objects_in_k8s[obj.name] = obj
+                await self.__cache.upsert(obj)
+
+        cache_iter = aiter(self.__cache.list(fltr))
+        while True:
+            try:
+                cache_obj = await anext(cache_iter)
+            except StopAsyncIteration:
+                break  # No more items to list
+            except Exception as e:
+                logger.error(f"Failed to list objects: {e}")
+                if raise_exceptions:
+                    raise e
+            else:
+                # Remove objects that have been deleted from k8s but are still in cache
+                if objects_in_k8s.get(cache_obj.name) is None:
+                    await self.__cache.delete(cache_obj)
+
+    async def __full_sync(self, client: K8sClusterClient) -> None:
         """Run the full sync if it has never run or at the required interval."""
-        last_sync = self.__full_sync_times.get(cluster.id)
+        cluster_id = client.get_cluster().id
+        last_sync = self.__full_sync_times.get(cluster_id)
         since_last_sync = datetime.now() - last_sync if last_sync is not None else None
         if since_last_sync is not None and since_last_sync.total_seconds() < self.__sync_period_seconds:
             return
-        self.__full_sync_running.add(cluster.id)
+        self.__full_sync_running.add(cluster_id)
         for kind in self.__kinds:
-            logger.info(f"Starting full k8s cache sync for cluster {cluster} and kind {kind}")
-            await self.__sync(cluster, kind)
-        self.__full_sync_times[cluster.id] = datetime.now()
-        self.__full_sync_running.remove(cluster.id)
+            logger.info(f"Starting full k8s cache sync for cluster {cluster_id} and kind {kind}")
+            await self.__sync(client, kind, cluster_id == DEFAULT_K8S_CLUSTER)
+        self.__full_sync_times[cluster_id] = datetime.now()
+        self.__full_sync_running.remove(cluster_id)
 
-    async def __periodic_full_sync(self, cluster: Cluster) -> None:
+    async def __periodic_full_sync(self, client: K8sClusterClient) -> None:
         """Keeps trying to run the full sync."""
         while True:
-            await self.__full_sync(cluster)
+            await self.__full_sync(client)
             await asyncio.sleep(self.__sync_period_seconds / 10)
 
-    async def __watch_kind(self, kind: GVK, cluster: Cluster) -> None:
+    async def __watch_kind(self, kind: GVK, client: K8sClusterClient) -> None:
+        logger.info(f"Watching kind {kind} for {client}")
+        cluster = client.get_cluster()
+        cluster_id = cluster.id
         while True:
             try:
                 watch = cluster.api.async_watch(kind=kind.kr8s_kind, namespace=cluster.namespace)
                 async for event_type, obj in watch:
-                    while cluster.id in self.__full_sync_running:
+                    if cluster_id in self.__full_sync_running:
                         logger.info(
                             f"Pausing k8s watch event processing for cluster {cluster} until full sync completes"
                         )
-                        await asyncio.sleep(5)
-                    await self.__handler(cluster.with_api_object(obj), event_type)
-                # in some cases, the kr8s loop above just never yields, especially if there's exceptions which
-                # can bypass async scheduling. This sleep here is as a last line of defence so this code does not
-                # execute indefinitely and prevent another resource kind from being watched.
-                await asyncio.sleep(0)
-            except Exception as e:
-                logger.error(f"watch loop failed for {kind} in cluster {cluster.id}", exc_info=e)
-                # without sleeping, this can just hang the code as exceptions seem to bypass the async scheduler
-                await asyncio.sleep(1)
+                    else:
+                        await self.__handler(cluster.with_api_object(obj), event_type)
+            except ValueError:
+                pass
+            except Exception as e:
+                logger.error(f"watch loop failed for {kind} in cluster {cluster_id}", exc_info=e)
+
+            # Sleep so that we do not instantly retry the same failing action in a tight loop.
+            await asyncio.sleep(10)
 
-    def __run_single(self, cluster: Cluster) -> list[Task]:
+    def __run_single(self, client: K8sClusterClient) -> list[Task]:
         # The loops and error handling here will need some testing and love
         tasks = []
         for kind in self.__kinds:
-            logger.info(f"watching {kind} in cluster {cluster.id}")
-            tasks.append(asyncio.create_task(self.__watch_kind(kind, cluster)))
+            logger.info(f"watching {kind} in cluster {client.get_cluster().id}")
+            tasks.append(asyncio.create_task(self.__watch_kind(kind, client)))
         return tasks
 
     async def start(self) -> None:
         """Start the watcher."""
-        for cluster in sorted(self.__clusters.values(), key=lambda x: x.id):
-            await self.__full_sync(cluster)
-            self.__full_sync_tasks[cluster.id] = asyncio.create_task(self.__periodic_full_sync(cluster))
-            self.__watch_tasks[cluster.id] = self.__run_single(cluster)
+        for cluster_id in sorted(self.__clusters.keys()):
+            if (client := self.__clusters.get(cluster_id)) is not None:
+                await self.__full_sync(client)
+                self.__full_sync_tasks[cluster_id] = asyncio.create_task(self.__periodic_full_sync(client))
+                self.__watch_tasks[cluster_id] = self.__run_single(client)
 
     async def wait(self) -> None:
         """Wait for all tasks.
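The rewritten `__sync` above trades the plain `async for` for explicit `aiter`/`anext` calls so that a failure while listing objects can be logged, and re-raised only when `raise_exceptions` is set (the default cluster), instead of silently aborting the whole sync. A minimal standalone sketch of the same pattern, assuming Python 3.10+ for the `aiter`/`anext` builtins (`fetch_items` is a hypothetical stand-in for `client.list(...)`):

    import asyncio

    async def fetch_items():
        """Hypothetical async iterator standing in for a k8s listing call."""
        for i in range(3):
            yield i

    async def drain(raise_exceptions: bool = False) -> list[int]:
        items = []
        it = aiter(fetch_items())
        while True:
            try:
                item = await anext(it)
            except StopAsyncIteration:
                break  # iterator exhausted
            except Exception as e:
                # Log and continue, or propagate when a partial sync
                # must not be papered over.
                print(f"Failed to list objects: {e}")
                if raise_exceptions:
                    raise
            else:
                items.append(item)
        return items

    print(asyncio.run(drain()))  # -> [0, 1, 2]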
diff --git a/components/renku_data_services/k8s_watcher/__init__.py b/components/renku_data_services/k8s_watcher/__init__.py
deleted file mode 100644
index 1ab1a3d94..000000000
--- a/components/renku_data_services/k8s_watcher/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-"""K8s watcher."""
-
-from renku_data_services.k8s_watcher.core import K8sWatcher, k8s_object_handler
-from renku_data_services.k8s_watcher.db import K8sDbCache
-from renku_data_services.k8s_watcher.orm import BaseORM
-
-__all__ = ["K8sWatcher", "k8s_object_handler", "K8sDbCache", "BaseORM"]
diff --git a/components/renku_data_services/k8s_watcher/db.py b/components/renku_data_services/k8s_watcher/db.py
deleted file mode 100644
index 411ffa87f..000000000
--- a/components/renku_data_services/k8s_watcher/db.py
+++ /dev/null
@@ -1,102 +0,0 @@
-"""K8s watcher database and k8s wrappers."""
-
-from __future__ import annotations
-
-from collections.abc import AsyncIterable, Callable
-
-import sqlalchemy
-from sqlalchemy import Select, bindparam, select
-from sqlalchemy.dialects.postgresql import JSONB
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from renku_data_services.errors import errors
-from renku_data_services.k8s.models import K8sObject, K8sObjectFilter, K8sObjectMeta
-from renku_data_services.k8s_watcher.orm import K8sObjectORM
-
-
-class K8sDbCache:
-    """Caching k8s objects in postgres."""
-
-    def __init__(self, session_maker: Callable[..., AsyncSession]) -> None:
-        self.__session_maker = session_maker
-
-    @staticmethod
-    def __get_where_clauses(_filter: K8sObjectFilter) -> Select[tuple[K8sObjectORM]]:
-        stmt = select(K8sObjectORM)
-        if _filter.name is not None:
-            stmt = stmt.where(K8sObjectORM.name == _filter.name)
-        if _filter.namespace is not None:
-            stmt = stmt.where(K8sObjectORM.namespace == _filter.namespace)
-        if _filter.cluster is not None:
-            stmt = stmt.where(K8sObjectORM.cluster == str(_filter.cluster))
-        if _filter.gvk is not None:
-            stmt = stmt.where(K8sObjectORM.kind_insensitive == _filter.gvk.kind)
-            stmt = stmt.where(K8sObjectORM.version_insensitive == _filter.gvk.version)
-            if _filter.gvk.group is None:
-                stmt = stmt.where(K8sObjectORM.group.is_(None))
-            else:
-                stmt = stmt.where(K8sObjectORM.group_insensitive == _filter.gvk.group)
-        if _filter.user_id is not None:
-            stmt = stmt.where(K8sObjectORM.user_id == _filter.user_id)
-        if _filter.label_selector is not None:
-            stmt = stmt.where(
-                # K8sObjectORM.manifest.comparator.contains({"metadata": {"labels": filter.label_selector}})
-                sqlalchemy.text("manifest -> 'metadata' -> 'labels' @> :labels").bindparams(
-                    bindparam("labels", _filter.label_selector, type_=JSONB)
-                )
-            )
-        return stmt
-
-    async def __get(self, meta: K8sObjectMeta, session: AsyncSession) -> K8sObjectORM | None:
-        stmt = self.__get_where_clauses(meta.to_filter())
-        obj_orm = await session.scalar(stmt)
-        return obj_orm
-
-    async def upsert(self, obj: K8sObject) -> None:
-        """Insert or update an object in the cache."""
-        if obj.user_id is None:
-            raise errors.ValidationError(message="user_id is required to upsert k8s object.")
-        async with self.__session_maker() as session, session.begin():
-            obj_orm = await self.__get(obj, session)
-            if obj_orm is not None:
-                obj_orm.manifest = obj.manifest
-                await session.commit()
-                await session.flush()
-                return
-            obj_orm = K8sObjectORM(
-                name=obj.name,
-                namespace=obj.namespace or "default",
-                group=obj.gvk.group,
-                kind=obj.gvk.kind,
-                version=obj.gvk.version,
-                manifest=obj.manifest.to_dict(),
-                cluster=str(obj.cluster),
-                user_id=obj.user_id,
-            )
-            session.add(obj_orm)
-            await session.commit()
-            await session.flush()
-            return
-
-    async def delete(self, meta: K8sObjectMeta) -> None:
-        """Delete an object from the cache."""
-        async with self.__session_maker() as session, session.begin():
-            obj_orm = await self.__get(meta, session)
-            if obj_orm is not None:
-                await session.delete(obj_orm)
-
-    async def get(self, meta: K8sObjectMeta) -> K8sObject | None:
-        """Get a single object from the cache."""
-        async with self.__session_maker() as session, session.begin():
-            obj_orm = await self.__get(meta, session)
-            if obj_orm is not None:
-                return meta.with_manifest(obj_orm.manifest)
-
-        return None
-
-    async def list(self, _filter: K8sObjectFilter) -> AsyncIterable[K8sObject]:
-        """List objects from the cache."""
-        async with self.__session_maker() as session, session.begin():
-            stmt = self.__get_where_clauses(_filter)
-            async for res in await session.stream_scalars(stmt):
-                yield res.dump()
diff --git a/components/renku_data_services/migrations/env.py b/components/renku_data_services/migrations/env.py
index 08c5d9f17..046add8c8 100644
--- a/components/renku_data_services/migrations/env.py
+++ b/components/renku_data_services/migrations/env.py
@@ -4,7 +4,7 @@
 from renku_data_services.connected_services.orm import BaseORM as connected_services
 from renku_data_services.crc.orm import BaseORM as crc
 from renku_data_services.data_connectors.orm import BaseORM as data_connectors
-from renku_data_services.k8s_watcher import BaseORM as k8s_cache
+from renku_data_services.k8s.orm import BaseORM as k8s_cache
 from renku_data_services.message_queue.orm import BaseORM as events
 from renku_data_services.metrics.orm import BaseORM as metrics
 from renku_data_services.migrations.utils import run_migrations
diff --git a/components/renku_data_services/migrations/versions/35ea9d8f54e8_add_image_registry_url.py b/components/renku_data_services/migrations/versions/35ea9d8f54e8_add_image_registry_url.py
new file mode 100644
index 000000000..ee230a256
--- /dev/null
+++ b/components/renku_data_services/migrations/versions/35ea9d8f54e8_add_image_registry_url.py
@@ -0,0 +1,30 @@
+"""add image registry url
+
+Revision ID: 35ea9d8f54e8
+Revises: c8061499b966
+Create Date: 2025-08-27 14:34:34.190341
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "35ea9d8f54e8"
+down_revision = "c8061499b966"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column(
+        "oauth2_clients", sa.Column("image_registry_url", sa.String(), nullable=True), schema="connected_services"
+    )
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.drop_column("oauth2_clients", "image_registry_url", schema="connected_services")
+    # ### end Alembic commands ###
diff --git a/components/renku_data_services/migrations/versions/3aa50593f4e4_feat_add_support_for_remote_sessions_to_.py b/components/renku_data_services/migrations/versions/3aa50593f4e4_feat_add_support_for_remote_sessions_to_.py
new file mode 100644
index 000000000..76a8b57df
--- /dev/null
+++ b/components/renku_data_services/migrations/versions/3aa50593f4e4_feat_add_support_for_remote_sessions_to_.py
@@ -0,0 +1,60 @@
+"""feat: add support for remote sessions to resource pools
+
+Revision ID: 3aa50593f4e4
+Revises: 8365db35dc76
+Create Date: 2025-09-18 13:31:32.392300
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+# revision identifiers, used by Alembic.
+revision = "3aa50593f4e4"
+down_revision = "8365db35dc76"
+branch_labels = None
+depends_on = None
+
+
+def upgrade() -> None:
+    op.add_column(
+        "resource_pools", sa.Column("remote_provider_id", sa.String(length=99), nullable=True), schema="resource_pools"
+    )
+    op.add_column(
+        "resource_pools",
+        sa.Column(
+            "remote_json", sa.JSON().with_variant(postgresql.JSONB(astext_type=sa.Text()), "postgresql"), nullable=True
+        ),
+        schema="resource_pools",
+    )
+    op.create_index(
+        op.f("ix_resource_pools_resource_pools_remote_provider_id"),
+        "resource_pools",
+        ["remote_provider_id"],
+        unique=False,
+        schema="resource_pools",
+    )
+    op.create_foreign_key(
+        "resource_pools_remote_provider_id_fk",
+        "resource_pools",
+        "oauth2_clients",
+        ["remote_provider_id"],
+        ["id"],
+        source_schema="resource_pools",
+        referent_schema="connected_services",
+        ondelete="RESTRICT",
+    )
+
+
+def downgrade() -> None:
+    op.drop_constraint(
+        "resource_pools_remote_provider_id_fk", "resource_pools", schema="resource_pools", type_="foreignkey"
+    )
+    op.drop_index(
+        op.f("ix_resource_pools_resource_pools_remote_provider_id"),
+        table_name="resource_pools",
+        schema="resource_pools",
+    )
+    op.drop_column("resource_pools", "remote_json", schema="resource_pools")
+    op.drop_column("resource_pools", "remote_provider_id", schema="resource_pools")
diff --git a/components/renku_data_services/migrations/versions/66e2f1271cf6_add_missing_entity_slug_constraints.py b/components/renku_data_services/migrations/versions/66e2f1271cf6_add_missing_entity_slug_constraints.py
new file mode 100644
index 000000000..c40a18b31
--- /dev/null
+++ b/components/renku_data_services/migrations/versions/66e2f1271cf6_add_missing_entity_slug_constraints.py
@@ -0,0 +1,162 @@
+"""add missing entity slug constraints
+
+Revision ID: 66e2f1271cf6
+Revises: 35ea9d8f54e8
+Create Date: 2025-09-04 13:05:39.396089
+
+"""
+
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "66e2f1271cf6" +down_revision = "35ea9d8f54e8" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # Deduplicate existing data that may have conflicts that the new constraints below will not allow + subquery1 = ( + # Duplicate project slugs + "SELECT id, ROW_NUMBER() OVER (PARTITION BY namespace_id, slug ORDER by id DESC) as row_number " + "FROM common.entity_slugs " + "WHERE data_connector_id IS NULL AND project_id IS NOT NULL " + "UNION " + # Duplicate slugs for DCs owned by user/group + "SELECT id, ROW_NUMBER() OVER (PARTITION BY namespace_id, slug ORDER by id DESC) as row_number " + "FROM common.entity_slugs " + "WHERE data_connector_id IS NOT NULL AND project_id IS NULL " + "UNION " + # Duplicate slugs for DCs owned by a project + "SELECT id, ROW_NUMBER() OVER (PARTITION BY namespace_id, project_id, slug ORDER by id DESC) as row_number " + "FROM common.entity_slugs " + "WHERE data_connector_id IS NOT NULL AND project_id IS NOT NULL " + ) + # NOTE: There should be 1 row per group, if there are 2 or more rows those are all duplicates that need to be deduplicated + subquery2 = f"SELECT id FROM ({subquery1}) WHERE row_number >= 2" # nosec: B608 + op.execute( + # NOTE: We truncate the original slug to make sure we have enough space since the slug can be 99 characters long. + sa.text( + f"UPDATE common.entity_slugs SET slug = LEFT(slug,83) || '-duplicate-' || substr(md5(random()::text), 1, 5) WHERE id IN ({subquery2}) " # nosec: B608 + ) + ) + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index( + "entity_slugs_unique_slugs", table_name="entity_slugs", schema="common", postgresql_nulls_not_distinct=True + ) + op.create_index( + "entity_slugs_unique_slugs_data_connector_in_project_slugs_1", + "entity_slugs", + ["namespace_id", "project_id", "slug"], + unique=True, + schema="common", + postgresql_where="project_id IS NOT NULL AND data_connector_id IS NOT NULL", + ) + op.create_index( + "entity_slugs_unique_slugs_data_connector_in_project_slugs_2", + "entity_slugs", + ["namespace_id", "data_connector_id", "slug"], + unique=True, + schema="common", + postgresql_nulls_not_distinct=True, + postgresql_where="project_id IS NOT NULL AND data_connector_id IS NOT NULL", + ) + op.create_index( + "entity_slugs_unique_slugs_data_connector_in_project_slugs_3", + "entity_slugs", + ["namespace_id", "project_id", "data_connector_id"], + unique=True, + schema="common", + postgresql_where="project_id IS NOT NULL AND data_connector_id IS NOT NULL", + ) + op.create_index( + "entity_slugs_unique_slugs_dc_in_group_user_slugs", + "entity_slugs", + ["namespace_id", "slug"], + unique=True, + schema="common", + postgresql_where="project_id IS NULL", + ) + op.create_index( + "entity_slugs_unique_slugs_project_slugs", + "entity_slugs", + ["namespace_id", "slug"], + unique=True, + schema="common", + postgresql_where="data_connector_id IS NULL", + ) + op.create_index( + "entity_slugs_unique_slugs_same_dc_in_group_user_two_slugs", + "entity_slugs", + ["namespace_id", "data_connector_id"], + unique=True, + schema="common", + postgresql_where="project_id IS NULL", + ) + op.create_index( + "entity_slugs_unique_slugs_same_project_two_slugs", + "entity_slugs", + ["namespace_id", "project_id"], + unique=True, + schema="common", + postgresql_where="data_connector_id IS NULL", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index( + "entity_slugs_unique_slugs_same_project_two_slugs", + table_name="entity_slugs", + schema="common", + postgresql_where="data_connector_id IS NULL", + ) + op.drop_index( + "entity_slugs_unique_slugs_same_dc_in_group_user_two_slugs", + table_name="entity_slugs", + schema="common", + postgresql_where="project_id IS NULL", + ) + op.drop_index( + "entity_slugs_unique_slugs_project_slugs", + table_name="entity_slugs", + schema="common", + postgresql_where="data_connector_id IS NULL", + ) + op.drop_index( + "entity_slugs_unique_slugs_dc_in_group_user_slugs", + table_name="entity_slugs", + schema="common", + postgresql_where="project_id IS NULL", + ) + op.drop_index( + "entity_slugs_unique_slugs_data_connector_in_project_slugs_3", + table_name="entity_slugs", + schema="common", + postgresql_where="project_id IS NOT NULL AND data_connector_id IS NOT NULL", + ) + op.drop_index( + "entity_slugs_unique_slugs_data_connector_in_project_slugs_2", + table_name="entity_slugs", + schema="common", + postgresql_nulls_not_distinct=True, + postgresql_where="project_id IS NOT NULL AND data_connector_id IS NOT NULL", + ) + op.drop_index( + "entity_slugs_unique_slugs_data_connector_in_project_slugs_1", + table_name="entity_slugs", + schema="common", + postgresql_where="project_id IS NOT NULL AND data_connector_id IS NOT NULL", + ) + op.create_index( + "entity_slugs_unique_slugs", + "entity_slugs", + ["namespace_id", "project_id", "data_connector_id", "slug"], + unique=True, + schema="common", + postgresql_nulls_not_distinct=True, + ) + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/8365db35dc76_create_url_redirects.py b/components/renku_data_services/migrations/versions/8365db35dc76_create_url_redirects.py new file mode 100644 index 000000000..ff4a1ec6f --- /dev/null +++ b/components/renku_data_services/migrations/versions/8365db35dc76_create_url_redirects.py @@ -0,0 +1,47 @@ +""" + +Revision ID: 8365db35dc76 +Revises: fe61e825d95e +Create Date: 2025-08-28 15:16:43.330834 + +""" + +import sqlalchemy as sa +from alembic import op + +from renku_data_services.utils.sqlalchemy import ULIDType + +# revision identifiers, used by Alembic. +revision = "8365db35dc76" +down_revision = "fe61e825d95e" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.create_table( + "url_redirects", + sa.Column("id", ULIDType(), nullable=False), + sa.Column("source_url", sa.String(), nullable=False), + sa.Column("target_url", sa.String(), nullable=False), + sa.Column("creation_date", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), server_default=sa.text("now()"), nullable=False), + sa.PrimaryKeyConstraint("id"), + schema="platform", + ) + op.create_index( + op.f("ix_platform_url_redirects_source_url"), "url_redirects", ["source_url"], unique=True, schema="platform" + ) + op.create_index( + op.f("ix_platform_url_redirects_target_url"), "url_redirects", ["target_url"], unique=False, schema="platform" + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_index(op.f("ix_platform_url_redirects_target_url"), table_name="url_redirects", schema="platform") + op.drop_index(op.f("ix_platform_url_redirects_source_url"), table_name="url_redirects", schema="platform") + op.drop_table("url_redirects", schema="platform") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/c8061499b966_change_cluster_name_and_uniuqe_.py b/components/renku_data_services/migrations/versions/c8061499b966_change_cluster_name_and_uniuqe_.py new file mode 100644 index 000000000..57ad2b783 --- /dev/null +++ b/components/renku_data_services/migrations/versions/c8061499b966_change_cluster_name_and_uniuqe_.py @@ -0,0 +1,44 @@ +"""change cluster name and uniuqe constraint + +Revision ID: c8061499b966 +Revises: e117405fed51 +Create Date: 2025-08-14 07:40:10.492620 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "c8061499b966" +down_revision = "e117405fed51" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.execute( + sa.text( + "UPDATE common.k8s_objects SET cluster='0RENK1RENK2RENK3RENK4RENK5' where k8s_objects.cluster='renkulab'" + ) + ) + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index("ix_common_k8s_objects_name", table_name="k8s_objects", schema="common") + op.create_index(op.f("ix_common_k8s_objects_name"), "k8s_objects", ["name"], unique=False, schema="common") + op.create_unique_constraint( + "_unique_common_k8s_objects_gvk_cluster_namespace_name", + "k8s_objects", + ["group", "version", "kind", "cluster", "namespace", "name"], + schema="common", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.drop_constraint( + "_unique_common_k8s_objects_gvk_cluster_namespace_name", "k8s_objects", schema="common", type_="unique" + ) + op.drop_index(op.f("ix_common_k8s_objects_name"), table_name="k8s_objects", schema="common") + op.create_index("ix_common_k8s_objects_name", "k8s_objects", ["name"], unique=True, schema="common") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/e117405fed51_add_strip_prefix_option_to_environments.py b/components/renku_data_services/migrations/versions/e117405fed51_add_strip_prefix_option_to_environments.py new file mode 100644 index 000000000..3800acf60 --- /dev/null +++ b/components/renku_data_services/migrations/versions/e117405fed51_add_strip_prefix_option_to_environments.py @@ -0,0 +1,32 @@ +"""add strip prefix option to environments + +Revision ID: e117405fed51 +Revises: ca3731b65787 +Create Date: 2025-03-27 16:48:51.460749 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "e117405fed51" +down_revision = "ca3731b65787" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic - please adjust! ### + op.add_column( + "environments", + sa.Column("strip_path_prefix", sa.Boolean(), server_default=sa.text("false"), nullable=False), + schema="sessions", + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic - please adjust! 
### + op.drop_column("environments", "strip_path_prefix", schema="sessions") + # ### end Alembic commands ### diff --git a/components/renku_data_services/migrations/versions/fe61e825d95e_add_generic_oidc_provider_type.py b/components/renku_data_services/migrations/versions/fe61e825d95e_add_generic_oidc_provider_type.py new file mode 100644 index 000000000..4182569c5 --- /dev/null +++ b/components/renku_data_services/migrations/versions/fe61e825d95e_add_generic_oidc_provider_type.py @@ -0,0 +1,35 @@ +"""add generic_oidc provider type + +Revision ID: fe61e825d95e +Revises: 66e2f1271cf6 +Create Date: 2025-09-05 09:30:23.062585 + +""" + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. +revision = "fe61e825d95e" +down_revision = "66e2f1271cf6" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.execute("ALTER TYPE providerkind ADD VALUE 'generic_oidc'") + op.add_column( + "oauth2_clients", sa.Column("oidc_issuer_url", sa.String(), nullable=True), schema="connected_services" + ) + + +def downgrade() -> None: + op.drop_column("oauth2_clients", "oidc_issuer_url", schema="connected_services") + # NOTE: Postgres does not allow removing values from an enum + op.execute("DELETE FROM connected_services.oauth2_clients WHERE kind = 'generic_oidc'") + op.execute("ALTER TYPE providerkind RENAME TO providerkind_old;") + op.execute("CREATE TYPE providerkind AS ENUM ('gitlab', 'github', 'drive', 'onedrive', 'dropbox')") + op.execute( + "ALTER TABLE connected_services.oauth2_clients ALTER COLUMN kind SET DATA TYPE providerkind USING kind::text::providerkind" + ) + op.execute("DROP TYPE providerkind_old CASCADE") diff --git a/components/renku_data_services/namespace/orm.py b/components/renku_data_services/namespace/orm.py index 8830f527e..ea5d32f9d 100644 --- a/components/renku_data_services/namespace/orm.py +++ b/components/renku_data_services/namespace/orm.py @@ -246,14 +246,93 @@ class EntitySlugORM(BaseORM): __tablename__ = "entity_slugs" __table_args__ = ( + # NOTE: prevents 2 different projects from having the same slug + # I.e. an invalid example like this: + # namespace_id | project_id | data_connector_id | slug + # 1 | 1 | NULL | prj1 + # 1 | 2 | NULL | prj1 Index( - "entity_slugs_unique_slugs", + "entity_slugs_unique_slugs_project_slugs", + "namespace_id", + "slug", + unique=True, + postgresql_where="data_connector_id IS NULL", + ), + # NOTE: prevents the same project from having different slugs + # I.e. an invalid example like this: + # namespace_id | project_id | data_connector_id | slug + # 1 | 1 | NULL | prj1 + # 1 | 1 | NULL | prj2 + Index( + "entity_slugs_unique_slugs_same_project_two_slugs", "namespace_id", "project_id", + unique=True, + postgresql_where="data_connector_id IS NULL", + ), + # NOTE: prevents 2 different data connectors owned by group or user from having the same slug + # I.e. an invalid example like this: + # namespace_id | project_id | data_connector_id | slug + # 1 | NULL | 1 | dc1 + # 1 | NULL | 2 | dc1 + Index( + "entity_slugs_unique_slugs_dc_in_group_user_slugs", + "namespace_id", + "slug", + unique=True, + postgresql_where="project_id IS NULL", + ), + # NOTE: prevents the same data connector owned by group or user from having 2 different slugs + # I.e. 
an invalid example like this: + # namespace_id | project_id | data_connector_id | slug + # 1 | NULL | 1 | dc1 + # 1 | NULL | 1 | dc2 + Index( + "entity_slugs_unique_slugs_same_dc_in_group_user_two_slugs", + "namespace_id", + "data_connector_id", + unique=True, + postgresql_where="project_id IS NULL", + ), + # NOTE: prevents 2 different data connectors owned by the same project from having the same slug + # I.e. an invalid example like this: + # namespace_id | project_id | data_connector_id | slug + # 1 | 1 | 1 | dc1 + # 1 | 1 | 2 | dc1 + Index( + "entity_slugs_unique_slugs_data_connector_in_project_slugs_1", + "namespace_id", + "project_id", + "slug", + unique=True, + postgresql_where="project_id IS NOT NULL AND data_connector_id IS NOT NULL", + ), + # NOTE: prevents the same data connector with the same slug being owned by different projects + # I.e. an invalid example like this: + # namespace_id | project_id | data_connector_id | slug + # 1 | 1 | 1 | dc1 + # 1 | 2 | 1 | dc1 + Index( + "entity_slugs_unique_slugs_data_connector_in_project_slugs_2", + "namespace_id", "data_connector_id", "slug", unique=True, postgresql_nulls_not_distinct=True, + postgresql_where="project_id IS NOT NULL AND data_connector_id IS NOT NULL", + ), + # NOTE: prevents the same data connector owned by the same project having 2 different slugs + # I.e. an invalid example like this: + # namespace_id | project_id | data_connector_id | slug + # 1 | 1 | 1 | dc1 + # 1 | 1 | 1 | dc2 + Index( + "entity_slugs_unique_slugs_data_connector_in_project_slugs_3", + "namespace_id", + "project_id", + "data_connector_id", + unique=True, + postgresql_where="project_id IS NOT NULL AND data_connector_id IS NOT NULL", ), CheckConstraint( "(project_id IS NOT NULL) OR (data_connector_id IS NOT NULL)", diff --git a/components/renku_data_services/notebooks/api.spec.yaml b/components/renku_data_services/notebooks/api.spec.yaml index cf11d5cc5..1ded7088b 100644 --- a/components/renku_data_services/notebooks/api.spec.yaml +++ b/components/renku_data_services/notebooks/api.spec.yaml @@ -21,9 +21,16 @@ paths: minLength: 1 responses: '200': - description: The Docker image is available. - '404': - description: The Docker image is not available. + content: + application/json: + schema: + "$ref": "#/components/schemas/ImageCheckResponse" + '422': + content: + application/json: + schema: + "$ref": "#/components/schemas/ErrorResponse" + description: The image url is malformed. tags: - notebooks "/notebooks/logs/{server_name}": @@ -367,15 +374,17 @@ paths: minLength: 1 responses: "200": - description: The docker image can be found - "404": - description: The docker image cannot be found or the user does not have permissions to access it + description: Information about the accessibility of the image content: application/json: schema: - $ref: "#/components/schemas/ErrorResponse" - default: - $ref: "#/components/responses/Error" + "$ref": "#/components/schemas/ImageCheckResponse" + "422": + description: The image url is malformed. + content: + application/json: + schema: + "$ref": "#/components/schemas/ErrorResponse" tags: - sessions components: @@ -1036,7 +1045,53 @@ components: maxLength: 50 pattern: '^[a-z]([-a-z0-9]*[a-z0-9])?$' example: d185e68d-d43-renku-2-b9ac279a4e8a85ac28d08 + ImageCheckResponse: + type: object + properties: + accessible: + type: boolean + description: Whether the image is accessible or not. 
+        connection:
+          "$ref": "#/components/schemas/ImageConnection"
+        provider:
+          "$ref": "#/components/schemas/ImageProvider"
+      required:
+        - accessible
+    ImageConnection:
+      type: object
+      properties:
+        id:
+          type: string
+        provider_id:
+          type: string
+        status:
+          "$ref": "#/components/schemas/ImageConnectionStatus"
+      required:
+        - id
+        - provider_id
+        - status
+    ImageConnectionStatus:
+      type: string
+      enum:
+        - connected
+        - pending
+        - invalid_credentials
+    ImageProvider:
+      type: object
+      properties:
+        id:
+          type: string
+        name:
+          type: string
+        url:
+          type: string
+      required:
+        - id
+        - name
+        - url
   responses:
+    ImageCheckResponse:
+      description: Information about whether a docker image is available and, if a connected service exists, which connected service can be used to access it.
     Error:
       description: The schema for all 4xx and 5xx responses
       content:
diff --git a/components/renku_data_services/notebooks/api/amalthea_patches/init_containers.py b/components/renku_data_services/notebooks/api/amalthea_patches/init_containers.py
index 78c97f092..dc4f88ad6 100644
--- a/components/renku_data_services/notebooks/api/amalthea_patches/init_containers.py
+++ b/components/renku_data_services/notebooks/api/amalthea_patches/init_containers.py
@@ -11,10 +11,20 @@
 from kubernetes import client
 
 from renku_data_services.base_models.core import AnonymousAPIUser, AuthenticatedAPIUser
-from renku_data_services.notebooks.api.amalthea_patches.utils import get_certificates_volume_mounts
+from renku_data_services.notebooks.api.amalthea_patches.utils import (
+    get_certificates_volume_mounts,
+    get_certificates_volume_mounts_unserialized,
+)
 from renku_data_services.notebooks.api.classes.repository import GitProvider, Repository
 from renku_data_services.notebooks.config import NotebooksConfig
-from renku_data_services.notebooks.crs import EmptyDir, ExtraVolume, ExtraVolumeMount, InitContainer, SecretAsVolume
+from renku_data_services.notebooks.crs import (
+    EmptyDir,
+    ExtraVolume,
+    ExtraVolumeMount,
+    InitContainer,
+    SecretAsVolume,
+)
+from renku_data_services.notebooks.models import SessionExtraResources
 from renku_data_services.project import constants as project_constants
 from renku_data_services.project.models import SessionSecret
@@ -92,6 +102,7 @@ async def git_clone_container_v2(
         },
     ]
     if user.is_authenticated:
+        env.append({"name": f"{prefix}GIT_PROXY_PORT", "value": str(config.sessions.git_proxy.port)})
         if user.email:
             env.append(
                 {"name": f"{prefix}USER__EMAIL", "value": user.email},
@@ -294,6 +305,16 @@ async def git_clone(server: UserServer) -> list[dict[str, Any]]:
     ]
 
 
+def certificates_volume_mounts(config: NotebooksConfig) -> list[ExtraVolumeMount]:
+    """Get the volume mounts for the CA certificates."""
+    return get_certificates_volume_mounts_unserialized(
+        config,
+        etc_certs=True,
+        custom_certs=True,
+        read_only_etc_certs=True,
+    )
+
+
 def certificates_container(config: NotebooksConfig) -> tuple[client.V1Container, list[client.V1Volume]]:
     """The specification for the container that setups self signed CAs."""
     init_container = client.V1Container(
@@ -401,14 +422,14 @@ def download_image(server: UserServer) -> list[dict[str, Any]]:
     ]
 
 
-def user_secrets_container(
+def user_secrets_extras(
     user: AuthenticatedAPIUser | AnonymousAPIUser,
     config: NotebooksConfig,
     secrets_mount_directory: str,
     k8s_secret_name: str,
     session_secrets: list[SessionSecret],
-) -> tuple[InitContainer, list[ExtraVolume], list[ExtraVolumeMount]] | None:
-    """The init container which decrypts user secrets to be mounted in the session."""
+) -> SessionExtraResources | None:
+    """The session extras which decrypts user secrets to be mounted in the session."""
     if not session_secrets or user.is_anonymous:
         return None
 
@@ -457,8 +478,8 @@ def user_secrets_container(
             )
         )
 
-    return (
-        init_container,
-        [volume_k8s_secrets, volume_decrypted_secrets],
-        [decrypted_volume_mount],
+    return SessionExtraResources(
+        init_containers=[init_container],
+        volumes=[volume_k8s_secrets, volume_decrypted_secrets],
+        volume_mounts=[decrypted_volume_mount],
     )
diff --git a/components/renku_data_services/notebooks/api/amalthea_patches/utils.py b/components/renku_data_services/notebooks/api/amalthea_patches/utils.py
index 650970977..2d546e666 100644
--- a/components/renku_data_services/notebooks/api/amalthea_patches/utils.py
+++ b/components/renku_data_services/notebooks/api/amalthea_patches/utils.py
@@ -5,28 +5,57 @@
 from kubernetes import client
 
 from renku_data_services.notebooks.config import NotebooksConfig
+from renku_data_services.notebooks.crs import ExtraVolumeMount
 
 
-def get_certificates_volume_mounts(
+def get_certificates_volume_mounts_unserialized(
     config: NotebooksConfig,
     etc_certs: bool = True,
     custom_certs: bool = True,
     read_only_etc_certs: bool = False,
-) -> list[dict[str, Any]]:
+) -> list[ExtraVolumeMount]:
     """The list of volume mounts for custom certificates."""
     volume_mounts = []
-    etc_ssl_certs = client.V1VolumeMount(
+    etc_ssl_certs = ExtraVolumeMount(
         name="etc-ssl-certs",
-        mount_path="/etc/ssl/certs/",
-        read_only=read_only_etc_certs,
+        mountPath="/etc/ssl/certs/",
+        readOnly=read_only_etc_certs,
     )
-    custom_ca_certs = client.V1VolumeMount(
+    custom_ca_certs = ExtraVolumeMount(
         name="custom-ca-certs",
-        mount_path=config.sessions.ca_certs.path,
-        read_only=True,
+        mountPath=config.sessions.ca_certs.path,
+        readOnly=True,
     )
     if etc_certs:
         volume_mounts.append(etc_ssl_certs)
     if custom_certs:
         volume_mounts.append(custom_ca_certs)
-    return cast(list[dict[str, Any]], client.ApiClient().sanitize_for_serialization(volume_mounts))
+    return volume_mounts
+
+
+def __convert_extra_volume_mounts(input: list[ExtraVolumeMount]) -> list[client.V1VolumeMount]:
+    """Convert between different volume mount types."""
+    return [
+        client.V1VolumeMount(
+            mount_path=vol.mountPath,
+            mount_propagation=vol.mountPropagation,
+            name=vol.name,
+            read_only=vol.readOnly,
+            recursive_read_only=vol.recursiveReadOnly,
+            sub_path=vol.subPath,
+            sub_path_expr=vol.subPathExpr,
+        )
+        for vol in input
+    ]
+
+
+def get_certificates_volume_mounts(
+    config: NotebooksConfig,
+    etc_certs: bool = True,
+    custom_certs: bool = True,
+    read_only_etc_certs: bool = False,
+) -> list[dict[str, Any]]:
+    """The list of volume mounts for custom certificates."""
+    vol_mounts = get_certificates_volume_mounts_unserialized(config, etc_certs, custom_certs, read_only_etc_certs)
+    vol_mounts_ser = __convert_extra_volume_mounts(vol_mounts)
+    return cast(list[dict[str, Any]], client.ApiClient().sanitize_for_serialization(vol_mounts_ser))
diff --git a/components/renku_data_services/notebooks/api/classes/data_service.py b/components/renku_data_services/notebooks/api/classes/data_service.py
index 31e1a2d2b..3abcee82c 100644
--- a/components/renku_data_services/notebooks/api/classes/data_service.py
+++ b/components/renku_data_services/notebooks/api/classes/data_service.py
@@ -1,24 +1,28 @@
 """Helpers for interacting with the data service."""
 
+from __future__ import annotations
+
+import os
 from dataclasses import dataclass, field
 from typing import Optional
 from urllib.parse import urljoin, urlparse
urllib.parse import urljoin, urlparse -import httpx - +from renku_data_services.app_config import logging from renku_data_services.base_models import APIUser +from renku_data_services.connected_services.db import ConnectedServicesRepository +from renku_data_services.connected_services.utils import GitHubProviderType, get_github_provider_type from renku_data_services.crc.db import ResourcePoolRepository from renku_data_services.crc.models import ResourceClass, ResourcePool from renku_data_services.notebooks.api.classes.repository import ( INTERNAL_GITLAB_PROVIDER, GitProvider, - OAuth2Connection, - OAuth2Provider, ) from renku_data_services.notebooks.api.schemas.server_options import ServerOptions -from renku_data_services.notebooks.errors.intermittent import IntermittentError +from renku_data_services.notebooks.config.dynamic import _GitConfig, _SessionConfig from renku_data_services.notebooks.errors.user import InvalidComputeResourceError +logger = logging.getLogger(__name__) + @dataclass class CRCValidator: @@ -145,76 +149,74 @@ async def find_acceptable_class( return self.options -@dataclass class GitProviderHelper: - """Calls to the data service to configure git providers.""" + """Gets the list of providers.""" - service_url: str - renku_url: str - internal_gitlab_url: str - - def __post_init__(self) -> None: - self.service_url = self.service_url.rstrip("/") - self.renku_url = self.renku_url.rstrip("/") + def __init__( + self, + connected_services_repo: ConnectedServicesRepository, + service_url: str, + renku_url: str, + internal_gitlab_url: str, + enable_internal_gitlab: bool, + ) -> None: + self.connected_services_repo = connected_services_repo + self.renku_url = renku_url.rstrip("/") + self.service_url = service_url.rstrip("/") + self.internal_gitlab_url: str = internal_gitlab_url + self.enable_internal_gitlab: bool = enable_internal_gitlab async def get_providers(self, user: APIUser) -> list[GitProvider]: """Get the providers for the specific user.""" if user is None or user.access_token is None: return [] - connections = await self.get_oauth2_connections(user=user) + + logger.debug(f"Get git providers for user {user.id}") + + connections = await self.connected_services_repo.get_oauth2_connections(user) providers: dict[str, GitProvider] = dict() for c in connections: if c.provider_id in providers: continue - provider = await self.get_oauth2_provider(c.provider_id) + provider = await self.connected_services_repo.get_oauth2_client(c.provider_id, user) + if get_github_provider_type(provider) == GitHubProviderType.oauth_app: + continue access_token_url = urljoin( self.renku_url, urlparse(f"{self.service_url}/oauth2/connections/{c.id}/token").path, ) providers[c.provider_id] = GitProvider( - id=c.provider_id, - url=provider.url, - connection_id=c.id, - access_token_url=access_token_url, + id=c.provider_id, url=provider.url, connection_id=str(c.id), access_token_url=access_token_url ) providers_list = list(providers.values()) # Insert the internal GitLab as the first provider - internal_gitlab_access_token_url = urljoin(self.renku_url, "/api/auth/gitlab/exchange") - providers_list.insert( - 0, - GitProvider( - id=INTERNAL_GITLAB_PROVIDER, - url=self.internal_gitlab_url, - connection_id="", - access_token_url=internal_gitlab_access_token_url, - ), - ) + if self.enable_internal_gitlab and self.internal_gitlab_url: + internal_gitlab_access_token_url = urljoin(self.renku_url, "/api/auth/gitlab/exchange") + providers_list.insert( + 0, + GitProvider( + id=INTERNAL_GITLAB_PROVIDER, + 
url=self.internal_gitlab_url, + connection_id="", + access_token_url=internal_gitlab_access_token_url, + ), + ) return providers_list - async def get_oauth2_connections(self, user: APIUser | None = None) -> list[OAuth2Connection]: - """Get oauth2 connections.""" - if user is None or user.access_token is None: - return [] - request_url = f"{self.service_url}/oauth2/connections" - headers = {"Authorization": f"bearer {user.access_token}"} - async with httpx.AsyncClient(timeout=10) as client: - res = await client.get(request_url, headers=headers) - if res.status_code != 200: - raise IntermittentError(message="The data service sent an unexpected response, please try again later") - connections = res.json() - connections = [OAuth2Connection.from_dict(c) for c in connections if c["status"] == "connected"] - return connections - - async def get_oauth2_provider(self, provider_id: str) -> OAuth2Provider: - """Get a specific provider.""" - request_url = f"{self.service_url}/oauth2/providers/{provider_id}" - async with httpx.AsyncClient(timeout=10) as client: - res = await client.get(request_url) - if res.status_code != 200: - raise IntermittentError(message="The data service sent an unexpected response, please try again later") - provider = res.json() - return OAuth2Provider.from_dict(provider) + @classmethod + def create(cls, csr: ConnectedServicesRepository, enable_internal_gitlab: bool) -> GitProviderHelper: + """Create an instance.""" + sessions_config = _SessionConfig.from_env() + git_config = _GitConfig.from_env(enable_internal_gitlab=enable_internal_gitlab) + data_service_url = os.environ.get("NB_DATA_SERVICE_URL", "http://127.0.0.1:8000") + return GitProviderHelper( + connected_services_repo=csr, + service_url=data_service_url, + renku_url=f"http://{sessions_config.ingress.host}", + internal_gitlab_url=git_config.url, + enable_internal_gitlab=enable_internal_gitlab, + ) @dataclass diff --git a/components/renku_data_services/notebooks/api/classes/image.py b/components/renku_data_services/notebooks/api/classes/image.py index d9317c6b6..a8170a6a0 100644 --- a/components/renku_data_services/notebooks/api/classes/image.py +++ b/components/renku_data_services/notebooks/api/classes/image.py @@ -1,5 +1,7 @@ """Used to get information about docker images used in jupyter servers.""" +from __future__ import annotations + import base64 import re from dataclasses import dataclass, field @@ -10,8 +12,11 @@ import httpx from werkzeug.datastructures import WWWAuthenticate +from renku_data_services.app_config import logging from renku_data_services.errors import errors +logger = logging.getLogger(__name__) + class ManifestTypes(Enum): """The mime types for docker image manifests.""" @@ -35,17 +40,24 @@ class ImageRepoDockerAPI: hostname: str oauth2_token: Optional[str] = field(default=None, repr=False) + # NOTE: We need to follow redirects so that we can authenticate with the image repositories properly. # NOTE: If we do not use default_factory to create the client here requests will fail because it can happen # that the client gets created in the wrong asyncio loop. 
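    # Illustrative sketch (not part of this patch): default_factory defers client
    # creation to dataclass instantiation, so the AsyncClient is built inside the
    # event loop that is actually running, e.g.
    #
    #     async def check_image_repo() -> None:  # hypothetical helper
    #         api = ImageRepoDockerAPI(hostname="registry.example.org")  # made-up host
    #         # api.client was created in the currently running loop, so requests
    #         # awaited on it here will work.
    #
    # A client created once at import time could be bound to a different (or not
    # yet existing) loop and fail at request time.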
client: httpx.AsyncClient = field(default_factory=lambda: httpx.AsyncClient(timeout=10, follow_redirects=True)) + scheme: str = "https" - async def _get_docker_token(self, image: "Image") -> Optional[str]: + def __post_init__(self) -> None: + self.hostname = self.hostname.rstrip("/") + if self.scheme == "": + self.scheme = "https" + + async def _get_docker_token(self, image: Image) -> Optional[str]: """Get an authorization token from the docker v2 API. This will return the token provided by the API (or None if no token was found). """ - image_digest_url = f"https://{self.hostname}/v2/{image.name}/manifests/{image.tag}" + image_digest_url = f"{self.scheme}://{self.hostname}/v2/{image.name}/manifests/{image.tag}" try: auth_req = await self.client.get(image_digest_url) except httpx.ConnectError: @@ -62,14 +74,14 @@ async def _get_docker_token(self, image: "Image") -> Optional[str]: return None headers = {"Accept": "application/json"} if self.oauth2_token: - creds = base64.urlsafe_b64encode(f"oauth2:{self.oauth2_token}".encode()).decode() + creds = base64.b64encode(f"oauth2:{self.oauth2_token}".encode()).decode() headers["Authorization"] = f"Basic {creds}" token_req = await self.client.get(realm, params=params, headers=headers) return str(token_req.json().get("token")) async def get_image_manifest( self, - image: "Image", + image: Image, platform_architecture: str = DEFAULT_PLATFORM_ARCHITECTURE, platform_os: str = DEFAULT_PLATFORM_OS, ) -> Optional[dict[str, Any]]: @@ -79,7 +91,7 @@ async def get_image_manifest( message=f"The image hostname {image.hostname} does not match the image repository {self.hostname}" ) token = await self._get_docker_token(image) - image_digest_url = f"https://{image.hostname}/v2/{image.name}/manifests/{image.tag}" + image_digest_url = f"{self.scheme}://{image.hostname}/v2/{image.name}/manifests/{image.tag}" headers = {"Accept": ManifestTypes.docker_v2.value} if token: headers["Authorization"] = f"Bearer {token}" @@ -105,7 +117,7 @@ def platform_matches(manifest: dict[str, Any]) -> bool: image_digest: str | None = manifest.get("digest") if not manifest or not image_digest: return None - image_digest_url = f"https://{image.hostname}/v2/{image.name}/manifests/{image_digest}" + image_digest_url = f"{self.scheme}://{image.hostname}/v2/{image.name}/manifests/{image_digest}" media_type = manifest.get("mediaType") headers["Accept"] = ManifestTypes.docker_v2.value if media_type in [ @@ -128,11 +140,26 @@ def platform_matches(manifest: dict[str, Any]) -> bool: return cast(dict[str, Any], res.json()) - async def image_exists(self, image: "Image") -> bool: + async def image_exists(self, image: Image) -> bool: """Check the docker repo API if the image exists.""" - return await self.get_image_manifest(image) is not None + return await self.image_check(image) == 200 - async def get_image_config(self, image: "Image") -> Optional[dict[str, Any]]: + async def image_check(self, image: Image) -> int: + """Check the image at the registry.""" + token = await self._get_docker_token(image) + image_digest_url = f"{self.scheme}://{image.hostname}/v2/{image.name}/manifests/{image.tag}" + accept_media = ",".join( + [e.value for e in [ManifestTypes.docker_v2, ManifestTypes.oci_v1_manifest, ManifestTypes.oci_v1_index]] + ) + headers = {"Accept": accept_media} + if token: + headers["Authorization"] = f"Bearer {token}" + + res = await self.client.head(image_digest_url, headers=headers) + logger.debug(f"Checked image access: {image_digest_url}: {res.status_code}") + return res.status_code + + async 
def get_image_config(self, image: Image) -> Optional[dict[str, Any]]: """Query the docker API to get the configuration of an image.""" manifest = await self.get_image_manifest(image) if manifest is None: @@ -142,7 +169,7 @@ async def get_image_config(self, image: "Image") -> Optional[dict[str, Any]]: return None token = await self._get_docker_token(image) res = await self.client.get( - f"https://{image.hostname}/v2/{image.name}/blobs/{config_digest}", + f"{self.scheme}://{image.hostname}/v2/{image.name}/blobs/{config_digest}", headers={ "Accept": "application/json", "Authorization": f"Bearer {token}", @@ -152,7 +179,7 @@ async def get_image_config(self, image: "Image") -> Optional[dict[str, Any]]: return None return cast(dict[str, Any], res.json()) - async def image_workdir(self, image: "Image") -> Optional[PurePosixPath]: + async def image_workdir(self, image: Image) -> Optional[PurePosixPath]: """Query the docker API to get the workdir of an image.""" config = await self.get_image_config(image) if config is None: @@ -165,11 +192,11 @@ async def image_workdir(self, image: "Image") -> Optional[PurePosixPath]: workdir = "/" return PurePosixPath(workdir) - def with_oauth2_token(self, oauth2_token: str) -> "ImageRepoDockerAPI": + def with_oauth2_token(self, oauth2_token: str) -> ImageRepoDockerAPI: """Return a docker API instance with the token as authentication.""" - return ImageRepoDockerAPI(self.hostname, oauth2_token) + return ImageRepoDockerAPI(hostname=self.hostname, scheme=self.scheme, oauth2_token=oauth2_token) - def maybe_with_oauth2_token(self, token_hostname: str | None, oauth2_token: str | None) -> "ImageRepoDockerAPI": + def maybe_with_oauth2_token(self, token_hostname: str | None, oauth2_token: str | None) -> ImageRepoDockerAPI: """Return a docker API instance with the token as authentication. The token is used only if the image hostname matches the token hostname. 
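A brief usage sketch for the token-scoping helper above (illustrative only: the registry hostnames and the token value are made up, and the behavior shown is exactly what the docstring describes):

    api = ImageRepoDockerAPI(hostname="registry.example.org")
    # Hostnames match, so the returned instance carries the token:
    authed = api.maybe_with_oauth2_token("registry.example.org", "s3cr3t-token")
    # Hostnames differ, so the token is not attached:
    anon = api.maybe_with_oauth2_token("other-registry.example.org", "s3cr3t-token")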
@@ -272,3 +299,6 @@ def build_re(*parts: str) -> re.Pattern: def repo_api(self) -> ImageRepoDockerAPI: """Get the docker API from the image.""" return ImageRepoDockerAPI(self.hostname) + + def __str__(self) -> str: + return f"{self.hostname}/{self.name}:{self.tag}" diff --git a/components/renku_data_services/notebooks/api/classes/k8s_client.py b/components/renku_data_services/notebooks/api/classes/k8s_client.py index 9ce34fff6..f2738f693 100644 --- a/components/renku_data_services/notebooks/api/classes/k8s_client.py +++ b/components/renku_data_services/notebooks/api/classes/k8s_client.py @@ -1,21 +1,23 @@ """An abstraction over the kr8s kubernetes client and the k8s-watcher.""" +from __future__ import annotations + import base64 import json -from typing import TYPE_CHECKING, Any, Generic, Optional, TypeVar, cast +from typing import Any, Generic, Optional, TypeVar, cast import httpx -import kubernetes from box import Box from kr8s import NotFoundError, ServerError from kr8s.asyncio.objects import APIObject, Pod, Secret, StatefulSet -from kubernetes.client import V1Secret from renku_data_services.base_models import APIUser from renku_data_services.crc.db import ResourcePoolRepository from renku_data_services.errors import errors +from renku_data_services.k8s.client_interfaces import SecretClient +from renku_data_services.k8s.clients import K8sClusterClientsPool from renku_data_services.k8s.constants import DEFAULT_K8S_CLUSTER, ClusterId -from renku_data_services.k8s.models import GVK, Cluster, K8sObject, K8sObjectFilter, K8sObjectMeta +from renku_data_services.k8s.models import GVK, ClusterConnection, K8sObject, K8sObjectFilter, K8sObjectMeta, K8sSecret from renku_data_services.notebooks.api.classes.auth import GitlabToken, RenkuTokens from renku_data_services.notebooks.constants import JUPYTER_SESSION_GVK from renku_data_services.notebooks.crs import AmaltheaSessionV1Alpha1, JupyterServerV1Alpha1 @@ -23,11 +25,6 @@ from renku_data_services.notebooks.util.kubernetes_ import find_env_var from renku_data_services.notebooks.util.retries import retry_with_exponential_backoff_async -if TYPE_CHECKING: - from renku_data_services.k8s.clients import K8sClusterClientsPool - -sanitizer = kubernetes.client.ApiClient().sanitize_for_serialization - # NOTE The type ignore below is because the kr8s library has no type stubs, they claim pyright better handles type hints class JupyterServerV1Alpha1Kr8s(APIObject): @@ -45,18 +42,20 @@ class JupyterServerV1Alpha1Kr8s(APIObject): _SessionType = TypeVar("_SessionType", JupyterServerV1Alpha1, AmaltheaSessionV1Alpha1) -class NotebookK8sClient(Generic[_SessionType]): +class NotebookK8sClient(SecretClient, Generic[_SessionType]): """A K8s Client for Notebooks.""" def __init__( self, - client: "K8sClusterClientsPool", + client: K8sClusterClientsPool, + secrets_client: SecretClient, rp_repo: ResourcePoolRepository, session_type: type[_SessionType], username_label: str, gvk: GVK, ) -> None: self.__client = client + self.__secrets_client = secrets_client self.__rp_repo = rp_repo self.__session_type: type[_SessionType] = session_type self.__session_gvk = gvk @@ -168,16 +167,17 @@ async def _get(self, name: str, gvk: GVK, safe_username: str | None) -> K8sObjec return None - def namespace(self) -> str: + async def namespace(self) -> str: """Current namespace of the main cluster.""" - return self.__client.cluster_by_id(self.cluster_id()).namespace + client = await self.__client.cluster_by_id(self.cluster_id()) + return client.namespace @staticmethod def cluster_id() -> 
ClusterId: """Cluster id of the main cluster.""" return DEFAULT_K8S_CLUSTER - async def cluster_by_class_id(self, class_id: int | None, api_user: APIUser) -> Cluster: + async def cluster_by_class_id(self, class_id: int | None, api_user: APIUser) -> ClusterConnection: """Return the cluster associated with the given resource class id.""" cluster_id = self.cluster_id() @@ -185,11 +185,11 @@ async def cluster_by_class_id(self, class_id: int | None, api_user: APIUser) -> try: rp = await self.__rp_repo.get_resource_pool_from_class(api_user, class_id) if rp.cluster is not None: - cluster_id = ClusterId(str(rp.cluster.id)) + cluster_id = rp.cluster.id except errors.MissingResourceError: pass - return self.__client.cluster_by_id(cluster_id) + return await self.__client.cluster_by_id(cluster_id) async def list_sessions(self, safe_username: str) -> list[_SessionType]: """Get a list of sessions that belong to a user.""" @@ -275,7 +275,7 @@ async def get_statefulset(self, session_name: str, safe_username: str) -> Statef if statefulset is None: return None - cluster = self.__client.cluster_by_id(statefulset.cluster) + cluster = await self.__client.cluster_by_id(statefulset.cluster) if cluster is None: return None @@ -340,7 +340,7 @@ async def get_session_logs( if result is None: return logs - cluster = self.__client.cluster_by_id(result.cluster) + cluster = await self.__client.cluster_by_id(result.cluster) if cluster is None: return logs @@ -374,7 +374,7 @@ async def patch_image_pull_secret(self, session_name: str, gitlab_token: GitlabT if result is None: return - cluster = self.__client.cluster_by_id(result.cluster) + cluster = await self.__client.cluster_by_id(result.cluster) if cluster is None: return @@ -405,66 +405,20 @@ async def patch_image_pull_secret(self, session_name: str, gitlab_token: GitlabT "value": base64.b64encode(json.dumps(new_docker_config).encode()).decode(), } ] + await secret.patch(patch, type="json") - async def create_secret(self, secret: V1Secret, cluster: Cluster) -> V1Secret: + async def create_secret(self, secret: K8sSecret) -> K8sSecret: """Create a secret.""" - assert secret.metadata is not None + return await self.__secrets_client.create_secret(secret) - secret_obj = K8sObject( - name=secret.metadata.name, - namespace=cluster.namespace, - cluster=cluster.id, - gvk=GVK(kind=Secret.kind, version=Secret.version), - manifest=Box(sanitizer(secret)), - ) - try: - result = await self.__client.create(secret_obj) - except ServerError as err: - if err.response and err.response.status_code == 409: - annotations: Box | None = secret_obj.manifest.metadata.get("annotations") - labels: Box | None = secret_obj.manifest.metadata.get("labels") - patches = [ - { - "op": "replace", - "path": "/data", - "value": secret.data or {}, - }, - { - "op": "replace", - "path": "/stringData", - "value": secret.string_data or {}, - }, - { - "op": "replace", - "path": "/metadata/annotations", - "value": annotations.to_dict() if annotations is not None else {}, - }, - { - "op": "replace", - "path": "/metadata/labels", - "value": labels.to_dict() if labels is not None else {}, - }, - ] - result = await self.__client.patch(secret_obj, patches) - else: - raise - return V1Secret( - metadata=result.manifest.metadata, - data=result.manifest.get("data", {}), - string_data=result.manifest.get("stringData", {}), - type=result.manifest.get("type"), - ) + async def patch_secret(self, secret: K8sObjectMeta, patch: dict[str, Any] | list[dict[str, Any]]) -> K8sObject: + """Patch a secret.""" - async def 
delete_secret(self, name: str, cluster: Cluster) -> None: + return await self.__secrets_client.patch_secret(secret, patch) + + async def delete_secret(self, secret: K8sObjectMeta) -> None: """Delete a secret.""" - await self.__client.delete( - K8sObjectMeta( - name=name, - namespace=cluster.namespace, - cluster=cluster.id, - gvk=GVK(kind=Secret.kind, version=Secret.version), - ) - ) + return await self.__secrets_client.delete_secret(secret) diff --git a/components/renku_data_services/notebooks/api/classes/server.py b/components/renku_data_services/notebooks/api/classes/server.py index 6451eddbf..b62b13898 100644 --- a/components/renku_data_services/notebooks/api/classes/server.py +++ b/components/renku_data_services/notebooks/api/classes/server.py @@ -11,6 +11,7 @@ from renku_data_services.app_config import logging from renku_data_services.base_models import AnonymousAPIUser, AuthenticatedAPIUser from renku_data_services.base_models.core import APIUser +from renku_data_services.k8s.constants import DEFAULT_K8S_CLUSTER from renku_data_services.notebooks.api.amalthea_patches import cloudstorage as cloudstorage_patches from renku_data_services.notebooks.api.amalthea_patches import general as general_patches from renku_data_services.notebooks.api.amalthea_patches import git_proxy as git_proxy_patches @@ -24,8 +25,9 @@ from renku_data_services.notebooks.api.classes.repository import GitProvider, Repository from renku_data_services.notebooks.api.schemas.secrets import K8sUserSecrets from renku_data_services.notebooks.api.schemas.server_options import ServerOptions -from renku_data_services.notebooks.config import NotebooksConfig +from renku_data_services.notebooks.config import GitProviderHelperProto, NotebooksConfig from renku_data_services.notebooks.constants import JUPYTER_SESSION_GVK +from renku_data_services.notebooks.cr_amalthea_session import TlsSecret from renku_data_services.notebooks.crs import JupyterServerV1Alpha1 from renku_data_services.notebooks.errors.programming import DuplicateEnvironmentVariableError from renku_data_services.notebooks.errors.user import MissingResourceError @@ -51,6 +53,8 @@ def __init__( config: NotebooksConfig, internal_gitlab_user: APIUser, host: str, + namespace: str, + git_provider_helper: GitProviderHelperProto, using_default_image: bool = False, is_image_private: bool = False, repositories: list[Repository] | None = None, @@ -69,7 +73,9 @@ def __init__( self.cloudstorage = cloudstorage self.is_image_private = is_image_private self.host = host + self.__namespace = namespace self.config = config + self.git_provider_helper = git_provider_helper self.internal_gitlab_user = internal_gitlab_user if self.server_options.idle_threshold_seconds is not None: @@ -99,7 +105,7 @@ def __init__( def k8s_namespace(self) -> str: """Get the preferred namespace for a server.""" - return self._k8s_client.namespace() + return self.__namespace @property def user(self) -> AnonymousAPIUser | AuthenticatedAPIUser: @@ -126,7 +132,7 @@ async def repositories(self) -> list[Repository]: async def git_providers(self) -> list[GitProvider]: """The list of git providers.""" if self._git_providers is None: - self._git_providers = await self.config.git_provider_helper.get_providers(user=self.user) + self._git_providers = await self.git_provider_helper.get_providers(user=self.user) return self._git_providers async def required_git_providers(self) -> list[GitProvider]: @@ -231,16 +237,25 @@ async def _get_session_manifest(self) -> dict[str, Any]: } cluster = await 
self.config.k8s_client.cluster_by_class_id(self.server_options.resource_class_id, self._user) - ( - base_server_path, - base_server_url, - base_server_https_url, - host, - tls_secret, - ingress_annotations, - ) = await cluster.get_ingress_parameters( - self._user, self.config.cluster_rp, self.config.sessions.ingress, self.server_name - ) + + if cluster.id != DEFAULT_K8S_CLUSTER: + cluster_settings = await self.config.cluster_rp.select(cluster.id) + ( + base_server_path, + _, + _, + host, + tls_secret, + ingress_annotations, + ) = cluster_settings.get_ingress_parameters(self.server_name) + else: + # Fallback to global, main cluster parameters + host = self.config.sessions.ingress.host + base_server_path = self.config.sessions.ingress.base_path(self.server_name) + ingress_annotations = self.config.sessions.ingress.annotations + + tls_name = self.config.sessions.ingress.tls_secret + tls_secret = None if tls_name is None else TlsSecret(adopt=False, name=tls_name) # Combine everything into the manifest manifest = { @@ -315,7 +330,7 @@ async def _get_patches(self) -> list[dict[str, Any]]: # Cloud Storage needs to patch the git clone sidecar spec and so should come after # the sidecars # WARN: this patch depends on the index of the sidecar and so needs to be updated - # if sidercars are added or removed + # if sidecars are added or removed await cloudstorage_patches.main(self), # NOTE: User secrets adds an init container, volume and mounts, so it may affect # indices in other patches. @@ -394,6 +409,8 @@ def __init__( work_dir: PurePosixPath, config: NotebooksConfig, host: str, + namespace: str, + git_provider_helper: GitProviderHelperProto, gitlab_project: Project | None, internal_gitlab_user: APIUser, using_default_image: bool = False, @@ -422,10 +439,12 @@ def __init__( k8s_client=k8s_client, workspace_mount_path=workspace_mount_path, work_dir=work_dir, + git_provider_helper=git_provider_helper, using_default_image=using_default_image, is_image_private=is_image_private, repositories=repositories, host=host, + namespace=namespace, config=config, internal_gitlab_user=internal_gitlab_user, ) diff --git a/components/renku_data_services/notebooks/apispec.py b/components/renku_data_services/notebooks/apispec.py index 8f5673816..65f56bac7 100644 --- a/components/renku_data_services/notebooks/apispec.py +++ b/components/renku_data_services/notebooks/apispec.py @@ -1,6 +1,6 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2025-04-17T14:26:28+00:00 +# timestamp: 2025-09-12T06:56:48+00:00 from __future__ import annotations @@ -281,6 +281,18 @@ class SessionCloudStoragePost(BaseAPISpec): ) +class ImageConnectionStatus(Enum): + connected = "connected" + pending = "pending" + invalid_credentials = "invalid_credentials" + + +class ImageProvider(BaseAPISpec): + id: str + name: str + url: str + + class NotebooksImagesGetParametersQuery(BaseAPISpec): image_url: str = Field(..., min_length=1) @@ -356,6 +368,12 @@ class SessionResources(BaseAPISpec): requests: Optional[SessionResourcesRequests] = None +class ImageConnection(BaseAPISpec): + id: str + provider_id: str + status: ImageConnectionStatus + + class NotebookResponse(BaseAPISpec): annotations: Optional[FieldUserPodAnnotations] = None cloudstorage: Optional[List[LaunchNotebookResponseCloudStorage]] = None @@ -428,3 +446,9 @@ class SessionResponse(BaseAPISpec): class SessionListResponse(RootModel[List[SessionResponse]]): root: List[SessionResponse] + + +class ImageCheckResponse(BaseAPISpec): + accessible: bool = Field(..., 
description="Whether the image is accessible or not.") + connection: Optional[ImageConnection] = None + provider: Optional[ImageProvider] = None diff --git a/components/renku_data_services/notebooks/blueprints.py b/components/renku_data_services/notebooks/blueprints.py index b45a1c15a..aa48c0702 100644 --- a/components/renku_data_services/notebooks/blueprints.py +++ b/components/renku_data_services/notebooks/blueprints.py @@ -1,80 +1,42 @@ """Notebooks service API.""" from dataclasses import dataclass -from pathlib import PurePosixPath from sanic import Request, empty, exceptions, json from sanic.response import HTTPResponse, JSONResponse from sanic_ext import validate -from ulid import ULID from renku_data_services import base_models +from renku_data_services.app_config import logging from renku_data_services.base_api.auth import authenticate, authenticate_2 from renku_data_services.base_api.blueprint import BlueprintFactoryResponse, CustomBlueprint from renku_data_services.base_models import AnonymousAPIUser, APIUser, AuthenticatedAPIUser, Authenticator from renku_data_services.base_models.metrics import MetricsService +from renku_data_services.connected_services.db import ConnectedServicesRepository +from renku_data_services.connected_services.models import ConnectionStatus from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository from renku_data_services.data_connectors.db import ( DataConnectorRepository, DataConnectorSecretRepository, ) from renku_data_services.errors import errors -from renku_data_services.notebooks import apispec, core -from renku_data_services.notebooks.api.amalthea_patches.init_containers import user_secrets_container +from renku_data_services.notebooks import apispec, core, image_check +from renku_data_services.notebooks.api.classes.image import Image from renku_data_services.notebooks.api.schemas.config_server_options import ServerOptionsEndpointResponse from renku_data_services.notebooks.api.schemas.logs import ServerLogs -from renku_data_services.notebooks.config import NotebooksConfig +from renku_data_services.notebooks.config import GitProviderHelperProto, NotebooksConfig from renku_data_services.notebooks.core_sessions import ( - get_auth_secret_anonymous, - get_auth_secret_authenticated, - get_culling, - get_data_sources, - get_extra_containers, - get_extra_init_containers, - get_gitlab_image_pull_secret, - get_launcher_env_variables, patch_session, - repositories_from_project, - request_dc_secret_creation, - request_session_secret_creation, - requires_image_pull_secret, - resources_from_resource_class, - verify_launcher_env_variable_overrides, -) -from renku_data_services.notebooks.crs import ( - AmaltheaSessionSpec, - AmaltheaSessionV1Alpha1, - Authentication, - AuthenticationType, - ExtraVolume, - ExtraVolumeMount, - ImagePullPolicy, - ImagePullSecret, - Ingress, - InitContainer, - Metadata, - ReconcileStrategy, - Session, - SessionEnvItem, - ShmSizeStr, - SizeStr, - Storage, + start_session, ) from renku_data_services.notebooks.errors.intermittent import AnonymousUserPatchError -from renku_data_services.notebooks.models import ExtraSecret -from renku_data_services.notebooks.util.kubernetes_ import ( - renku_2_make_server_name, -) -from renku_data_services.notebooks.utils import ( - node_affinity_from_resource_class, - tolerations_from_resource_class, -) from renku_data_services.project.db import ProjectRepository, ProjectSessionSecretRepository -from renku_data_services.repositories.db import GitRepositoriesRepository from 
renku_data_services.session.db import SessionRepository from renku_data_services.storage.db import StorageRepository from renku_data_services.users.db import UserRepo +logger = logging.getLogger(__name__) + @dataclass(kw_only=True) class NotebooksBP(CustomBlueprint): @@ -82,11 +44,11 @@ class NotebooksBP(CustomBlueprint): authenticator: Authenticator nb_config: NotebooksConfig - git_repo: GitRepositoriesRepository internal_gitlab_authenticator: base_models.Authenticator rp_repo: ResourcePoolRepository user_repo: UserRepo storage_repo: StorageRepository + git_provider_helper: GitProviderHelperProto def version(self) -> BlueprintFactoryResponse: """Return notebook services version.""" @@ -139,6 +101,7 @@ async def _launch_notebook( body, user_repo=self.user_repo, storage_repo=self.storage_repo, + git_provider_helper=self.git_provider_helper, ) return core.serialize_v1_server(server, self.nb_config, status_code) @@ -242,6 +205,8 @@ class NotebooksNewBP(CustomBlueprint): data_connector_secret_repo: DataConnectorSecretRepository metrics: MetricsService cluster_repo: ClusterRepository + connected_svcs_repo: ConnectedServicesRepository + git_provider_helper: GitProviderHelperProto def start(self) -> BlueprintFactoryResponse: """Start a session with the new operator.""" @@ -254,247 +219,25 @@ async def _handler( internal_gitlab_user: APIUser, body: apispec.SessionPostRequest, ) -> JSONResponse: - launcher = await self.session_repo.get_launcher(user, ULID.from_str(body.launcher_id)) - project = await self.project_repo.get_project(user=user, project_id=launcher.project_id) - # We have to use body.resource_class_id and not launcher.resource_class_id as it may have been overridden by - # the user when selecting a different resource class from a different resource pool. - cluster = await self.nb_config.k8s_v2_client.cluster_by_class_id(body.resource_class_id, user) - server_name = renku_2_make_server_name( - user=user, project_id=str(launcher.project_id), launcher_id=body.launcher_id, cluster_id=cluster.id - ) - existing_session = await self.nb_config.k8s_v2_client.get_session(server_name, user.id) - if existing_session is not None and existing_session.spec is not None: - return json(existing_session.as_apispec().model_dump(exclude_none=True, mode="json")) - environment = launcher.environment - image = environment.container_image - image_workdir = await core.docker_image_workdir( - self.nb_config, environment.container_image, internal_gitlab_user - ) - default_resource_class = await self.rp_repo.get_default_resource_class() - if default_resource_class.id is None: - raise errors.ProgrammingError(message="The default resource class has to have an ID", quiet=True) - if body.resource_class_id is None: - resource_pool = await self.rp_repo.get_default_resource_pool() - resource_class = resource_pool.get_default_resource_class() - if not resource_class and len(resource_pool.classes) > 0: - resource_class = resource_pool.classes[0] - if not resource_class or not resource_class.id: - raise errors.ProgrammingError(message="There cannot find any resource classes in the default pool.") - else: - resource_pool = await self.rp_repo.get_resource_pool_from_class(user, body.resource_class_id) - resource_class = resource_pool.get_resource_class(body.resource_class_id) - if not resource_class or not resource_class.id: - raise errors.MissingResourceError( - message=f"The resource class with ID {body.resource_class_id} does not exist." 
- ) - await self.nb_config.crc_validator.validate_class_storage(user, resource_class.id, body.disk_storage) - work_dir_fallback = PurePosixPath("/home/jovyan") - work_dir = environment.working_directory or image_workdir or work_dir_fallback - storage_mount_fallback = work_dir / "work" - storage_mount = launcher.environment.mount_directory or storage_mount_fallback - secrets_mount_directory = storage_mount / project.secrets_mount_directory - session_secrets = await self.project_session_secret_repo.get_all_session_secrets_from_project( - user=user, project_id=project.id - ) - data_connectors_stream = self.data_connector_secret_repo.get_data_connectors_with_secrets(user, project.id) - git_providers = await self.nb_config.git_provider_helper.get_providers(user=user) - repositories = repositories_from_project(project, git_providers) - - # User secrets - extra_volume_mounts: list[ExtraVolumeMount] = [] - extra_volumes: list[ExtraVolume] = [] - extra_init_containers: list[InitContainer] = [] - user_secrets_container_patches = user_secrets_container( + session, created = await start_session( + request=request, + body=body, user=user, - config=self.nb_config, - secrets_mount_directory=secrets_mount_directory.as_posix(), - k8s_secret_name=f"{server_name}-secrets", - session_secrets=session_secrets, - ) - if user_secrets_container_patches is not None: - (init_container_session_secret, volumes_session_secret, volume_mounts_session_secret) = ( - user_secrets_container_patches - ) - extra_volumes.extend(volumes_session_secret) - extra_volume_mounts.extend(volume_mounts_session_secret) - extra_init_containers.append(init_container_session_secret) - - secrets_to_create: list[ExtraSecret] = [] - data_sources, data_secrets, enc_secrets = await get_data_sources( + internal_gitlab_user=internal_gitlab_user, nb_config=self.nb_config, - server_name=server_name, - user=user, - data_connectors_stream=data_connectors_stream, - work_dir=work_dir, - cloud_storage_overrides=body.cloudstorage or [], + git_provider_helper=self.git_provider_helper, + cluster_repo=self.cluster_repo, + data_connector_secret_repo=self.data_connector_secret_repo, + project_repo=self.project_repo, + project_session_secret_repo=self.project_session_secret_repo, + rp_repo=self.rp_repo, + session_repo=self.session_repo, user_repo=self.user_repo, + metrics=self.metrics, + connected_svcs_repo=self.connected_svcs_repo, ) - secrets_to_create.extend(data_secrets) - extra_init_containers_dc, extra_init_volumes_dc = await get_extra_init_containers( - self.nb_config, - user, - repositories, - git_providers, - storage_mount, - work_dir, - uid=environment.uid, - gid=environment.gid, - ) - extra_containers = await get_extra_containers(self.nb_config, user, repositories, git_providers) - extra_volumes.extend(extra_init_volumes_dc) - extra_init_containers.extend(extra_init_containers_dc) - - ( - base_server_path, - base_server_url, - base_server_https_url, - host, - tls_secret, - ingress_annotations, - ) = await cluster.get_ingress_parameters( - user, self.cluster_repo, self.nb_config.sessions.ingress, server_name - ) - - ui_path = f"{base_server_path}/{environment.default_url.lstrip('/')}" - - ingress = Ingress( - host=host, - ingressClassName=ingress_annotations.get("kubernetes.io/ingress.class"), - annotations=ingress_annotations, - tlsSecret=tls_secret, - pathPrefix=base_server_path, - ) - - annotations: dict[str, str] = { - "renku.io/project_id": str(launcher.project_id), - "renku.io/launcher_id": body.launcher_id, - "renku.io/resource_class_id": 
str(body.resource_class_id or default_resource_class.id), - } - if isinstance(user, AuthenticatedAPIUser): - auth_secret = await get_auth_secret_authenticated( - self.nb_config, user, server_name, base_server_url, base_server_https_url, base_server_path - ) - else: - auth_secret = await get_auth_secret_anonymous(self.nb_config, server_name, request) - if auth_secret.volume: - extra_volumes.append(auth_secret.volume) - - image_pull_secret_name = None - if isinstance(user, AuthenticatedAPIUser) and internal_gitlab_user.access_token is not None: - needs_pull_secret = await requires_image_pull_secret(self.nb_config, image, internal_gitlab_user) - - if needs_pull_secret: - image_pull_secret_name = f"{server_name}-image-secret" - - image_secret = get_gitlab_image_pull_secret( - self.nb_config, user, image_pull_secret_name, internal_gitlab_user.access_token - ) - secrets_to_create.append(image_secret) - - secrets_to_create.append(auth_secret) - - # Raise an error if there are invalid environment variables in the request body - verify_launcher_env_variable_overrides(launcher, body) - env = [ - SessionEnvItem(name="RENKU_BASE_URL_PATH", value=base_server_path), - SessionEnvItem(name="RENKU_BASE_URL", value=base_server_url), - SessionEnvItem(name="RENKU_MOUNT_DIR", value=storage_mount.as_posix()), - SessionEnvItem(name="RENKU_SESSION", value="1"), - SessionEnvItem(name="RENKU_SESSION_IP", value="0.0.0.0"), # nosec B104 - SessionEnvItem(name="RENKU_SESSION_PORT", value=f"{environment.port}"), - SessionEnvItem(name="RENKU_WORKING_DIR", value=work_dir.as_posix()), - ] - launcher_env_variables = get_launcher_env_variables(launcher, body) - if launcher_env_variables: - env.extend(launcher_env_variables) - - storage_class = await cluster.get_storage_class( - user, self.cluster_repo, self.nb_config.sessions.storage.pvs_storage_class - ) - service_account_name: str | None = None - if resource_pool.cluster: - service_account_name = resource_pool.cluster.service_account_name - manifest = AmaltheaSessionV1Alpha1( - metadata=Metadata(name=server_name, annotations=annotations), - spec=AmaltheaSessionSpec( - imagePullSecrets=[ImagePullSecret(name=image_pull_secret_name, adopt=True)] - if image_pull_secret_name - else [], - codeRepositories=[], - hibernated=False, - reconcileStrategy=ReconcileStrategy.whenFailedOrHibernated, - priorityClassName=resource_class.quota, - session=Session( - image=image, - imagePullPolicy=ImagePullPolicy.Always, - urlPath=ui_path, - port=environment.port, - storage=Storage( - className=storage_class, - size=SizeStr(str(body.disk_storage) + "G"), - mountPath=storage_mount.as_posix(), - ), - workingDir=work_dir.as_posix(), - runAsUser=environment.uid, - runAsGroup=environment.gid, - resources=resources_from_resource_class(resource_class), - extraVolumeMounts=extra_volume_mounts, - command=environment.command, - args=environment.args, - shmSize=ShmSizeStr("1G"), - env=env, - ), - ingress=ingress, - extraContainers=extra_containers, - initContainers=extra_init_containers, - extraVolumes=extra_volumes, - culling=get_culling(user, resource_pool, self.nb_config), - authentication=Authentication( - enabled=True, - type=AuthenticationType.oauth2proxy - if isinstance(user, AuthenticatedAPIUser) - else AuthenticationType.token, - secretRef=auth_secret.key_ref("auth"), - extraVolumeMounts=[auth_secret.volume_mount] if auth_secret.volume_mount else [], - ), - dataSources=data_sources, - tolerations=tolerations_from_resource_class( - resource_class, self.nb_config.sessions.tolerations_model - ), - 
affinity=node_affinity_from_resource_class(resource_class, self.nb_config.sessions.affinity_model), - serviceAccountName=service_account_name, - ), - ) - for s in secrets_to_create: - await self.nb_config.k8s_v2_client.create_secret(s.secret, cluster) - try: - manifest = await self.nb_config.k8s_v2_client.create_session(manifest, user) - except Exception as err: - for s in secrets_to_create: - await self.nb_config.k8s_v2_client.delete_secret(s.secret.metadata.name, cluster) - raise errors.ProgrammingError(message="Could not start the amalthea session") from err - else: - try: - await request_session_secret_creation(user, self.nb_config, manifest, session_secrets) - await request_dc_secret_creation(user, self.nb_config, manifest, enc_secrets) - except Exception: - await self.nb_config.k8s_v2_client.delete_session(server_name, user.id) - raise - - await self.metrics.user_requested_session_launch( - user=user, - metadata={ - "cpu": int(resource_class.cpu * 1000), - "memory": resource_class.memory, - "gpu": resource_class.gpu, - "storage": body.disk_storage, - "resource_class_id": resource_class.id, - "resource_pool_id": resource_pool.id or "", - "resource_class_name": f"{resource_pool.name}.{resource_class.name}", - "session_id": server_name, - }, - ) - return json(manifest.as_apispec().model_dump(mode="json", exclude_none=True), 201) + status = 201 if created else 200 + return json(session.as_apispec().model_dump(exclude_none=True, mode="json"), status) return "/sessions", ["POST"], _handler @@ -547,14 +290,18 @@ async def _handler( body: apispec.SessionPatchRequest, ) -> HTTPResponse: new_session = await patch_session( - body, - session_id, - self.nb_config, - user, - internal_gitlab_user, - rp_repo=self.rp_repo, + body=body, + session_id=session_id, + user=user, + internal_gitlab_user=internal_gitlab_user, + nb_config=self.nb_config, + git_provider_helper=self.git_provider_helper, project_repo=self.project_repo, + project_session_secret_repo=self.project_session_secret_repo, + rp_repo=self.rp_repo, + session_repo=self.session_repo, metrics=self.metrics, + connected_svcs_repo=self.connected_svcs_repo, ) return json(new_session.as_apispec().model_dump(exclude_none=True, mode="json")) @@ -586,12 +333,39 @@ async def _check_docker_image( user: AnonymousAPIUser | AuthenticatedAPIUser, internal_gitlab_user: APIUser, query: apispec.SessionsImagesGetParametersQuery, - ) -> HTTPResponse: - image_url = request.get_args().get("image_url") - if not isinstance(image_url, str): - raise ValueError("required string of image url") + ) -> JSONResponse: + image = Image.from_path(query.image_url) + result = await image_check.check_image( + image, + user, + self.connected_svcs_repo, + image_check.InternalGitLabConfig(internal_gitlab_user, self.nb_config), + ) + logger.info(f"Checked image {query.image_url}: {result}") + conn = None + if result.connection: + match result.connection.status: + case ConnectionStatus.connected: + if result.error is not None: + status = apispec.ImageConnectionStatus.invalid_credentials + else: + status = apispec.ImageConnectionStatus.connected + + case ConnectionStatus.pending: + status = apispec.ImageConnectionStatus.pending + + conn = apispec.ImageConnection( + id=str(result.connection.id), provider_id=result.connection.provider_id, status=status + ) - status = 200 if await core.docker_image_exists(self.nb_config, image_url, internal_gitlab_user) else 404 - return HTTPResponse(status=status) + provider: apispec.ImageProvider | None = None + if result.client: + provider = 
apispec.ImageProvider( + id=result.client.id, name=result.client.display_name, url=result.client.url + ) + + resp = apispec.ImageCheckResponse(accessible=result.accessible, connection=conn, provider=provider) + + return json(resp.model_dump(exclude_none=True, mode="json")) return "/sessions/images", ["GET"], _check_docker_image diff --git a/components/renku_data_services/notebooks/config/__init__.py b/components/renku_data_services/notebooks/config/__init__.py index 875b675de..042e5eff0 100644 --- a/components/renku_data_services/notebooks/config/__init__.py +++ b/components/renku_data_services/notebooks/config/__init__.py @@ -1,6 +1,7 @@ """Base notebooks svc configuration.""" import os +from collections.abc import Awaitable from dataclasses import dataclass, field from typing import Any, Optional, Protocol, Self @@ -8,23 +9,22 @@ from renku_data_services.base_models import APIUser from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository -from renku_data_services.crc.models import ResourceClass +from renku_data_services.crc.models import ClusterSettings, ResourceClass, SessionProtocol from renku_data_services.db_config.config import DBConfig +from renku_data_services.errors import errors from renku_data_services.k8s.clients import ( DummyCoreClient, DummySchedulingClient, K8sClusterClientsPool, K8sCoreClient, K8sSchedulingClient, + K8sSecretClient, ) -from renku_data_services.k8s.config import KubeConfigEnv, get_clusters -from renku_data_services.k8s.quota import QuotaRepository -from renku_data_services.k8s_watcher import K8sDbCache +from renku_data_services.k8s.config import KubeConfig, KubeConfigEnv, get_clusters +from renku_data_services.k8s.db import K8sDbCache, QuotaRepository from renku_data_services.notebooks.api.classes.data_service import ( CRCValidator, DummyCRCValidator, - DummyGitProviderHelper, - GitProviderHelper, ) from renku_data_services.notebooks.api.classes.k8s_client import NotebookK8sClient from renku_data_services.notebooks.api.classes.repository import GitProvider @@ -104,9 +104,36 @@ def __getattribute__(self, name: str) -> Any: return object.__getattribute__(self.current, name) +class TestKubeConfig(KubeConfig): + """Kubeconfig used for testing.""" + + def __init__( + self, + kubeconfig: str | None = None, + current_context_name: str | None = None, + ns: str | None = None, + sa: str | None = None, + url: str | None = None, + ) -> None: + super().__init__(kubeconfig, current_context_name, ns, sa, url) + self.__stack = Kr8sApiStack() + + def sync_api(self) -> kr8s.Api: + """Instantiate the sync Kr8s Api object based on the configuration.""" + return self.__stack # type: ignore[return-value] + + def api(self) -> Awaitable[kr8s.asyncio.Api]: + """Instantiate the async Kr8s Api object based on the configuration.""" + + async def _api() -> kr8s.asyncio.Api: + return self.__stack # type: ignore[return-value] + + return _api() + + @dataclass class NotebooksConfig: - """The notebooks configuration.""" + """The notebooks' configuration.""" server_options: ServerOptionsConfig sessions: _SessionConfig @@ -114,14 +141,13 @@ class NotebooksConfig: git: _GitConfig k8s: _K8sConfig k8s_db_cache: K8sDbCache - _kr8s_api: kr8s.asyncio.Api cloud_storage: _CloudStorage user_secrets: _UserSecrets crc_validator: CRCValidatorProto - git_provider_helper: GitProviderHelperProto k8s_client: NotebookK8sClient[JupyterServerV1Alpha1] k8s_v2_client: NotebookK8sClient[AmaltheaSessionV1Alpha1] cluster_rp: ClusterRepository + enable_internal_gitlab: bool 
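    # Hypothetical construction sketch (helper names assumed, not part of this
    # patch): the config is normally built from the environment, e.g.
    #
    #     db_config = DBConfig.from_env()
    #     nb_config = NotebooksConfig.from_env(db_config, enable_internal_gitlab=False)
    #
    # Note that from_env() re-reads ENABLE_INTERNAL_GITLAB from the environment
    # and overrides the flag passed in by the caller.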
current_resource_schema_version: int = 1 anonymous_sessions_enabled: bool = False ssh_enabled: bool = False @@ -134,57 +160,56 @@ class NotebooksConfig: default_factory=_ServersGetEndpointAnnotations ) session_id_cookie_name: str = "_renku_session" # NOTE: This cookie name is set and controlled by the gateway + v1_sessions_enabled: bool = False + local_cluster_session_service_account: str | None = None @classmethod - def from_env(cls, db_config: DBConfig) -> Self: + def from_env(cls, db_config: DBConfig, enable_internal_gitlab: bool) -> Self: """Create a configuration object from environment variables.""" + enable_internal_gitlab = os.getenv("ENABLE_INTERNAL_GITLAB", "false").lower() == "true" dummy_stores = _parse_str_as_bool(os.environ.get("DUMMY_STORES", False)) sessions_config: _SessionConfig git_config: _GitConfig - kr8s_api: kr8s.asyncio.Api + default_kubeconfig = KubeConfigEnv() data_service_url = os.environ.get("NB_DATA_SERVICE_URL", "http://127.0.0.1:8000") server_options = ServerOptionsConfig.from_env() crc_validator: CRCValidatorProto - git_provider_helper: GitProviderHelperProto k8s_namespace = os.environ.get("K8S_NAMESPACE", "default") kube_config_root = os.environ.get("K8S_CONFIGS_ROOT", "/secrets/kube_configs") + v1_sessions_enabled = _parse_str_as_bool(os.environ.get("V1_SESSIONS_ENABLED", False)) if dummy_stores: quota_repo = QuotaRepository(DummyCoreClient({}, {}), DummySchedulingClient({}), namespace=k8s_namespace) rp_repo = ResourcePoolRepository(db_config.async_session_maker, quota_repo) crc_validator = DummyCRCValidator() sessions_config = _SessionConfig._for_testing() - git_provider_helper = DummyGitProviderHelper() git_config = _GitConfig("http://not.specified", "registry.not.specified") - kr8s_api = Kr8sApiStack() # type: ignore[assignment] + else: quota_repo = QuotaRepository(K8sCoreClient(), K8sSchedulingClient(), namespace=k8s_namespace) rp_repo = ResourcePoolRepository(db_config.async_session_maker, quota_repo) crc_validator = CRCValidator(rp_repo) sessions_config = _SessionConfig.from_env() - git_config = _GitConfig.from_env() - git_provider_helper = GitProviderHelper( - data_service_url, f"http://{sessions_config.ingress.host}", git_config.url - ) - # NOTE: we need to get an async client as a sync client can't be used in an async way - # But all the config code is not async, so we need to drop into the running loop, if there is one - kr8s_api = KubeConfigEnv().api() + git_config = _GitConfig.from_env(enable_internal_gitlab=enable_internal_gitlab) k8s_config = _K8sConfig.from_env() k8s_db_cache = K8sDbCache(db_config.async_session_maker) cluster_rp = ClusterRepository(db_config.async_session_maker) + client = K8sClusterClientsPool( - get_clusters=get_clusters( + get_clusters( kube_conf_root_dir=kube_config_root, - namespace=k8s_config.renku_namespace, - api=kr8s_api, - cluster_rp=cluster_rp, - ), - cache=k8s_db_cache, - kinds_to_cache=[AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK, BUILD_RUN_GVK, TASK_RUN_GVK], + default_kubeconfig=default_kubeconfig, + cluster_repo=cluster_rp, + cache=k8s_db_cache, + kinds_to_cache=[AMALTHEA_SESSION_GVK, JUPYTER_SESSION_GVK, BUILD_RUN_GVK, TASK_RUN_GVK], + ) ) + secrets_client = K8sSecretClient(client) + k8s_client = NotebookK8sClient( client=client, + secrets_client=secrets_client, rp_repo=rp_repo, session_type=JupyterServerV1Alpha1, gvk=JUPYTER_SESSION_GVK, @@ -192,12 +217,14 @@ def from_env(cls, db_config: DBConfig) -> Self: ) k8s_v2_client = NotebookK8sClient( client=client, + secrets_client=secrets_client, rp_repo=rp_repo, # 
NOTE: v2 sessions have no userId label, the safe-username label is the keycloak user ID session_type=AmaltheaSessionV1Alpha1, gvk=AMALTHEA_SESSION_GVK, username_label="renku.io/safe-username", ) + return cls( server_options=server_options, sessions=sessions_config, @@ -214,10 +241,28 @@ def from_env(cls, db_config: DBConfig) -> Self: data_service_url=data_service_url, dummy_stores=dummy_stores, crc_validator=crc_validator, - git_provider_helper=git_provider_helper, k8s_client=k8s_client, k8s_v2_client=k8s_v2_client, k8s_db_cache=k8s_db_cache, cluster_rp=cluster_rp, - _kr8s_api=kr8s_api, + v1_sessions_enabled=v1_sessions_enabled, + enable_internal_gitlab=enable_internal_gitlab, + local_cluster_session_service_account=os.environ.get("LOCAL_CLUSTER_SESSION_SERVICE_ACCOUNT"), + ) + + def local_cluster_settings(self) -> ClusterSettings: + """The cluster settings for the local cluster where the Renku services are installed.""" + if not self.sessions.ingress.tls_secret: + raise errors.ProgrammingError(message="The tls secret must be defined for a local cluster.") + return ClusterSettings( + name="local-cluster-settings", + config_name="", + session_protocol=SessionProtocol.HTTPS, + session_host=self.sessions.ingress.host, + session_port=443, + session_path="/sessions", + session_ingress_annotations=self.sessions.ingress.annotations, + session_tls_secret_name=self.sessions.ingress.tls_secret, + session_storage_class=self.sessions.storage.pvs_storage_class, + service_account_name=self.local_cluster_session_service_account, ) diff --git a/components/renku_data_services/notebooks/config/dynamic.py b/components/renku_data_services/notebooks/config/dynamic.py index 945aaec21..2faea93bd 100644 --- a/components/renku_data_services/notebooks/config/dynamic.py +++ b/components/renku_data_services/notebooks/config/dynamic.py @@ -5,7 +5,7 @@ from dataclasses import dataclass, field from enum import Enum from io import StringIO -from typing import Any, ClassVar, Optional, Self, Union +from typing import Any, ClassVar, Self, Union from urllib.parse import urlunparse import yaml @@ -100,16 +100,18 @@ class _GitConfig: registry: str @classmethod - def from_env(cls) -> Self: - return cls(os.environ["NB_GIT__URL"], os.environ["NB_GIT__REGISTRY"]) + def from_env(cls, enable_internal_gitlab: bool = True) -> Self: + if enable_internal_gitlab: + return cls(os.environ["NB_GIT__URL"], os.environ["NB_GIT__REGISTRY"]) + return cls("", "") @dataclass class _GitProxyConfig: renku_client_secret: str = field(repr=False) sentry: _SentryConfig = field(default_factory=_SentryConfig.from_env) - port: int = 8080 - health_port: int = 8081 + port: int = 65480 + health_port: int = 65481 image: str = f"renku/git-https-proxy:{latest_version}" renku_client_id: str = "renku" @@ -119,8 +121,8 @@ def from_env(cls) -> Self: renku_client_secret=os.environ["NB_SESSIONS__GIT_PROXY__RENKU_CLIENT_SECRET"], renku_client_id=os.environ.get("NB_SESSIONS__GIT_PROXY__RENKU_CLIENT_ID", "renku"), sentry=_SentryConfig.from_env(prefix="NB_SESSIONS__GIT_PROXY__"), - port=_parse_value_as_int(os.environ.get("NB_SESSIONS__GIT_PROXY__PORT", 8080)), - health_port=_parse_value_as_int(os.environ.get("NB_SESSIONS__GIT_PROXY__HEALTH_PORT", 8081)), + port=_parse_value_as_int(os.environ.get("NB_SESSIONS__GIT_PROXY__PORT", 65480)), + health_port=_parse_value_as_int(os.environ.get("NB_SESSIONS__GIT_PROXY__HEALTH_PORT", 65481)), image=os.environ.get("NB_SESSIONS__GIT_PROXY__IMAGE", f"renku/git-https-proxy:{latest_version}"), ) @@ -253,14 +255,14 @@ def from_env(cls) -> 
Self: @dataclass class _SessionIngress: host: str - tls_secret: Optional[str] = None + tls_secret: str | None = None annotations: dict[str, str] = field(default_factory=dict) @classmethod def from_env(cls) -> Self: return cls( host=os.environ["NB_SESSIONS__INGRESS__HOST"], - tls_secret=os.environ.get("NB_SESSIONS__INGRESS__TLS_SECRET", None), + tls_secret=os.environ["NB_SESSIONS__INGRESS__TLS_SECRET"], annotations=yaml.safe_load(StringIO(os.environ.get("NB_SESSIONS__INGRESS__ANNOTATIONS", "{}"))), ) @@ -430,7 +432,7 @@ def _for_testing(cls) -> Self: git_proxy=_GitProxyConfig(renku_client_secret="not-defined"), # nosec B106 git_rpc_server=_GitRpcServerConfig.from_env(), git_clone=_GitCloneConfig.from_env(), - ingress=_SessionIngress(host="localhost"), + ingress=_SessionIngress(host="localhost", tls_secret="some-secret"), # nosec: B106 ca_certs=_CustomCaCertsConfig.from_env(), oidc=_SessionOidcConfig( client_id="not-defined", diff --git a/components/renku_data_services/notebooks/core.py b/components/renku_data_services/notebooks/core.py index f5d09a62a..1bdbd5784 100644 --- a/components/renku_data_services/notebooks/core.py +++ b/components/renku_data_services/notebooks/core.py @@ -1,5 +1,6 @@ """Notebooks service core implementation, specifically for JupyterServer sessions.""" +import contextlib import json as json_lib from datetime import UTC, datetime from math import floor @@ -29,7 +30,7 @@ from renku_data_services.notebooks.api.schemas.server_options import ServerOptions from renku_data_services.notebooks.api.schemas.servers_get import NotebookResponse from renku_data_services.notebooks.api.schemas.servers_patch import PatchServerStatusEnum -from renku_data_services.notebooks.config import NotebooksConfig +from renku_data_services.notebooks.config import GitProviderHelperProto, NotebooksConfig from renku_data_services.notebooks.constants import JUPYTER_SESSION_GVK from renku_data_services.notebooks.errors import intermittent from renku_data_services.notebooks.errors import user as user_errors @@ -331,6 +332,7 @@ async def launch_notebook_helper( internal_gitlab_user: APIUser, user_repo: UserRepo, storage_repo: StorageRepository, + git_provider_helper: GitProviderHelperProto, ) -> tuple[UserServerManifest, int]: """Helper function to launch a Jupyter server.""" @@ -339,6 +341,9 @@ async def launch_notebook_helper( if server: return UserServerManifest(server, nb_config.sessions.default_image, nb_config.sessions.storage.pvs_enabled), 200 + if not nb_config.v1_sessions_enabled: + raise errors.ForbiddenError(message="Starting v1 sessions is not allowed.") + gl_project_path = gl_project_path if gl_project_path is not None else "" # Add annotation for old and new notebooks @@ -394,17 +399,15 @@ async def launch_notebook_helper( host = nb_config.sessions.ingress.host parsed_server_options: ServerOptions | None = None + session_namespace = nb_config.k8s.renku_namespace if resource_class_id is not None: # A resource class ID was passed in, validate with CRC service parsed_server_options = await nb_config.crc_validator.validate_class_storage(user, resource_class_id, storage) - k8s_cluster = await nb_config.k8s_client.cluster_by_class_id(resource_class_id, user) - if ( - p := await k8s_cluster.get_ingress_parameters( - user, nb_config.cluster_rp, nb_config.sessions.ingress, server_name - ) - ) is not None: - (_, _, _, ingress_host, _, _) = p - host = ingress_host + cluster = await nb_config.k8s_client.cluster_by_class_id(resource_class_id, user) + session_namespace = cluster.namespace + with 
contextlib.suppress(errors.MissingResourceError): + (_, _, _, host, _, _) = (await nb_config.cluster_rp.select(cluster.id)).get_ingress_parameters(server_name) + elif server_options is not None: if isinstance(server_options, dict): requested_server_options = ServerOptions( @@ -516,6 +519,8 @@ async def launch_notebook_helper( repositories=[Repository.from_dict(r.model_dump()) for r in repositories], config=nb_config, host=host, + namespace=session_namespace, + git_provider_helper=git_provider_helper, **extra_kwargs, ) @@ -591,6 +596,7 @@ async def launch_notebook( launch_request: apispec.LaunchNotebookRequestOld, user_repo: UserRepo, storage_repo: StorageRepository, + git_provider_helper: GitProviderHelperProto, ) -> tuple[UserServerManifest, int]: """Starts a server using the old operator.""" @@ -606,7 +612,7 @@ async def launch_notebook( launch_request.project, launch_request.branch, launch_request.commit_sha, - cluster.id, + str(cluster.id), ) project_slug = f"{launch_request.namespace}/{launch_request.project}" gitlab_client = NotebooksGitlabClient(config.git.url, internal_gitlab_user.access_token) @@ -649,6 +655,7 @@ async def launch_notebook( internal_gitlab_user=internal_gitlab_user, user_repo=user_repo, storage_repo=storage_repo, + git_provider_helper=git_provider_helper, ) diff --git a/components/renku_data_services/notebooks/core_sessions.py b/components/renku_data_services/notebooks/core_sessions.py index 6a5c76df1..0d18224ff 100644 --- a/components/renku_data_services/notebooks/core_sessions.py +++ b/components/renku_data_services/notebooks/core_sessions.py @@ -5,62 +5,91 @@ import os import random import string -from collections.abc import AsyncIterator +from collections.abc import AsyncIterator, Sequence from datetime import timedelta from pathlib import PurePosixPath -from typing import cast +from typing import Protocol, TypeVar, cast from urllib.parse import urljoin, urlparse import httpx from kubernetes.client import V1ObjectMeta, V1Secret from sanic import Request from toml import dumps +from ulid import ULID from yaml import safe_dump +import renku_data_services.notebooks.image_check as ic from renku_data_services.app_config import logging -from renku_data_services.base_models import APIUser -from renku_data_services.base_models.core import AnonymousAPIUser, AuthenticatedAPIUser +from renku_data_services.base_models import AnonymousAPIUser, APIUser, AuthenticatedAPIUser from renku_data_services.base_models.metrics import MetricsService -from renku_data_services.crc.db import ResourcePoolRepository -from renku_data_services.crc.models import GpuKind, ResourceClass, ResourcePool +from renku_data_services.connected_services.db import ConnectedServicesRepository +from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository +from renku_data_services.crc.models import ( + ClusterSettings, + GpuKind, + RemoteConfigurationFirecrest, + ResourceClass, + ResourcePool, +) +from renku_data_services.data_connectors.db import ( + DataConnectorSecretRepository, +) from renku_data_services.data_connectors.models import DataConnectorSecret, DataConnectorWithSecrets from renku_data_services.errors import errors -from renku_data_services.notebooks import apispec +from renku_data_services.k8s.models import K8sSecret, sanitizer +from renku_data_services.notebooks import apispec, core from renku_data_services.notebooks.api.amalthea_patches import git_proxy, init_containers +from renku_data_services.notebooks.api.amalthea_patches.init_containers import user_secrets_extras 
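# Illustrative sketch (not part of this patch): the helpers in this module now
# return SessionExtraResources bundles instead of bare tuples, so a caller can
# accumulate session extras step by step, e.g.
#
#     extras = SessionExtraResources()
#     extras = extras.concat(await get_extra_init_containers(...))
#     extras = extras.concat(await get_extra_containers(...))
#
# (a concat/merge combinator on SessionExtraResources is assumed here; it is not
# shown in this diff)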
from renku_data_services.notebooks.api.classes.image import Image -from renku_data_services.notebooks.api.classes.k8s_client import sanitizer from renku_data_services.notebooks.api.classes.repository import GitProvider, Repository from renku_data_services.notebooks.api.schemas.cloud_storage import RCloneStorage -from renku_data_services.notebooks.config import NotebooksConfig +from renku_data_services.notebooks.config import GitProviderHelperProto, NotebooksConfig from renku_data_services.notebooks.crs import ( + AmaltheaSessionSpec, AmaltheaSessionV1Alpha1, + AmaltheaSessionV1Alpha1MetadataPatch, AmaltheaSessionV1Alpha1Patch, AmaltheaSessionV1Alpha1SpecPatch, AmaltheaSessionV1Alpha1SpecSessionPatch, + Authentication, + AuthenticationType, Culling, DataSource, ExtraContainer, ExtraVolume, ExtraVolumeMount, + ImagePullPolicy, ImagePullSecret, + Ingress, InitContainer, Limits, LimitsStr, + Metadata, + ReconcileStrategy, Requests, RequestsStr, Resources, SecretAsVolume, SecretAsVolumeItem, + Session, SessionEnvItem, + SessionLocation, + ShmSizeStr, + SizeStr, State, + Storage, +) +from renku_data_services.notebooks.models import ExtraSecret, SessionExtraResources +from renku_data_services.notebooks.util.kubernetes_ import ( + renku_2_make_server_name, ) -from renku_data_services.notebooks.models import ExtraSecret from renku_data_services.notebooks.utils import ( node_affinity_from_resource_class, tolerations_from_resource_class, ) -from renku_data_services.project.db import ProjectRepository +from renku_data_services.project.db import ProjectRepository, ProjectSessionSecretRepository from renku_data_services.project.models import Project, SessionSecret +from renku_data_services.session.db import SessionRepository from renku_data_services.session.models import SessionLauncher from renku_data_services.users.db import UserRepo from renku_data_services.utils.cryptography import get_encryption_key @@ -77,8 +106,9 @@ async def get_extra_init_containers( work_dir: PurePosixPath, uid: int = 1000, gid: int = 1000, -) -> tuple[list[InitContainer], list[ExtraVolume]]: +) -> SessionExtraResources: """Get all extra init containers that should be added to an amalthea session.""" + # TODO: The above statement is not correct: the init container for user secrets is not included here cert_init, cert_vols = init_containers.certificates_container(nb_config) session_init_containers = [InitContainer.model_validate(sanitizer(cert_init))] extra_volumes = [ExtraVolume.model_validate(sanitizer(volume)) for volume in cert_vols] @@ -94,7 +124,10 @@ async def get_extra_init_containers( ) if git_clone is not None: session_init_containers.append(InitContainer.model_validate(git_clone)) - return session_init_containers, extra_volumes + return SessionExtraResources( + init_containers=session_init_containers, + volumes=extra_volumes, + ) async def get_extra_containers( @@ -102,7 +135,7 @@ async def get_extra_containers( user: AnonymousAPIUser | AuthenticatedAPIUser, repositories: list[Repository], git_providers: list[GitProvider], -) -> list[ExtraContainer]: +) -> SessionExtraResources: """Get the extra containers added to amalthea sessions.""" conts: list[ExtraContainer] = [] git_proxy_container = await git_proxy.main_container( @@ -110,7 +143,7 @@ async def get_extra_containers( ) if git_proxy_container: conts.append(ExtraContainer.model_validate(sanitizer(git_proxy_container))) - return conts + return SessionExtraResources(containers=conts) async def get_auth_secret_authenticated( @@ -159,7 +192,7 @@ async def 
get_auth_secret_authenticated( return ExtraSecret(secret, vol, vol_mount) -async def get_auth_secret_anonymous(nb_config: NotebooksConfig, server_name: str, request: Request) -> ExtraSecret: +def get_auth_secret_anonymous(nb_config: NotebooksConfig, server_name: str, request: Request) -> ExtraSecret: """Get the extra secrets that need to be added to the session for an anonymous user.""" # NOTE: We extract the session cookie value here in order to avoid creating a cookie. # The gateway encrypts and signs cookies so the user ID injected in the request headers does not @@ -186,7 +219,7 @@ async def get_auth_secret_anonymous(nb_config: NotebooksConfig, server_name: str return ExtraSecret(secret) -def get_gitlab_image_pull_secret( +def __get_gitlab_image_pull_secret( nb_config: NotebooksConfig, user: AuthenticatedAPIUser, image_pull_secret_name: str, access_token: str ) -> ExtraSecret: """Create a Kubernetes secret for private GitLab registry authentication.""" @@ -222,7 +255,7 @@ async def get_data_sources( work_dir: PurePosixPath, cloud_storage_overrides: list[apispec.SessionCloudStoragePost], user_repo: UserRepo, -) -> tuple[list[DataSource], list[ExtraSecret], dict[str, list[DataConnectorSecret]]]: +) -> SessionExtraResources: """Generate cloud storage related resources.""" data_sources: list[DataSource] = [] secrets: list[ExtraSecret] = [] @@ -258,7 +291,7 @@ async def get_data_sources( if csr_id not in dcs: raise errors.MissingResourceError( message=f"You have requested a cloud storage with ID {csr_id} which does not exist " - "or you dont have access to." + "or you don't have access to." ) if csr.target_path is not None and not PurePosixPath(csr.target_path).is_absolute(): csr.target_path = (work_dir / csr.target_path).as_posix() @@ -280,7 +313,7 @@ async def get_data_sources( secret = ExtraSecret( cs.secret( secret_name, - nb_config.k8s_client.namespace(), + await nb_config.k8s_client.namespace(), user_secret_key=user_secret_key if secret_key_needed else None, ) ) @@ -292,7 +325,11 @@ async def get_data_sources( accessMode="ReadOnlyMany" if cs.readonly else "ReadWriteOnce", ) ) - return data_sources, secrets, dcs_secrets + return SessionExtraResources( + data_sources=data_sources, + secrets=secrets, + data_connector_secrets=dcs_secrets, + ) async def request_dc_secret_creation( @@ -312,15 +349,23 @@ async def request_dc_secret_creation( } secrets_url = nb_config.user_secrets.secrets_storage_service_url + "/api/secrets/kubernetes" headers = {"Authorization": f"bearer {user.access_token}"} + + cluster_id = None + namespace = await nb_config.k8s_v2_client.namespace() + if (cluster := await nb_config.k8s_v2_client.cluster_by_class_id(manifest.resource_class_id(), user)) is not None: + cluster_id = cluster.id + namespace = cluster.namespace + for s_id, secrets in dc_secrets.items(): if len(secrets) == 0: continue request_data = { "name": f"{manifest.metadata.name}-ds-{s_id.lower()}-secrets", - "namespace": nb_config.k8s_v2_client.namespace(), + "namespace": namespace, "secret_ids": [str(secret.secret_id) for secret in secrets], "owner_references": [owner_reference], "key_mapping": {str(secret.secret_id): secret.name for secret in secrets}, + "cluster_id": str(cluster_id), } async with httpx.AsyncClient(timeout=10) as client: res = await client.post(secrets_url, headers=headers, json=request_data) @@ -379,12 +424,20 @@ async def request_session_secret_creation( if secret_id not in key_mapping: key_mapping[secret_id] = list() key_mapping[secret_id].append(s.secret_slot.filename) + + cluster_id 
= None + namespace = await nb_config.k8s_v2_client.namespace() + if (cluster := await nb_config.k8s_v2_client.cluster_by_class_id(manifest.resource_class_id(), user)) is not None: + cluster_id = cluster.id + namespace = cluster.namespace + request_data = { "name": f"{manifest.metadata.name}-secrets", - "namespace": nb_config.k8s_v2_client.namespace(), + "namespace": namespace, "secret_ids": [str(s.secret_id) for s in session_secrets], "owner_references": [owner_reference], "key_mapping": key_mapping, + "cluster_id": str(cluster_id), } secrets_url = nb_config.user_secrets.secrets_storage_service_url + "/api/secrets/kubernetes" headers = {"Authorization": f"bearer {user.access_token}"} @@ -465,7 +518,7 @@ def get_culling( ) -async def requires_image_pull_secret(nb_config: NotebooksConfig, image: str, internal_gitlab_user: APIUser) -> bool: +async def __requires_image_pull_secret(nb_config: NotebooksConfig, image: str, internal_gitlab_user: APIUser) -> bool: """Determines if an image requires a pull secret based on its visibility and their GitLab access token.""" parsed_image = Image.from_path(image) @@ -484,14 +537,440 @@ async def requires_image_pull_secret(nb_config: NotebooksConfig, image: str, int return False +def __format_image_pull_secret(secret_name: str, access_token: str, registry_domain: str) -> ExtraSecret: + registry_secret = { + "auths": {registry_domain: {"auth": base64.b64encode(f"oauth2:{access_token}".encode()).decode()}} + } + registry_secret = json.dumps(registry_secret) + registry_secret = base64.b64encode(registry_secret.encode()).decode() + return ExtraSecret( + V1Secret( + data={".dockerconfigjson": registry_secret}, + metadata=V1ObjectMeta(name=secret_name), + type="kubernetes.io/dockerconfigjson", + ) + ) + + +async def __get_connected_services_image_pull_secret( + secret_name: str, connected_svcs_repo: ConnectedServicesRepository, image: str, user: APIUser +) -> ExtraSecret | None: + """Return a secret for accessing the image if one is available for the given user.""" + image_parsed = Image.from_path(image) + image_check_result = await ic.check_image(image_parsed, user, connected_svcs_repo, None) + logger.debug(f"Set pull secret for {image} to connection {image_check_result.image_provider}") + if not image_check_result.token: + return None + + if not image_check_result.image_provider: + return None + + return __format_image_pull_secret( + secret_name=secret_name, + access_token=image_check_result.token, + registry_domain=image_check_result.image_provider.registry_url, + ) + + +async def get_image_pull_secret( + image: str, + server_name: str, + nb_config: NotebooksConfig, + user: APIUser, + internal_gitlab_user: APIUser, + connected_svcs_repo: ConnectedServicesRepository, +) -> ExtraSecret | None: + """Get an image pull secret.""" + + v2_secret = await __get_connected_services_image_pull_secret( + f"{server_name}-image-secret", connected_svcs_repo, image, user + ) + if v2_secret: + return v2_secret + + if ( + nb_config.enable_internal_gitlab + and isinstance(user, AuthenticatedAPIUser) + and internal_gitlab_user.access_token is not None + ): + needs_pull_secret = await __requires_image_pull_secret(nb_config, image, internal_gitlab_user) + if needs_pull_secret: + v1_secret = __get_gitlab_image_pull_secret( + nb_config, user, f"{server_name}-image-secret-v1", internal_gitlab_user.access_token + ) + return v1_secret + + return None + + +def get_remote_secret( + user: AuthenticatedAPIUser | AnonymousAPIUser, + config: NotebooksConfig, + server_name: str, + 
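[For reference, `__format_image_pull_secret` above builds a standard `kubernetes.io/dockerconfigjson` secret. The sketch below reproduces the payload it encodes, using made-up registry and token values:]

import base64
import json

# Made-up inputs, for illustration only.
access_token = "gl-token"
registry_domain = "registry.example.org"

auth = base64.b64encode(f"oauth2:{access_token}".encode()).decode()
dockerconfig = {"auths": {registry_domain: {"auth": auth}}}
# This JSON document, base64-encoded once more, is what ends up under the
# ".dockerconfigjson" key of the V1Secret built above.
print(json.dumps(dockerconfig, indent=2))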
remote_provider_id: str, + git_providers: list[GitProvider], +) -> ExtraSecret | None: + """Returns the secret containing the configuration for the remote session controller.""" + if not user.is_authenticated or user.access_token is None or user.refresh_token is None: + return None + remote_provider = next(filter(lambda p: p.id == remote_provider_id, git_providers), None) + if not remote_provider: + return None + renku_base_url = "https://" + config.sessions.ingress.host + renku_base_url = renku_base_url.rstrip("/") + renku_auth_token_uri = f"{renku_base_url}/auth/realms/{config.keycloak_realm}/protocol/openid-connect/token" + secret_data = { + "RSC_AUTH_KIND": "renku", + "RSC_AUTH_TOKEN_URI": remote_provider.access_token_url, + "RSC_AUTH_RENKU_ACCESS_TOKEN": user.access_token, + "RSC_AUTH_RENKU_REFRESH_TOKEN": user.refresh_token, + "RSC_AUTH_RENKU_TOKEN_URI": renku_auth_token_uri, + "RSC_AUTH_RENKU_CLIENT_ID": config.sessions.git_proxy.renku_client_id, + "RSC_AUTH_RENKU_CLIENT_SECRET": config.sessions.git_proxy.renku_client_secret, + } + secret_name = f"{server_name}-remote-secret" + secret = V1Secret(metadata=V1ObjectMeta(name=secret_name), string_data=secret_data) + return ExtraSecret(secret) + + +def get_remote_env( + remote: RemoteConfigurationFirecrest, +) -> list[SessionEnvItem]: + """Returns env variables used for remote sessions.""" + env = [ + SessionEnvItem(name="RSC_REMOTE_KIND", value=remote.kind.value), + SessionEnvItem(name="RSC_FIRECREST_API_URL", value=remote.api_url), + SessionEnvItem(name="RSC_FIRECREST_SYSTEM_NAME", value=remote.system_name), + ] + if remote.partition: + env.append(SessionEnvItem(name="RSC_FIRECREST_PARTITION", value=remote.partition)) + return env + + +async def start_session( + request: Request, + body: apispec.SessionPostRequest, + user: AnonymousAPIUser | AuthenticatedAPIUser, + internal_gitlab_user: APIUser, + nb_config: NotebooksConfig, + git_provider_helper: GitProviderHelperProto, + cluster_repo: ClusterRepository, + data_connector_secret_repo: DataConnectorSecretRepository, + project_repo: ProjectRepository, + project_session_secret_repo: ProjectSessionSecretRepository, + rp_repo: ResourcePoolRepository, + session_repo: SessionRepository, + user_repo: UserRepo, + metrics: MetricsService, + connected_svcs_repo: ConnectedServicesRepository, +) -> tuple[AmaltheaSessionV1Alpha1, bool]: + """Start an Amalthea session. + + Returns a tuple where the first item is an instance of an Amalthea session + and the second item is a boolean set to true iff a new session was created. 
+ """ + launcher = await session_repo.get_launcher(user, ULID.from_str(body.launcher_id)) + project = await project_repo.get_project(user=user, project_id=launcher.project_id) + + # Determine resource_class_id: the class can be overwritten at the user's request + resource_class_id = body.resource_class_id or launcher.resource_class_id + + cluster = await nb_config.k8s_v2_client.cluster_by_class_id(resource_class_id, user) + + server_name = renku_2_make_server_name( + user=user, project_id=str(launcher.project_id), launcher_id=body.launcher_id, cluster_id=str(cluster.id) + ) + existing_session = await nb_config.k8s_v2_client.get_session(server_name, user.id) + if existing_session is not None and existing_session.spec is not None: + return existing_session, False + + # Fully determine the resource pool and resource class + if resource_class_id is None: + resource_pool = await rp_repo.get_default_resource_pool() + resource_class = resource_pool.get_default_resource_class() + if not resource_class and len(resource_pool.classes) > 0: + resource_class = resource_pool.classes[0] + if not resource_class or not resource_class.id: + raise errors.ProgrammingError(message="Cannot find any resource classes in the default pool.") + resource_class_id = resource_class.id + else: + resource_pool = await rp_repo.get_resource_pool_from_class(user, resource_class_id) + resource_class = resource_pool.get_resource_class(resource_class_id) + if not resource_class or not resource_class.id: + raise errors.MissingResourceError(message=f"The resource class with ID {resource_class_id} does not exist.") + await nb_config.crc_validator.validate_class_storage(user, resource_class.id, body.disk_storage) + + # Determine session location + session_location = SessionLocation.remote if resource_pool.remote else SessionLocation.local + if session_location == SessionLocation.remote and not user.is_authenticated: + raise errors.ValidationError(message="Anonymous users cannot start remote sessions.") + + environment = launcher.environment + image = environment.container_image + work_dir = environment.working_directory + if not work_dir: + image_workdir = await core.docker_image_workdir(nb_config, environment.container_image, internal_gitlab_user) + work_dir_fallback = PurePosixPath("/home/jovyan") + work_dir = image_workdir or work_dir_fallback + storage_mount_fallback = work_dir / "work" + storage_mount = launcher.environment.mount_directory or storage_mount_fallback + secrets_mount_directory = storage_mount / project.secrets_mount_directory + session_secrets = await project_session_secret_repo.get_all_session_secrets_from_project( + user=user, project_id=project.id + ) + data_connectors_stream = data_connector_secret_repo.get_data_connectors_with_secrets(user, project.id) + git_providers = await git_provider_helper.get_providers(user=user) + repositories = repositories_from_project(project, git_providers) + + # User secrets + session_extras = SessionExtraResources() + session_extras = session_extras.concat( + user_secrets_extras( + user=user, + config=nb_config, + secrets_mount_directory=secrets_mount_directory.as_posix(), + k8s_secret_name=f"{server_name}-secrets", + session_secrets=session_secrets, + ) + ) + + # Data connectors + session_extras = session_extras.concat( + await get_data_sources( + nb_config=nb_config, + server_name=server_name, + user=user, + data_connectors_stream=data_connectors_stream, + work_dir=work_dir, + cloud_storage_overrides=body.cloudstorage or [], + user_repo=user_repo, + ) + ) + + # More init 
containers + session_extras = session_extras.concat( + await get_extra_init_containers( + nb_config, + user, + repositories, + git_providers, + storage_mount, + work_dir, + uid=environment.uid, + gid=environment.gid, + ) + ) + + # Extra containers + session_extras = session_extras.concat(await get_extra_containers(nb_config, user, repositories, git_providers)) + + # Cluster settings (ingress, storage class, etc) + cluster_settings: ClusterSettings + try: + cluster_settings = await cluster_repo.select(cluster.id) + except errors.MissingResourceError: + # Fallback to global, main cluster parameters + cluster_settings = nb_config.local_cluster_settings() + + ( + base_server_path, + base_server_url, + base_server_https_url, + host, + tls_secret, + ingress_annotations, + ) = cluster_settings.get_ingress_parameters(server_name) + storage_class = cluster_settings.get_storage_class() + service_account_name = cluster_settings.service_account_name + + ui_path = f"{base_server_path}/{environment.default_url.lstrip('/')}" + + ingress = Ingress( + host=host, + ingressClassName=ingress_annotations.get("kubernetes.io/ingress.class"), + annotations=ingress_annotations, + tlsSecret=tls_secret, + pathPrefix=base_server_path, + ) + + # Annotations + annotations: dict[str, str] = { + "renku.io/project_id": str(launcher.project_id), + "renku.io/launcher_id": body.launcher_id, + "renku.io/resource_class_id": str(resource_class_id), + } + + # Authentication + if isinstance(user, AuthenticatedAPIUser): + auth_secret = await get_auth_secret_authenticated( + nb_config, user, server_name, base_server_url, base_server_https_url, base_server_path + ) + else: + auth_secret = get_auth_secret_anonymous(nb_config, server_name, request) + session_extras = session_extras.concat( + SessionExtraResources( + secrets=[auth_secret], + volumes=[auth_secret.volume] if auth_secret.volume else [], + ) + ) + authn_extra_volume_mounts: list[ExtraVolumeMount] = [] + if auth_secret.volume_mount: + authn_extra_volume_mounts.append(auth_secret.volume_mount) + + cert_vol_mounts = init_containers.certificates_volume_mounts(nb_config) + if cert_vol_mounts: + authn_extra_volume_mounts.extend(cert_vol_mounts) + + image_secret = await get_image_pull_secret( + image=image, + server_name=server_name, + nb_config=nb_config, + user=user, + internal_gitlab_user=internal_gitlab_user, + connected_svcs_repo=connected_svcs_repo, + ) + if image_secret: + session_extras = session_extras.concat(SessionExtraResources(secrets=[image_secret])) + + # Remote session configuration + remote_secret = None + if session_location == SessionLocation.remote: + assert resource_pool.remote is not None + if resource_pool.remote.provider_id is None: + raise errors.ProgrammingError( + message=f"The resource pool {resource_pool.id} configuration is not valid (missing field 'remote_provider_id')." 
# noqa E501 + ) + remote_secret = get_remote_secret( + user=user, + config=nb_config, + server_name=server_name, + remote_provider_id=resource_pool.remote.provider_id, + git_providers=git_providers, + ) + if remote_secret is not None: + session_extras = session_extras.concat(SessionExtraResources(secrets=[remote_secret])) + + # Raise an error if there are invalid environment variables in the request body + verify_launcher_env_variable_overrides(launcher, body) + env = [ + SessionEnvItem(name="RENKU_BASE_URL_PATH", value=base_server_path), + SessionEnvItem(name="RENKU_BASE_URL", value=base_server_url), + SessionEnvItem(name="RENKU_MOUNT_DIR", value=storage_mount.as_posix()), + SessionEnvItem(name="RENKU_SESSION", value="1"), + SessionEnvItem(name="RENKU_SESSION_IP", value="0.0.0.0"), # nosec B104 + SessionEnvItem(name="RENKU_SESSION_PORT", value=f"{environment.port}"), + SessionEnvItem(name="RENKU_WORKING_DIR", value=work_dir.as_posix()), + SessionEnvItem(name="RENKU_SECRETS_PATH", value=project.secrets_mount_directory.as_posix()), + SessionEnvItem(name="RENKU_PROJECT_ID", value=str(project.id)), + SessionEnvItem(name="RENKU_PROJECT_PATH", value=project.path.serialize()), + SessionEnvItem(name="RENKU_LAUNCHER_ID", value=str(launcher.id)), + ] + if session_location == SessionLocation.remote: + assert resource_pool.remote is not None + env.extend( + get_remote_env( + remote=resource_pool.remote, + ) + ) + launcher_env_variables = get_launcher_env_variables(launcher, body) + env.extend(launcher_env_variables) + + session = AmaltheaSessionV1Alpha1( + metadata=Metadata(name=server_name, annotations=annotations), + spec=AmaltheaSessionSpec( + location=session_location, + imagePullSecrets=[ImagePullSecret(name=image_secret.name, adopt=True)] if image_secret else [], + codeRepositories=[], + hibernated=False, + reconcileStrategy=ReconcileStrategy.whenFailedOrHibernated, + priorityClassName=resource_class.quota, + session=Session( + image=image, + imagePullPolicy=ImagePullPolicy.Always, + urlPath=ui_path, + port=environment.port, + storage=Storage( + className=storage_class, + size=SizeStr(str(body.disk_storage) + "G"), + mountPath=storage_mount.as_posix(), + ), + workingDir=work_dir.as_posix(), + runAsUser=environment.uid, + runAsGroup=environment.gid, + resources=resources_from_resource_class(resource_class), + extraVolumeMounts=session_extras.volume_mounts, + command=environment.command, + args=environment.args, + shmSize=ShmSizeStr("1G"), + stripURLPath=environment.strip_path_prefix, + env=env, + remoteSecretRef=remote_secret.ref() if remote_secret else None, + ), + ingress=ingress, + extraContainers=session_extras.containers, + initContainers=session_extras.init_containers, + extraVolumes=session_extras.volumes, + culling=get_culling(user, resource_pool, nb_config), + authentication=Authentication( + enabled=True, + type=AuthenticationType.oauth2proxy + if isinstance(user, AuthenticatedAPIUser) + else AuthenticationType.token, + secretRef=auth_secret.key_ref("auth"), + extraVolumeMounts=authn_extra_volume_mounts, + ), + dataSources=session_extras.data_sources, + tolerations=tolerations_from_resource_class(resource_class, nb_config.sessions.tolerations_model), + affinity=node_affinity_from_resource_class(resource_class, nb_config.sessions.affinity_model), + serviceAccountName=service_account_name, + ), + ) + secrets_to_create = session_extras.secrets or [] + for s in secrets_to_create: + await nb_config.k8s_v2_client.create_secret(K8sSecret.from_v1_secret(s.secret, cluster)) + try: + session = 
await nb_config.k8s_v2_client.create_session(session, user) + except Exception as err: + for s in secrets_to_create: + await nb_config.k8s_v2_client.delete_secret(K8sSecret.from_v1_secret(s.secret, cluster)) + raise errors.ProgrammingError(message="Could not start the amalthea session") from err + else: + try: + await request_session_secret_creation(user, nb_config, session, session_secrets) + data_connector_secrets = session_extras.data_connector_secrets or dict() + await request_dc_secret_creation(user, nb_config, session, data_connector_secrets) + except Exception: + await nb_config.k8s_v2_client.delete_session(server_name, user.id) + raise + + await metrics.user_requested_session_launch( + user=user, + metadata={ + "cpu": int(resource_class.cpu * 1000), + "memory": resource_class.memory, + "gpu": resource_class.gpu, + "storage": body.disk_storage, + "resource_class_id": resource_class.id, + "resource_pool_id": resource_pool.id or "", + "resource_class_name": f"{resource_pool.name}.{resource_class.name}", + "session_id": server_name, + }, + ) + return session, True + + async def patch_session( body: apispec.SessionPatchRequest, session_id: str, - nb_config: NotebooksConfig, user: AnonymousAPIUser | AuthenticatedAPIUser, internal_gitlab_user: APIUser, - rp_repo: ResourcePoolRepository, + nb_config: NotebooksConfig, + git_provider_helper: GitProviderHelperProto, project_repo: ProjectRepository, + project_session_secret_repo: ProjectSessionSecretRepository, + rp_repo: ResourcePoolRepository, + session_repo: SessionRepository, + connected_svcs_repo: ConnectedServicesRepository, metrics: MetricsService, ) -> AmaltheaSessionV1Alpha1: """Patch an Amalthea session.""" @@ -543,6 +1022,11 @@ async def patch_session( raise errors.MissingResourceError( message=f"The resource class you requested with ID {body.resource_class_id} does not exist" ) + # TODO: reject session classes which change the cluster + if not patch.metadata: + patch.metadata = AmaltheaSessionV1Alpha1MetadataPatch() + # Patch the resource class ID in the annotations + patch.metadata.annotations = {"renku.io/resource_class_id": str(body.resource_class_id)} if not patch.spec.session: patch.spec.session = AmaltheaSessionV1Alpha1SpecSessionPatch() patch.spec.session.resources = resources_from_resource_class(rc) @@ -562,48 +1046,93 @@ async def patch_session( if is_getting_hibernated: return await nb_config.k8s_v2_client.patch_session(session_id, user.id, patch.to_rfc7386()) - # Patching the extra containers (includes the git proxy) - git_providers = await nb_config.git_provider_helper.get_providers(user) - repositories = await repositories_from_session(user, session, project_repo, git_providers) - extra_containers = await get_extra_containers( - nb_config, - user, - repositories, - git_providers, + server_name = session.metadata.name + launcher = await session_repo.get_launcher(user, session.launcher_id) + project = await project_repo.get_project(user=user, project_id=session.project_id) + environment = launcher.environment + work_dir = environment.working_directory + if not work_dir: + image_workdir = await core.docker_image_workdir(nb_config, environment.container_image, internal_gitlab_user) + work_dir_fallback = PurePosixPath("/home/jovyan") + work_dir = image_workdir or work_dir_fallback + storage_mount_fallback = work_dir / "work" + storage_mount = launcher.environment.mount_directory or storage_mount_fallback + secrets_mount_directory = storage_mount / project.secrets_mount_directory + session_secrets = await 
project_session_secret_repo.get_all_session_secrets_from_project(
+        user=user, project_id=project.id
+    )
+    git_providers = await git_provider_helper.get_providers(user=user)
+    repositories = repositories_from_project(project, git_providers)
+
+    # User secrets
+    session_extras = SessionExtraResources()
+    session_extras = session_extras.concat(
+        user_secrets_extras(
+            user=user,
+            config=nb_config,
+            secrets_mount_directory=secrets_mount_directory.as_posix(),
+            k8s_secret_name=f"{server_name}-secrets",
+            session_secrets=session_secrets,
+        )
     )
-    if extra_containers:
-        patch.spec.extraContainers = extra_containers
 
-    # Patching the image pull secret
-    if isinstance(user, AuthenticatedAPIUser) and internal_gitlab_user.access_token is not None:
-        image = session.spec.session.image
-        server_name = session.metadata.name
-        needs_pull_secret = await requires_image_pull_secret(nb_config, image, internal_gitlab_user)
+    # Data connectors: skip
+    # TODO: How can we patch data connectors? Should we even patch them?
+    # TODO: The fact that `start_session()` accepts overrides for data connectors
+    # TODO: but that we do not save these overrides (e.g. as annotations) means that
+    # TODO: we cannot patch data connectors upon resume.
+    # TODO: If we did, we would lose the user's provided overrides (e.g. unsaved credentials).
+
+    # More init containers
+    session_extras = session_extras.concat(
+        await get_extra_init_containers(
+            nb_config,
+            user,
+            repositories,
+            git_providers,
+            storage_mount,
+            work_dir,
+            uid=environment.uid,
+            gid=environment.gid,
+        )
+    )
 
-        logger.info(f"Session with ID {session_id} needs pull secret for image {image}: {needs_pull_secret}")
+    # Extra containers
+    session_extras = session_extras.concat(await get_extra_containers(nb_config, user, repositories, git_providers))
 
-        if needs_pull_secret:
-            image_pull_secret_name = f"{server_name}-image-secret"
+    # Patching the image pull secret
+    image = session.spec.session.image
+    image_pull_secret = await get_image_pull_secret(
+        image=image,
+        server_name=server_name,
+        nb_config=nb_config,
+        connected_svcs_repo=connected_svcs_repo,
+        user=user,
+        internal_gitlab_user=internal_gitlab_user,
+    )
+    if image_pull_secret:
+        # NOTE: rebind here so the new secret is also picked up by `secrets_to_create` below.
+        session_extras = session_extras.concat(SessionExtraResources(secrets=[image_pull_secret]))
+        patch.spec.imagePullSecrets = [ImagePullSecret(name=image_pull_secret.name, adopt=image_pull_secret.adopt)]
 
-            # Always create a fresh secret to ensure we have the latest token
-            image_secret = get_gitlab_image_pull_secret(
-                nb_config, user, image_pull_secret_name, internal_gitlab_user.access_token
-            )
 
-            if not image_secret:
-                logger.error(f"Failed to create image pull secret for session ID {session_id} with image {image}")
-                raise errors.ProgrammingError(
-                    message=f"We cannot retrive credentials for your private image {image}. "
-                    "In order to resolve this problem, you can try to log out and back in "
-                    "and/or check that you still have permissions for the image repository."
-                )
-            # Ensure the secret is created in the cluster
-            await nb_config.k8s_v2_client.create_secret(image_secret.secret, cluster)
+    # Construct session patch
+    patch.spec.extraContainers = _make_patch_spec_list(
+        existing=session.spec.extraContainers or [], updated=session_extras.containers
+    )
+    patch.spec.initContainers = _make_patch_spec_list(
+        existing=session.spec.initContainers or [], updated=session_extras.init_containers
+    )
+    patch.spec.extraVolumes = _make_patch_spec_list(
+        existing=session.spec.extraVolumes or [], updated=session_extras.volumes
+    )
+    if not patch.spec.session:
+        patch.spec.session = AmaltheaSessionV1Alpha1SpecSessionPatch()
+    patch.spec.session.extraVolumeMounts = _make_patch_spec_list(
+        existing=session.spec.session.extraVolumeMounts or [], updated=session_extras.volume_mounts
+    )
 
-            updated_secrets = [
-                secret for secret in (session.spec.imagePullSecrets or []) if not secret.name.endswith("-image-secret")
-            ]
-            updated_secrets.append(ImagePullSecret(name=image_pull_secret_name, adopt=True))
-            patch.spec.imagePullSecrets = updated_secrets
+    secrets_to_create = session_extras.secrets or []
+    for s in secrets_to_create:
+        await nb_config.k8s_v2_client.create_secret(K8sSecret.from_v1_secret(s.secret, cluster))
 
     patch_serialized = patch.to_rfc7386()
     if len(patch_serialized) == 0:
@@ -656,3 +1185,33 @@ def _find_mount_folder(dc: RCloneStorage) -> str:
     )
 
     return result_dcs
+
+
+class _NamedResource(Protocol):
+    """Represents a resource with a name."""
+
+    name: str
+
+
+_T = TypeVar("_T", bound=_NamedResource)
+
+
+def _make_patch_spec_list(existing: Sequence[_T], updated: Sequence[_T]) -> list[_T] | None:
+    """Merges updated into existing by upserting items identified by their name.
+
+    This function is used to construct session patches, merging session resources by name (containers, volumes, etc.).
+    """
+    patch_list = None
+    if updated:
+        patch_list = list(existing)
+        upsert_list = list(updated)
+        for upsert_item in upsert_list:
+            # Find out if the upsert_item needs to be added or updated
+            found = next(filter(lambda t: t[1].name == upsert_item.name, enumerate(patch_list)), None)
+            if found is not None:
+                idx, _ = found
+                patch_list[idx] = upsert_item
+            else:
+                patch_list.append(upsert_item)
+    return patch_list
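[`patch.to_rfc7386()` above serializes the session patch as an RFC 7386 JSON merge patch. Merge patches replace arrays wholesale rather than merging them element-wise, which is why `_make_patch_spec_list` upserts the extra containers, init containers, volumes, and volume mounts by name before they go on the patch. A self-contained illustration of the upsert logic, with a made-up `Item` type standing in for any named session resource:]

from dataclasses import dataclass


@dataclass
class Item:
    """Toy stand-in for a named session resource (container, volume, ...)."""

    name: str
    value: int


def make_patch_spec_list(existing, updated):
    # Same upsert-by-name logic as _make_patch_spec_list above.
    if not updated:
        return None
    merged = list(existing)
    for item in updated:
        found = next((i for i, cur in enumerate(merged) if cur.name == item.name), None)
        if found is not None:
            merged[found] = item
        else:
            merged.append(item)
    return merged


existing = [Item("git-proxy", 1), Item("certs", 1)]
updated = [Item("git-proxy", 2), Item("user-secrets", 1)]
assert make_patch_spec_list(existing, updated) == [
    Item("git-proxy", 2),
    Item("certs", 1),
    Item("user-secrets", 1),
]
# The merged list then REPLACES the server-side list as a whole in the merge
# patch; sending only [Item("git-proxy", 2)] would silently drop "certs".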
diff --git a/components/renku_data_services/notebooks/cr_amalthea_session.py b/components/renku_data_services/notebooks/cr_amalthea_session.py
index 87302cd5d..0e43b4f1f 100644
--- a/components/renku_data_services/notebooks/cr_amalthea_session.py
+++ b/components/renku_data_services/notebooks/cr_amalthea_session.py
@@ -1,12 +1,12 @@
 # generated by datamodel-codegen:
 #   filename:
-#   timestamp: 2025-07-07T13:49:34+00:00
+#   timestamp: 2025-10-08T09:35:03+00:00
 
 from __future__ import annotations
 
 from datetime import datetime, timedelta
 from enum import Enum
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Mapping, Optional, Sequence, Union
 
 from pydantic import ConfigDict, Field, RootModel
 from renku_data_services.notebooks.cr_base import BaseCRD
@@ -21,7 +21,7 @@ class MatchExpression(BaseCRD):
         ...,
         description="Represents a key's relationship to a set of values.\nValid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt.",
     )
-    values: Optional[List[str]] = Field(
+    values: Optional[Sequence[str]] = Field(
        default=None,
        description="An array of string values. If the operator is In or NotIn,\nthe values array must be non-empty.
If the operator is Exists or DoesNotExist,\nthe values array must be empty. If the operator is Gt or Lt, the values\narray must have a single element, which will be interpreted as an integer.\nThis array is replaced during a strategic merge patch.", ) @@ -36,7 +36,7 @@ class MatchField(BaseCRD): ..., description="Represents a key's relationship to a set of values.\nValid operators are In, NotIn, Exists, DoesNotExist. Gt, and Lt.", ) - values: Optional[List[str]] = Field( + values: Optional[Sequence[str]] = Field( default=None, description="An array of string values. If the operator is In or NotIn,\nthe values array must be non-empty. If the operator is Exists or DoesNotExist,\nthe values array must be empty. If the operator is Gt or Lt, the values\narray must have a single element, which will be interpreted as an integer.\nThis array is replaced during a strategic merge patch.", ) @@ -46,11 +46,11 @@ class Preference(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression]] = Field( + matchExpressions: Optional[Sequence[MatchExpression]] = Field( default=None, description="A list of node selector requirements by node's labels.", ) - matchFields: Optional[List[MatchField]] = Field( + matchFields: Optional[Sequence[MatchField]] = Field( default=None, description="A list of node selector requirements by node's fields.", ) @@ -74,11 +74,11 @@ class NodeSelectorTerm(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression]] = Field( + matchExpressions: Optional[Sequence[MatchExpression]] = Field( default=None, description="A list of node selector requirements by node's labels.", ) - matchFields: Optional[List[MatchField]] = Field( + matchFields: Optional[Sequence[MatchField]] = Field( default=None, description="A list of node selector requirements by node's fields.", ) @@ -88,7 +88,7 @@ class RequiredDuringSchedulingIgnoredDuringExecution(BaseCRD): model_config = ConfigDict( extra="allow", ) - nodeSelectorTerms: List[NodeSelectorTerm] = Field( + nodeSelectorTerms: Sequence[NodeSelectorTerm] = Field( ..., description="Required. A list of node selector terms. The terms are ORed." ) @@ -98,7 +98,7 @@ class NodeAffinity(BaseCRD): extra="allow", ) preferredDuringSchedulingIgnoredDuringExecution: Optional[ - List[PreferredDuringSchedulingIgnoredDuringExecutionItem] + Sequence[PreferredDuringSchedulingIgnoredDuringExecutionItem] ] = Field( default=None, description='The scheduler will prefer to schedule pods to nodes that satisfy\nthe affinity expressions specified by this field, but it may choose\na node that violates one or more of the expressions. The node that is\nmost preferred is the one with the greatest sum of weights, i.e.\nfor each node that meets all of the scheduling requirements (resource\nrequest, requiredDuringScheduling affinity expressions, etc.),\ncompute a sum by iterating through the elements of this field and adding\n"weight" to the sum if the node matches the corresponding matchExpressions; the\nnode(s) with the highest sum are the most preferred.', @@ -122,7 +122,7 @@ class MatchExpression2(BaseCRD): ..., description="operator represents a key's relationship to a set of values.\nValid operators are In, NotIn, Exists and DoesNotExist.", ) - values: Optional[List[str]] = Field( + values: Optional[Sequence[str]] = Field( default=None, description="values is an array of string values. If the operator is In or NotIn,\nthe values array must be non-empty. 
If the operator is Exists or DoesNotExist,\nthe values array must be empty. This array is replaced during a strategic\nmerge patch.", ) @@ -132,11 +132,11 @@ class LabelSelector(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression2]] = Field( + matchExpressions: Optional[Sequence[MatchExpression2]] = Field( default=None, description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", ) - matchLabels: Optional[Dict[str, str]] = Field( + matchLabels: Optional[Mapping[str, str]] = Field( default=None, description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', ) @@ -146,11 +146,11 @@ class NamespaceSelector(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression2]] = Field( + matchExpressions: Optional[Sequence[MatchExpression2]] = Field( default=None, description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", ) - matchLabels: Optional[Dict[str, str]] = Field( + matchLabels: Optional[Mapping[str, str]] = Field( default=None, description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', ) @@ -164,11 +164,11 @@ class PodAffinityTerm(BaseCRD): default=None, description="A label query over a set of resources, in this case pods.\nIf it's null, this PodAffinityTerm matches with no Pods.", ) - matchLabelKeys: Optional[List[str]] = Field( + matchLabelKeys: Optional[Sequence[str]] = Field( default=None, description="MatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. The default value is empty.\nThe same key is forbidden to exist in both matchLabelKeys and labelSelector.\nAlso, matchLabelKeys cannot be set when labelSelector isn't set.", ) - mismatchLabelKeys: Optional[List[str]] = Field( + mismatchLabelKeys: Optional[Sequence[str]] = Field( default=None, description="MismatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. 
The default value is empty.\nThe same key is forbidden to exist in both mismatchLabelKeys and labelSelector.\nAlso, mismatchLabelKeys cannot be set when labelSelector isn't set.", ) @@ -176,7 +176,7 @@ class PodAffinityTerm(BaseCRD): default=None, description='A label query over the set of namespaces that the term applies to.\nThe term is applied to the union of the namespaces selected by this field\nand the ones listed in the namespaces field.\nnull selector and null or empty namespaces list means "this pod\'s namespace".\nAn empty selector ({}) matches all namespaces.', ) - namespaces: Optional[List[str]] = Field( + namespaces: Optional[Sequence[str]] = Field( default=None, description='namespaces specifies a static list of namespace names that the term applies to.\nThe term is applied to the union of the namespaces listed in this field\nand the ones selected by namespaceSelector.\nnull or empty namespaces list and null namespaceSelector means "this pod\'s namespace".', ) @@ -204,11 +204,11 @@ class LabelSelector1(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression2]] = Field( + matchExpressions: Optional[Sequence[MatchExpression2]] = Field( default=None, description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", ) - matchLabels: Optional[Dict[str, str]] = Field( + matchLabels: Optional[Mapping[str, str]] = Field( default=None, description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', ) @@ -218,11 +218,11 @@ class NamespaceSelector1(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression2]] = Field( + matchExpressions: Optional[Sequence[MatchExpression2]] = Field( default=None, description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", ) - matchLabels: Optional[Dict[str, str]] = Field( + matchLabels: Optional[Mapping[str, str]] = Field( default=None, description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', ) @@ -236,11 +236,11 @@ class RequiredDuringSchedulingIgnoredDuringExecutionItem(BaseCRD): default=None, description="A label query over a set of resources, in this case pods.\nIf it's null, this PodAffinityTerm matches with no Pods.", ) - matchLabelKeys: Optional[List[str]] = Field( + matchLabelKeys: Optional[Sequence[str]] = Field( default=None, description="MatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. 
The default value is empty.\nThe same key is forbidden to exist in both matchLabelKeys and labelSelector.\nAlso, matchLabelKeys cannot be set when labelSelector isn't set.", ) - mismatchLabelKeys: Optional[List[str]] = Field( + mismatchLabelKeys: Optional[Sequence[str]] = Field( default=None, description="MismatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. The default value is empty.\nThe same key is forbidden to exist in both mismatchLabelKeys and labelSelector.\nAlso, mismatchLabelKeys cannot be set when labelSelector isn't set.", ) @@ -248,7 +248,7 @@ class RequiredDuringSchedulingIgnoredDuringExecutionItem(BaseCRD): default=None, description='A label query over the set of namespaces that the term applies to.\nThe term is applied to the union of the namespaces selected by this field\nand the ones listed in the namespaces field.\nnull selector and null or empty namespaces list means "this pod\'s namespace".\nAn empty selector ({}) matches all namespaces.', ) - namespaces: Optional[List[str]] = Field( + namespaces: Optional[Sequence[str]] = Field( default=None, description='namespaces specifies a static list of namespace names that the term applies to.\nThe term is applied to the union of the namespaces listed in this field\nand the ones selected by namespaceSelector.\nnull or empty namespaces list and null namespaceSelector means "this pod\'s namespace".', ) @@ -263,13 +263,13 @@ class PodAffinity(BaseCRD): extra="allow", ) preferredDuringSchedulingIgnoredDuringExecution: Optional[ - List[PreferredDuringSchedulingIgnoredDuringExecutionItem1] + Sequence[PreferredDuringSchedulingIgnoredDuringExecutionItem1] ] = Field( default=None, description='The scheduler will prefer to schedule pods to nodes that satisfy\nthe affinity expressions specified by this field, but it may choose\na node that violates one or more of the expressions. The node that is\nmost preferred is the one with the greatest sum of weights, i.e.\nfor each node that meets all of the scheduling requirements (resource\nrequest, requiredDuringScheduling affinity expressions, etc.),\ncompute a sum by iterating through the elements of this field and adding\n"weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the\nnode(s) with the highest sum are the most preferred.', ) requiredDuringSchedulingIgnoredDuringExecution: Optional[ - List[RequiredDuringSchedulingIgnoredDuringExecutionItem] + Sequence[RequiredDuringSchedulingIgnoredDuringExecutionItem] ] = Field( default=None, description="If the affinity requirements specified by this field are not met at\nscheduling time, the pod will not be scheduled onto the node.\nIf the affinity requirements specified by this field cease to be met\nat some point during pod execution (e.g. due to a pod label update), the\nsystem may or may not try to eventually evict the pod from its node.\nWhen there are multiple elements, the lists of nodes corresponding to each\npodAffinityTerm are intersected, i.e. 
all terms must be satisfied.", @@ -280,11 +280,11 @@ class LabelSelector2(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression2]] = Field( + matchExpressions: Optional[Sequence[MatchExpression2]] = Field( default=None, description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", ) - matchLabels: Optional[Dict[str, str]] = Field( + matchLabels: Optional[Mapping[str, str]] = Field( default=None, description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', ) @@ -294,11 +294,11 @@ class NamespaceSelector2(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression2]] = Field( + matchExpressions: Optional[Sequence[MatchExpression2]] = Field( default=None, description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", ) - matchLabels: Optional[Dict[str, str]] = Field( + matchLabels: Optional[Mapping[str, str]] = Field( default=None, description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', ) @@ -312,11 +312,11 @@ class PodAffinityTerm1(BaseCRD): default=None, description="A label query over a set of resources, in this case pods.\nIf it's null, this PodAffinityTerm matches with no Pods.", ) - matchLabelKeys: Optional[List[str]] = Field( + matchLabelKeys: Optional[Sequence[str]] = Field( default=None, description="MatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. The default value is empty.\nThe same key is forbidden to exist in both matchLabelKeys and labelSelector.\nAlso, matchLabelKeys cannot be set when labelSelector isn't set.", ) - mismatchLabelKeys: Optional[List[str]] = Field( + mismatchLabelKeys: Optional[Sequence[str]] = Field( default=None, description="MismatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. 
The default value is empty.\nThe same key is forbidden to exist in both mismatchLabelKeys and labelSelector.\nAlso, mismatchLabelKeys cannot be set when labelSelector isn't set.", ) @@ -324,7 +324,7 @@ class PodAffinityTerm1(BaseCRD): default=None, description='A label query over the set of namespaces that the term applies to.\nThe term is applied to the union of the namespaces selected by this field\nand the ones listed in the namespaces field.\nnull selector and null or empty namespaces list means "this pod\'s namespace".\nAn empty selector ({}) matches all namespaces.', ) - namespaces: Optional[List[str]] = Field( + namespaces: Optional[Sequence[str]] = Field( default=None, description='namespaces specifies a static list of namespace names that the term applies to.\nThe term is applied to the union of the namespaces listed in this field\nand the ones selected by namespaceSelector.\nnull or empty namespaces list and null namespaceSelector means "this pod\'s namespace".', ) @@ -352,11 +352,11 @@ class LabelSelector3(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression2]] = Field( + matchExpressions: Optional[Sequence[MatchExpression2]] = Field( default=None, description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", ) - matchLabels: Optional[Dict[str, str]] = Field( + matchLabels: Optional[Mapping[str, str]] = Field( default=None, description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', ) @@ -366,11 +366,11 @@ class NamespaceSelector3(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression2]] = Field( + matchExpressions: Optional[Sequence[MatchExpression2]] = Field( default=None, description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", ) - matchLabels: Optional[Dict[str, str]] = Field( + matchLabels: Optional[Mapping[str, str]] = Field( default=None, description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', ) @@ -384,11 +384,11 @@ class RequiredDuringSchedulingIgnoredDuringExecutionItem1(BaseCRD): default=None, description="A label query over a set of resources, in this case pods.\nIf it's null, this PodAffinityTerm matches with no Pods.", ) - matchLabelKeys: Optional[List[str]] = Field( + matchLabelKeys: Optional[Sequence[str]] = Field( default=None, description="MatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key in (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. 
The default value is empty.\nThe same key is forbidden to exist in both matchLabelKeys and labelSelector.\nAlso, matchLabelKeys cannot be set when labelSelector isn't set.", ) - mismatchLabelKeys: Optional[List[str]] = Field( + mismatchLabelKeys: Optional[Sequence[str]] = Field( default=None, description="MismatchLabelKeys is a set of pod label keys to select which pods will\nbe taken into consideration. The keys are used to lookup values from the\nincoming pod labels, those key-value labels are merged with `labelSelector` as `key notin (value)`\nto select the group of existing pods which pods will be taken into consideration\nfor the incoming pod's pod (anti) affinity. Keys that don't exist in the incoming\npod labels will be ignored. The default value is empty.\nThe same key is forbidden to exist in both mismatchLabelKeys and labelSelector.\nAlso, mismatchLabelKeys cannot be set when labelSelector isn't set.", ) @@ -396,7 +396,7 @@ class RequiredDuringSchedulingIgnoredDuringExecutionItem1(BaseCRD): default=None, description='A label query over the set of namespaces that the term applies to.\nThe term is applied to the union of the namespaces selected by this field\nand the ones listed in the namespaces field.\nnull selector and null or empty namespaces list means "this pod\'s namespace".\nAn empty selector ({}) matches all namespaces.', ) - namespaces: Optional[List[str]] = Field( + namespaces: Optional[Sequence[str]] = Field( default=None, description='namespaces specifies a static list of namespace names that the term applies to.\nThe term is applied to the union of the namespaces listed in this field\nand the ones selected by namespaceSelector.\nnull or empty namespaces list and null namespaceSelector means "this pod\'s namespace".', ) @@ -411,13 +411,13 @@ class PodAntiAffinity(BaseCRD): extra="allow", ) preferredDuringSchedulingIgnoredDuringExecution: Optional[ - List[PreferredDuringSchedulingIgnoredDuringExecutionItem2] + Sequence[PreferredDuringSchedulingIgnoredDuringExecutionItem2] ] = Field( default=None, description='The scheduler will prefer to schedule pods to nodes that satisfy\nthe anti-affinity expressions specified by this field, but it may choose\na node that violates one or more of the expressions. The node that is\nmost preferred is the one with the greatest sum of weights, i.e.\nfor each node that meets all of the scheduling requirements (resource\nrequest, requiredDuringScheduling anti-affinity expressions, etc.),\ncompute a sum by iterating through the elements of this field and adding\n"weight" to the sum if the node has pods which matches the corresponding podAffinityTerm; the\nnode(s) with the highest sum are the most preferred.', ) requiredDuringSchedulingIgnoredDuringExecution: Optional[ - List[RequiredDuringSchedulingIgnoredDuringExecutionItem1] + Sequence[RequiredDuringSchedulingIgnoredDuringExecutionItem1] ] = Field( default=None, description="If the anti-affinity requirements specified by this field are not met at\nscheduling time, the pod will not be scheduled onto the node.\nIf the anti-affinity requirements specified by this field cease to be met\nat some point during pod execution (e.g. due to a pod label update), the\nsystem may or may not try to eventually evict the pod from its node.\nWhen there are multiple elements, the lists of nodes corresponding to each\npodAffinityTerm are intersected, i.e. 
all terms must be satisfied.", @@ -499,7 +499,7 @@ class Authentication(BaseCRD): extra="allow", ) enabled: bool = True - extraVolumeMounts: Optional[List[ExtraVolumeMount]] = Field( + extraVolumeMounts: Optional[Sequence[ExtraVolumeMount]] = Field( default=None, description="Additional volume mounts for the authentication container.", ) @@ -780,7 +780,7 @@ class Exec(BaseCRD): model_config = ConfigDict( extra="allow", ) - command: Optional[List[str]] = Field( + command: Optional[Sequence[str]] = Field( default=None, description="Command is the command line to execute inside the container, the working directory for the\ncommand is root ('/') in the container's filesystem. The command is simply exec'd, it is\nnot run inside a shell, so traditional shell instructions ('|', etc) won't work. To use\na shell, you need to explicitly call out to that shell.\nExit status of 0 is treated as live/healthy and non-zero is unhealthy.", ) @@ -805,7 +805,7 @@ class HttpGet(BaseCRD): default=None, description='Host name to connect to, defaults to the pod IP. You probably want to set\n"Host" in httpHeaders instead.', ) - httpHeaders: Optional[List[HttpHeader]] = Field( + httpHeaders: Optional[Sequence[HttpHeader]] = Field( default=None, description="Custom headers to set in the request. HTTP allows repeated headers.", ) @@ -872,7 +872,7 @@ class HttpGet1(BaseCRD): default=None, description='Host name to connect to, defaults to the pod IP. You probably want to set\n"Host" in httpHeaders instead.', ) - httpHeaders: Optional[List[HttpHeader]] = Field( + httpHeaders: Optional[Sequence[HttpHeader]] = Field( default=None, description="Custom headers to set in the request. HTTP allows repeated headers.", ) @@ -950,7 +950,7 @@ class HttpGet2(BaseCRD): default=None, description='Host name to connect to, defaults to the pod IP. You probably want to set\n"Host" in httpHeaders instead.', ) - httpHeaders: Optional[List[HttpHeader]] = Field( + httpHeaders: Optional[Sequence[HttpHeader]] = Field( default=None, description="Custom headers to set in the request. HTTP allows repeated headers.", ) @@ -1043,7 +1043,7 @@ class HttpGet3(BaseCRD): default=None, description='Host name to connect to, defaults to the pod IP. You probably want to set\n"Host" in httpHeaders instead.', ) - httpHeaders: Optional[List[HttpHeader]] = Field( + httpHeaders: Optional[Sequence[HttpHeader]] = Field( default=None, description="Custom headers to set in the request. HTTP allows repeated headers.", ) @@ -1163,15 +1163,15 @@ class Resources(BaseCRD): model_config = ConfigDict( extra="allow", ) - claims: Optional[List[Claim]] = Field( + claims: Optional[Sequence[Claim]] = Field( default=None, description="Claims lists the names of resources, defined in spec.resourceClaims,\nthat are used by this container.\n\nThis is an alpha field and requires enabling the\nDynamicResourceAllocation feature gate.\n\nThis field is immutable. 
It can only be set for containers.", ) - limits: Optional[Dict[str, Union[Limits, Limits1]]] = Field( + limits: Optional[Mapping[str, Union[Limits, Limits1]]] = Field( default=None, description="Limits describes the maximum amount of compute resources allowed.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) - requests: Optional[Dict[str, Union[Requests, Requests1]]] = Field( + requests: Optional[Mapping[str, Union[Requests, Requests1]]] = Field( default=None, description="Requests describes the minimum amount of compute resources required.\nIf Requests is omitted for a container, it defaults to Limits if that is explicitly specified,\notherwise to an implementation-defined value. Requests cannot exceed Limits.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) @@ -1195,8 +1195,10 @@ class Capabilities(BaseCRD): model_config = ConfigDict( extra="allow", ) - add: Optional[List[str]] = Field(default=None, description="Added capabilities") - drop: Optional[List[str]] = Field(default=None, description="Removed capabilities") + add: Optional[Sequence[str]] = Field(default=None, description="Added capabilities") + drop: Optional[Sequence[str]] = Field( + default=None, description="Removed capabilities" + ) class SeLinuxOptions(BaseCRD): @@ -1319,7 +1321,7 @@ class HttpGet4(BaseCRD): default=None, description='Host name to connect to, defaults to the pod IP. You probably want to set\n"Host" in httpHeaders instead.', ) - httpHeaders: Optional[List[HttpHeader]] = Field( + httpHeaders: Optional[Sequence[HttpHeader]] = Field( default=None, description="Custom headers to set in the request. HTTP allows repeated headers.", ) @@ -1428,19 +1430,19 @@ class ExtraContainer(BaseCRD): model_config = ConfigDict( extra="allow", ) - args: Optional[List[str]] = Field( + args: Optional[Sequence[str]] = Field( default=None, description='Arguments to the entrypoint.\nThe container image\'s CMD is used if this is not provided.\nVariable references $(VAR_NAME) are expanded using the container\'s environment. If a variable\ncannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced\nto a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will\nproduce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless\nof whether the variable exists or not. Cannot be updated.\nMore info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell', ) - command: Optional[List[str]] = Field( + command: Optional[Sequence[str]] = Field( default=None, description='Entrypoint array. Not executed within a shell.\nThe container image\'s ENTRYPOINT is used if this is not provided.\nVariable references $(VAR_NAME) are expanded using the container\'s environment. If a variable\ncannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced\nto a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will\nproduce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless\nof whether the variable exists or not. 
Cannot be updated.\nMore info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell', ) - env: Optional[List[EnvItem]] = Field( + env: Optional[Sequence[EnvItem]] = Field( default=None, description="List of environment variables to set in the container.\nCannot be updated.", ) - envFrom: Optional[List[EnvFromItem]] = Field( + envFrom: Optional[Sequence[EnvFromItem]] = Field( default=None, description="List of sources to populate environment variables in the container.\nThe keys defined within a source must be a C_IDENTIFIER. All invalid keys\nwill be reported as an event when the container is starting. When a key exists in multiple\nsources, the value associated with the last source will take precedence.\nValues defined by an Env with a duplicate key will take precedence.\nCannot be updated.", ) @@ -1464,7 +1466,7 @@ class ExtraContainer(BaseCRD): ..., description="Name of the container specified as a DNS_LABEL.\nEach container in a pod must have a unique name (DNS_LABEL).\nCannot be updated.", ) - ports: Optional[List[Port]] = Field( + ports: Optional[Sequence[Port]] = Field( default=None, description='List of ports to expose from the container. Not specifying a port here\nDOES NOT prevent that port from being exposed. Any port which is\nlistening on the default "0.0.0.0" address inside a container will be\naccessible from the network.\nModifying this array with strategic merge patch may corrupt the data.\nFor more information See https://github.com/kubernetes/kubernetes/issues/108255.\nCannot be updated.', ) @@ -1472,7 +1474,7 @@ class ExtraContainer(BaseCRD): default=None, description="Periodic probe of container service readiness.\nContainer will be removed from service endpoints if the probe fails.\nCannot be updated.\nMore info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes", ) - resizePolicy: Optional[List[ResizePolicyItem]] = Field( + resizePolicy: Optional[Sequence[ResizePolicyItem]] = Field( default=None, description="Resources resize policy for the container." 
) resources: Optional[Resources] = Field( @@ -1511,11 +1513,11 @@ class ExtraContainer(BaseCRD): default=None, description="Whether this container should allocate a TTY for itself, also requires 'stdin' to be true.\nDefault is false.", ) - volumeDevices: Optional[List[VolumeDevice]] = Field( + volumeDevices: Optional[Sequence[VolumeDevice]] = Field( default=None, description="volumeDevices is the list of block devices to be used by the container.", ) - volumeMounts: Optional[List[VolumeMount]] = Field( + volumeMounts: Optional[Sequence[VolumeMount]] = Field( default=None, description="Pod volumes to mount into the container's filesystem.\nCannot be updated.", ) @@ -1604,7 +1606,7 @@ class Cephfs(BaseCRD): model_config = ConfigDict( extra="allow", ) - monitors: List[str] = Field( + monitors: Sequence[str] = Field( ..., description="monitors is Required: Monitors is a collection of Ceph monitors\nMore info: https://examples.k8s.io/volumes/cephfs/README.md#how-to-use-it", ) @@ -1675,7 +1677,7 @@ class ConfigMap(BaseCRD): default=None, description="defaultMode is optional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDefaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", ) - items: Optional[List[Item]] = Field( + items: Optional[Sequence[Item]] = Field( default=None, description="items if unspecified, each key-value pair in the Data field of the referenced\nConfigMap will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the ConfigMap,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", ) @@ -1719,7 +1721,7 @@ class Csi(BaseCRD): default=None, description="readOnly specifies a read-only configuration for the volume.\nDefaults to false (read/write).", ) - volumeAttributes: Optional[Dict[str, str]] = Field( + volumeAttributes: Optional[Mapping[str, str]] = Field( default=None, description="volumeAttributes stores driver-specific properties that are passed to the CSI\ndriver. Consult your driver's documentation for supported values.", ) @@ -1786,7 +1788,7 @@ class DownwardAPI(BaseCRD): default=None, description="Optional: mode bits to use on created files by default. 
Must be a\nOptional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDefaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", ) - items: Optional[List[Item1]] = Field( + items: Optional[Sequence[Item1]] = Field( default=None, description="Items is a list of downward API volume file" ) @@ -1881,11 +1883,11 @@ class Resources1(BaseCRD): model_config = ConfigDict( extra="allow", ) - limits: Optional[Dict[str, Union[Limits2, Limits3]]] = Field( + limits: Optional[Mapping[str, Union[Limits2, Limits3]]] = Field( default=None, description="Limits describes the maximum amount of compute resources allowed.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) - requests: Optional[Dict[str, Union[Requests2, Requests3]]] = Field( + requests: Optional[Mapping[str, Union[Requests2, Requests3]]] = Field( default=None, description="Requests describes the minimum amount of compute resources required.\nIf Requests is omitted for a container, it defaults to Limits if that is explicitly specified,\notherwise to an implementation-defined value. Requests cannot exceed Limits.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) @@ -1895,11 +1897,11 @@ class Selector(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression2]] = Field( + matchExpressions: Optional[Sequence[MatchExpression2]] = Field( default=None, description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", ) - matchLabels: Optional[Dict[str, str]] = Field( + matchLabels: Optional[Mapping[str, str]] = Field( default=None, description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', ) @@ -1909,7 +1911,7 @@ class Spec1(BaseCRD): model_config = ConfigDict( extra="allow", ) - accessModes: Optional[List[str]] = Field( + accessModes: Optional[Sequence[str]] = Field( default=None, description="accessModes contains the desired access modes the volume should have.\nMore info: https://kubernetes.io/docs/concepts/storage/persistent-volumes#access-modes-1", ) @@ -1951,7 +1953,7 @@ class VolumeClaimTemplate(BaseCRD): model_config = ConfigDict( extra="allow", ) - metadata: Optional[Dict[str, Any]] = Field( + metadata: Optional[Mapping[str, Any]] = Field( default=None, description="May contain labels and annotations that will be copied into the PVC\nwhen creating it. No other fields are allowed and will be rejected during\nvalidation.", ) @@ -1986,11 +1988,11 @@ class Fc(BaseCRD): default=None, description="readOnly is Optional: Defaults to false (read/write). 
ReadOnly here will force\nthe ReadOnly setting in VolumeMounts.", ) - targetWWNs: Optional[List[str]] = Field( + targetWWNs: Optional[Sequence[str]] = Field( default=None, description="targetWWNs is Optional: FC target worldwide names (WWNs)", ) - wwids: Optional[List[str]] = Field( + wwids: Optional[Sequence[str]] = Field( default=None, description="wwids Optional: FC volume world wide identifiers (wwids)\nEither wwids or combination of targetWWNs and lun must be set, but not both simultaneously.", ) @@ -2007,7 +2009,7 @@ class FlexVolume(BaseCRD): default=None, description='fsType is the filesystem type to mount.\nMust be a filesystem type supported by the host operating system.\nEx. "ext4", "xfs", "ntfs". The default filesystem depends on FlexVolume script.', ) - options: Optional[Dict[str, str]] = Field( + options: Optional[Mapping[str, str]] = Field( default=None, description="options is Optional: this field holds extra command options if any.", ) @@ -2144,7 +2146,7 @@ class Iscsi(BaseCRD): description="iscsiInterface is the interface Name that uses an iSCSI transport.\nDefaults to 'default' (tcp).", ) lun: int = Field(..., description="lun represents iSCSI Target Lun number.") - portals: Optional[List[str]] = Field( + portals: Optional[Sequence[str]] = Field( default=None, description="portals is the iSCSI Target Portal List. The portal is either an IP or ip_addr:port if the port\nis other than default (typically TCP ports 860 and 3260).", ) @@ -2229,11 +2231,11 @@ class LabelSelector4(BaseCRD): model_config = ConfigDict( extra="allow", ) - matchExpressions: Optional[List[MatchExpression2]] = Field( + matchExpressions: Optional[Sequence[MatchExpression2]] = Field( default=None, description="matchExpressions is a list of label selector requirements. The requirements are ANDed.", ) - matchLabels: Optional[Dict[str, str]] = Field( + matchLabels: Optional[Mapping[str, str]] = Field( default=None, description='matchLabels is a map of {key,value} pairs. A single {key,value} in the matchLabels\nmap is equivalent to an element of matchExpressions, whose key field is "key", the\noperator is "In", and the values array contains only "value". The requirements are ANDed.', ) @@ -2283,7 +2285,7 @@ class ConfigMap1(BaseCRD): model_config = ConfigDict( extra="allow", ) - items: Optional[List[Item2]] = Field( + items: Optional[Sequence[Item2]] = Field( default=None, description="items if unspecified, each key-value pair in the Data field of the referenced\nConfigMap will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the ConfigMap,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", ) @@ -2354,7 +2356,7 @@ class DownwardAPI1(BaseCRD): model_config = ConfigDict( extra="allow", ) - items: Optional[List[Item3]] = Field( + items: Optional[Sequence[Item3]] = Field( default=None, description="Items is a list of DownwardAPIVolume file" ) @@ -2378,7 +2380,7 @@ class Secret(BaseCRD): model_config = ConfigDict( extra="allow", ) - items: Optional[List[Item4]] = Field( + items: Optional[Sequence[Item4]] = Field( default=None, description="items if unspecified, each key-value pair in the Data field of the referenced\nSecret will be projected into the volume as a file whose name is the\nkey and content is the value. 
If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the Secret,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", ) @@ -2443,7 +2445,7 @@ class Projected(BaseCRD): default=None, description="defaultMode are the mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values for mode bits.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", ) - sources: Optional[List[Source]] = Field( + sources: Optional[Sequence[Source]] = Field( default=None, description="sources is the list of volume projections. Each entry in this list\nhandles one source.", ) @@ -2494,7 +2496,7 @@ class Rbd(BaseCRD): default="/etc/ceph/keyring", description="keyring is the path to key ring for RBDUser.\nDefault is /etc/ceph/keyring.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", ) - monitors: List[str] = Field( + monitors: Sequence[str] = Field( ..., description="monitors is a collection of Ceph monitors.\nMore info: https://examples.k8s.io/volumes/rbd/README.md#how-to-use-it", ) @@ -2569,7 +2571,7 @@ class Secret1(BaseCRD): default=None, description="defaultMode is Optional: mode bits used to set permissions on created files by default.\nMust be an octal value between 0000 and 0777 or a decimal value between 0 and 511.\nYAML accepts both octal and decimal values, JSON requires decimal values\nfor mode bits. Defaults to 0644.\nDirectories within the path are not affected by this setting.\nThis might be in conflict with other options that affect the file\nmode, like fsGroup, and the result can be other mode bits set.", ) - items: Optional[List[Item4]] = Field( + items: Optional[Sequence[Item4]] = Field( default=None, description="items If unspecified, each key-value pair in the Data field of the referenced\nSecret will be projected into the volume as a file whose name is the\nkey and content is the value. If specified, the listed keys will be\nprojected into the specified paths, and unlisted keys will not be\npresent. If a key is specified which is not present in the Secret,\nthe volume setup will error unless it is marked optional. Paths must be\nrelative and may not contain the '..' path or start with '..'.", ) @@ -2794,7 +2796,7 @@ class Ingress(BaseCRD): model_config = ConfigDict( extra="allow", ) - annotations: Optional[Dict[str, str]] = None + annotations: Optional[Mapping[str, str]] = None host: str ingressClassName: Optional[str] = None pathPrefix: str = Field( @@ -2912,7 +2914,7 @@ class HttpGet5(BaseCRD): default=None, description='Host name to connect to, defaults to the pod IP. You probably want to set\n"Host" in httpHeaders instead.', ) - httpHeaders: Optional[List[HttpHeader]] = Field( + httpHeaders: Optional[Sequence[HttpHeader]] = Field( default=None, description="Custom headers to set in the request. HTTP allows repeated headers.", ) @@ -2958,7 +2960,7 @@ class HttpGet6(BaseCRD): default=None, description='Host name to connect to, defaults to the pod IP. 
You probably want to set\n"Host" in httpHeaders instead.', ) - httpHeaders: Optional[List[HttpHeader]] = Field( + httpHeaders: Optional[Sequence[HttpHeader]] = Field( default=None, description="Custom headers to set in the request. HTTP allows repeated headers.", ) @@ -3022,7 +3024,7 @@ class HttpGet7(BaseCRD): default=None, description='Host name to connect to, defaults to the pod IP. You probably want to set\n"Host" in httpHeaders instead.', ) - httpHeaders: Optional[List[HttpHeader]] = Field( + httpHeaders: Optional[Sequence[HttpHeader]] = Field( default=None, description="Custom headers to set in the request. HTTP allows repeated headers.", ) @@ -3090,7 +3092,7 @@ class HttpGet8(BaseCRD): default=None, description='Host name to connect to, defaults to the pod IP. You probably want to set\n"Host" in httpHeaders instead.', ) - httpHeaders: Optional[List[HttpHeader]] = Field( + httpHeaders: Optional[Sequence[HttpHeader]] = Field( default=None, description="Custom headers to set in the request. HTTP allows repeated headers.", ) @@ -3182,15 +3184,15 @@ class Resources2(BaseCRD): model_config = ConfigDict( extra="allow", ) - claims: Optional[List[Claim]] = Field( + claims: Optional[Sequence[Claim]] = Field( default=None, description="Claims lists the names of resources, defined in spec.resourceClaims,\nthat are used by this container.\n\nThis is an alpha field and requires enabling the\nDynamicResourceAllocation feature gate.\n\nThis field is immutable. It can only be set for containers.", ) - limits: Optional[Dict[str, Union[Limits4, Limits5]]] = Field( + limits: Optional[Mapping[str, Union[Limits4, Limits5]]] = Field( default=None, description="Limits describes the maximum amount of compute resources allowed.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) - requests: Optional[Dict[str, Union[Requests4, Requests5]]] = Field( + requests: Optional[Mapping[str, Union[Requests4, Requests5]]] = Field( default=None, description="Requests describes the minimum amount of compute resources required.\nIf Requests is omitted for a container, it defaults to Limits if that is explicitly specified,\notherwise to an implementation-defined value. Requests cannot exceed Limits.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) @@ -3258,7 +3260,7 @@ class HttpGet9(BaseCRD): default=None, description='Host name to connect to, defaults to the pod IP. You probably want to set\n"Host" in httpHeaders instead.', ) - httpHeaders: Optional[List[HttpHeader]] = Field( + httpHeaders: Optional[Sequence[HttpHeader]] = Field( default=None, description="Custom headers to set in the request. HTTP allows repeated headers.", ) @@ -3322,19 +3324,19 @@ class InitContainer(BaseCRD): model_config = ConfigDict( extra="allow", ) - args: Optional[List[str]] = Field( + args: Optional[Sequence[str]] = Field( default=None, description='Arguments to the entrypoint.\nThe container image\'s CMD is used if this is not provided.\nVariable references $(VAR_NAME) are expanded using the container\'s environment. If a variable\ncannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced\nto a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will\nproduce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless\nof whether the variable exists or not. 
Cannot be updated.\nMore info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell', ) - command: Optional[List[str]] = Field( + command: Optional[Sequence[str]] = Field( default=None, description='Entrypoint array. Not executed within a shell.\nThe container image\'s ENTRYPOINT is used if this is not provided.\nVariable references $(VAR_NAME) are expanded using the container\'s environment. If a variable\ncannot be resolved, the reference in the input string will be unchanged. Double $$ are reduced\nto a single $, which allows for escaping the $(VAR_NAME) syntax: i.e. "$$(VAR_NAME)" will\nproduce the string literal "$(VAR_NAME)". Escaped references will never be expanded, regardless\nof whether the variable exists or not. Cannot be updated.\nMore info: https://kubernetes.io/docs/tasks/inject-data-application/define-command-argument-container/#running-a-command-in-a-shell', ) - env: Optional[List[EnvItem1]] = Field( + env: Optional[Sequence[EnvItem1]] = Field( default=None, description="List of environment variables to set in the container.\nCannot be updated.", ) - envFrom: Optional[List[EnvFromItem1]] = Field( + envFrom: Optional[Sequence[EnvFromItem1]] = Field( default=None, description="List of sources to populate environment variables in the container.\nThe keys defined within a source must be a C_IDENTIFIER. All invalid keys\nwill be reported as an event when the container is starting. When a key exists in multiple\nsources, the value associated with the last source will take precedence.\nValues defined by an Env with a duplicate key will take precedence.\nCannot be updated.", ) @@ -3358,7 +3360,7 @@ class InitContainer(BaseCRD): ..., description="Name of the container specified as a DNS_LABEL.\nEach container in a pod must have a unique name (DNS_LABEL).\nCannot be updated.", ) - ports: Optional[List[Port]] = Field( + ports: Optional[Sequence[Port]] = Field( default=None, description='List of ports to expose from the container. Not specifying a port here\nDOES NOT prevent that port from being exposed. Any port which is\nlistening on the default "0.0.0.0" address inside a container will be\naccessible from the network.\nModifying this array with strategic merge patch may corrupt the data.\nFor more information See https://github.com/kubernetes/kubernetes/issues/108255.\nCannot be updated.', ) @@ -3366,7 +3368,7 @@ class InitContainer(BaseCRD): default=None, description="Periodic probe of container service readiness.\nContainer will be removed from service endpoints if the probe fails.\nCannot be updated.\nMore info: https://kubernetes.io/docs/concepts/workloads/pods/pod-lifecycle#container-probes", ) - resizePolicy: Optional[List[ResizePolicyItem]] = Field( + resizePolicy: Optional[Sequence[ResizePolicyItem]] = Field( default=None, description="Resources resize policy for the container." 
) resources: Optional[Resources2] = Field( @@ -3405,11 +3407,11 @@ class InitContainer(BaseCRD): default=None, description="Whether this container should allocate a TTY for itself, also requires 'stdin' to be true.\nDefault is false.", ) - volumeDevices: Optional[List[VolumeDevice]] = Field( + volumeDevices: Optional[Sequence[VolumeDevice]] = Field( default=None, description="volumeDevices is the list of block devices to be used by the container.", ) - volumeMounts: Optional[List[VolumeMount]] = Field( + volumeMounts: Optional[Sequence[VolumeMount]] = Field( default=None, description="Pod volumes to mount into the container's filesystem.\nCannot be updated.", ) @@ -3419,6 +3421,11 @@ class InitContainer(BaseCRD): ) +class Location(Enum): + local = "local" + remote = "remote" + + class ReconcileStrategy(Enum): never = "never" always = "always" @@ -3512,6 +3519,21 @@ class ReadinessProbe2(BaseCRD): type: Type3 = Field(default="tcp", description="The type of readiness probe") +class RemoteSecretRef(BaseCRD): + model_config = ConfigDict( + extra="allow", + ) + adopt: Optional[bool] = Field( + default=None, + description="If the secret is adopted then the operator will delete the secret when the custom resource that uses it is deleted.", + ) + key: Optional[str] = Field( + default=None, + description="The key is optional because it may not be relevant depending on where or how the secret is used.\nFor example, for authentication see the `secretRef` field in `spec.authentication`\nfor more details.", + ) + name: str + + class Limits6(RootModel[int]): root: int = Field( ..., @@ -3544,15 +3566,15 @@ class Resources3(BaseCRD): model_config = ConfigDict( extra="allow", ) - claims: Optional[List[Claim]] = Field( + claims: Optional[Sequence[Claim]] = Field( default=None, description="Claims lists the names of resources, defined in spec.resourceClaims,\nthat are used by this container.\n\nThis is an alpha field and requires enabling the\nDynamicResourceAllocation feature gate.\n\nThis field is immutable. It can only be set for containers.", ) - limits: Optional[Dict[str, Union[Limits6, Limits7]]] = Field( + limits: Optional[Mapping[str, Union[Limits6, Limits7]]] = Field( default=None, description="Limits describes the maximum amount of compute resources allowed.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) - requests: Optional[Dict[str, Union[Requests6, Requests7]]] = Field( + requests: Optional[Mapping[str, Union[Requests6, Requests7]]] = Field( default=None, description="Requests describes the minimum amount of compute resources required.\nIf Requests is omitted for a container, it defaults to Limits if that is explicitly specified,\notherwise to an implementation-defined value. 
Requests cannot exceed Limits.\nMore info: https://kubernetes.io/docs/concepts/configuration/manage-resources-containers/", ) @@ -3604,16 +3626,16 @@ class Session(BaseCRD): model_config = ConfigDict( extra="allow", ) - args: Optional[List[str]] = Field( + args: Optional[Sequence[str]] = Field( default=None, description="The arguments to run in the session container, if omitted it will use the Docker image CMD", ) - command: Optional[List[str]] = Field( + command: Optional[Sequence[str]] = Field( default=None, description="The command to run in the session container, if omitted it will use the Docker image ENTRYPOINT", ) - env: Optional[List[EnvItem2]] = None - extraVolumeMounts: Optional[List[ExtraVolumeMount]] = Field( + env: Optional[Sequence[EnvItem2]] = None + extraVolumeMounts: Optional[Sequence[ExtraVolumeMount]] = Field( default=None, description="Additional volume mounts for the session container" ) image: str @@ -3630,6 +3652,10 @@ class Session(BaseCRD): readinessProbe: ReadinessProbe2 = Field( default={}, description="The readiness probe to use on the session container" ) + remoteSecretRef: Optional[RemoteSecretRef] = Field( + default=None, + description='The secret containing the configuration needed to start a remote session.\nThis field should be populated only when the session location is set to "remote".\nThis secret will be loaded into environment variables passed to the remote\nsession controller.\nSee: [internal/remote/config.Config] for a list of configuration options.', + ) resources: Optional[Resources3] = Field( default=None, description="Resource requirements and limits in the same format as a Pod in Kubernetes", @@ -3695,22 +3721,22 @@ class Spec(BaseCRD): authentication: Optional[Authentication] = Field( default=None, description="Authentication configuration for the session" ) - codeRepositories: Optional[List[CodeRepository]] = Field( + codeRepositories: Optional[Sequence[CodeRepository]] = Field( default=None, description="A list of code repositories and associated configuration that will be cloned in the session", ) culling: Optional[Culling] = Field( default=None, description="Culling configuration" ) - dataSources: Optional[List[DataSource]] = Field( + dataSources: Optional[Sequence[DataSource]] = Field( default=None, description="A list of data sources that should be added to the session", ) - extraContainers: Optional[List[ExtraContainer]] = Field( + extraContainers: Optional[Sequence[ExtraContainer]] = Field( default=None, description="Additional containers to add to the session statefulset.\nNOTE: The container names provided will be partially overwritten and randomized to avoid collisions", ) - extraVolumes: Optional[List[ExtraVolume]] = Field( + extraVolumes: Optional[Sequence[ExtraVolume]] = Field( default=None, description="Additional volumes to include in the statefulset for a session\nVolumes used internally by amalthea are all prefixed with 'amalthea-' so as long as you\navoid that naming you will avoid conflicts with the volumes that amalthea generates.", ) @@ -3718,7 +3744,7 @@ class Spec(BaseCRD): ..., description="Will hibernate the session, scaling the session's statefulset to zero.", ) - imagePullSecrets: Optional[List[ImagePullSecret]] = Field( + imagePullSecrets: Optional[Sequence[ImagePullSecret]] = Field( default=None, description="List of secrets that contain credentials for pulling private images", ) @@ -3726,11 +3752,15 @@ class Spec(BaseCRD): default=None, description="Configuration for an ingress to the session, if omitted a 
Kubernetes Ingress will not be created", ) - initContainers: Optional[List[InitContainer]] = Field( + initContainers: Optional[Sequence[InitContainer]] = Field( default=None, description="Additional init containers to add to the session statefulset\nNOTE: The container names provided will be partially overwritten and randomized to avoid collisions", ) - nodeSelector: Optional[Dict[str, str]] = Field( + location: Location = Field( + default="local", + description='Specifies whether the process running the user\'s session is local or remote.\n- A local session runs as a container in the same pod as where the AmaltheaSession is defined and running.\n- A remote session runs as a remote process on an external compute resource.\n The remote process is controlled by the "session_controller (TBC)" container in the session pod.', + ) + nodeSelector: Optional[Mapping[str, str]] = Field( default=None, description="Selector which must match a node's labels for the pod to be scheduled on that node.\nPassed right through to the Statefulset used for the session.", ) @@ -3750,7 +3780,7 @@ class Spec(BaseCRD): ..., description="Specification for the main session container that the user will access and use", ) - tolerations: Optional[List[Toleration]] = Field( + tolerations: Optional[Sequence[Toleration]] = Field( default=None, description="If specified, the pod's tolerations.\nPassed right through to the Statefulset used for the session.", ) @@ -3795,7 +3825,7 @@ class Status(BaseCRD): model_config = ConfigDict( extra="allow", ) - conditions: Optional[List[Condition]] = Field( + conditions: Optional[Sequence[Condition]] = Field( default=None, description="Conditions store the status conditions of the AmaltheaSessions. This is a standard thing that\nmany operators implement see https://github.com/kubernetes/community/blob/master/contributors/devel/sig-architecture/api-conventions.md#typical-status-properties", ) @@ -3807,6 +3837,7 @@ class Status(BaseCRD): default=None, description="If the state is failed then the message will contain information about what went wrong, otherwise it is empty", ) + failedSchedulingSince: Optional[datetime] = None failingSince: Optional[datetime] = None hibernatedSince: Optional[datetime] = None idle: bool = False @@ -3831,7 +3862,7 @@ class Model(BaseCRD): default=None, description="Kind is a string value representing the REST resource this object represents.\nServers may infer this from the endpoint the client submits requests to.\nCannot be updated.\nIn CamelCase.\nMore info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds", ) - metadata: Optional[Dict[str, Any]] = None + metadata: Optional[Mapping[str, Any]] = None spec: Optional[Spec] = Field( default=None, description="AmaltheaSessionSpec defines the desired state of AmaltheaSession", diff --git a/components/renku_data_services/notebooks/cr_jupyter_server.py b/components/renku_data_services/notebooks/cr_jupyter_server.py index 9d42a5cfb..f4b73dcca 100644 --- a/components/renku_data_services/notebooks/cr_jupyter_server.py +++ b/components/renku_data_services/notebooks/cr_jupyter_server.py @@ -1,11 +1,11 @@ # generated by datamodel-codegen: # filename: -# timestamp: 2025-07-07T13:49:35+00:00 +# timestamp: 2025-10-08T09:35:04+00:00 from __future__ import annotations from enum import Enum -from typing import Any, Dict, List, Optional, Union +from typing import Any, Mapping, Optional, Sequence, Union from pydantic import ConfigDict, Field from 
renku_data_services.notebooks.cr_base import BaseCRD @@ -47,11 +47,11 @@ class Oidc(BaseCRD): model_config = ConfigDict( extra="allow", ) - authorizedEmails: List[str] = Field( + authorizedEmails: Sequence[str] = Field( default=[], description='List of users (identified by Email address read from the "email" OIDC claim) which are allowed to access this Jupyter session. This list is stored as a file and passed to the `--authenticated-emails-file` option (see https://oauth2-proxy.github.io/oauth2-proxy/docs/configuration/overview#command-line-options).', ) - authorizedGroups: List[str] = Field( + authorizedGroups: Sequence[str] = Field( default=[], description='List of groups of users (read from the "groups" OIDC claim) which are allowed to access this Jupyter session. This list passed to the `--allowed-group` option (see https://oauth2-proxy.github.io/oauth2-proxy/docs/configuration/overview#command-line-options).', ) @@ -127,7 +127,7 @@ class JupyterServer(BaseCRD): default=False, description="Whether the server is hibernated or not." ) image: str = "jupyter/minimal-notebook:latest" - resources: Dict[str, Any] = Field( + resources: Mapping[str, Any] = Field( default={}, description="Regular K8s resource requests, will be set on the main notebook container.", ) @@ -169,7 +169,7 @@ class Routing(BaseCRD): default=None, description="Host under which the server will be available (eg myserver.example.com), should not include the schema.", ) - ingressAnnotations: Dict[str, Any] = {} + ingressAnnotations: Mapping[str, Any] = {} path: str = Field( default="/", description="Optionally make the server available under some path." ) @@ -222,7 +222,7 @@ class Spec(BaseCRD): default={}, description="Configuration options (such as image to run) for the Jupyter server. See also https://jupyter-server.readthedocs.io/en/latest/other/full-config.html", ) - patches: List[Patch] = Field( + patches: Sequence[Patch] = Field( default=[], description="Patches to be applied to the created child resources after template rendering. 
Currently json patches and json merge patches are supported.", ) @@ -243,7 +243,7 @@ class Model(BaseCRD): default=None, description="User defined specification for a JupyterServer custom resource.", ) - status: Dict[str, Any] = Field( + status: Mapping[str, Any] = Field( default={"children": {}, "mainPod": {}}, description="A field for Jupyter Server status information, do not modify.", ) diff --git a/components/renku_data_services/notebooks/crs.py b/components/renku_data_services/notebooks/crs.py index 69e5b4885..dcbaa0a8a 100644 --- a/components/renku_data_services/notebooks/crs.py +++ b/components/renku_data_services/notebooks/crs.py @@ -1,6 +1,9 @@ """Custom resource definition with proper names from the autogenerated code.""" +from __future__ import annotations + import re +from collections.abc import Mapping from datetime import datetime, timedelta from typing import Any, cast, override from urllib.parse import urlunparse @@ -8,6 +11,7 @@ from kubernetes.utils import parse_duration, parse_quantity from kubernetes.utils.duration import format_duration from pydantic import BaseModel, Field, field_serializer, field_validator, model_serializer +from pydantic.types import HashableItemType from ulid import ULID from renku_data_services.errors import errors @@ -32,9 +36,8 @@ Preference, PreferredDuringSchedulingIgnoredDuringExecutionItem, ReconcileStrategy, + RemoteSecretRef, RequiredDuringSchedulingIgnoredDuringExecution, - SecretRef, - Session, Size, State, Status, @@ -47,13 +50,16 @@ ) from renku_data_services.notebooks.cr_amalthea_session import EnvItem2 as SessionEnvItem from renku_data_services.notebooks.cr_amalthea_session import Item4 as SecretAsVolumeItem -from renku_data_services.notebooks.cr_amalthea_session import Limits6 as Limits +from renku_data_services.notebooks.cr_amalthea_session import Limits6 as _Limits from renku_data_services.notebooks.cr_amalthea_session import Limits7 as LimitsStr +from renku_data_services.notebooks.cr_amalthea_session import Location as SessionLocation from renku_data_services.notebooks.cr_amalthea_session import Model as _ASModel -from renku_data_services.notebooks.cr_amalthea_session import Requests6 as Requests +from renku_data_services.notebooks.cr_amalthea_session import Requests6 as _Requests from renku_data_services.notebooks.cr_amalthea_session import Requests7 as RequestsStr -from renku_data_services.notebooks.cr_amalthea_session import Resources3 as Resources +from renku_data_services.notebooks.cr_amalthea_session import Resources3 as _Resources from renku_data_services.notebooks.cr_amalthea_session import Secret1 as SecretAsVolume +from renku_data_services.notebooks.cr_amalthea_session import SecretRef as _SecretRef +from renku_data_services.notebooks.cr_amalthea_session import Session as _ASSession from renku_data_services.notebooks.cr_amalthea_session import ShmSize1 as ShmSizeStr from renku_data_services.notebooks.cr_amalthea_session import Size1 as SizeStr from renku_data_services.notebooks.cr_amalthea_session import Spec as _ASSpec @@ -162,9 +168,50 @@ def __deserialize_duration(cls, val: Any, handler: Any) -> Any: return handler(val) +class Requests(_Requests): + """Resource requests of type integer.""" + + root: int + + +class Limits(_Limits): + """Resource limits of type integer.""" + + root: int + + +class Resources(_Resources): + """Resource requests and limits spec. + + Overriding these is necessary because of + https://docs.pydantic.dev/2.11/errors/validation_errors/#string_type. 
+    An integer model cannot have a regex pattern for validation in pydantic,
+    but the code generation applies the pattern constraint to both the int and string
+    variants of the fields. The int variant then blows up at runtime whenever an int
+    is passed for validation.
+    """
+
+    limits: Mapping[str, LimitsStr | Limits] | None = None
+    requests: Mapping[str, RequestsStr | Requests] | None = None
+
+
+class SecretRef(_SecretRef, RemoteSecretRef):
+    """Reference to a secret."""
+
+    pass
+
+
+class Session(_ASSession):
+    """Amalthea spec.session schema."""
+
+    resources: Resources | None = None
+    remoteSecretRef: SecretRef | None = None
+
+
 class AmaltheaSessionSpec(_ASSpec):
     """Amalthea session specification."""
 
+    session: Session
     culling: Culling | None = None
@@ -181,8 +228,12 @@ def get_compute_resources(self) -> ComputeResources:
         """Convert the k8s resource requests and storage into usable values."""
         resource_requests: dict = {}
         if self.spec.session.resources is not None:
-            resource_requests = self.spec.session.resources.requests or {}
-            resource_requests["storage"] = self.spec.session.storage.size
+            reqs = self.spec.session.resources.requests or {}
+            reqs = {k: parse_quantity(v.root if hasattr(v, "root") else v) for k, v in reqs.items()}
+            resource_requests = {
+                **reqs,
+                "storage": parse_quantity(self.spec.session.storage.size.root),
+            }
         return ComputeResources.model_validate(resource_requests)
 
     @property
@@ -263,22 +314,22 @@ def as_apispec(self) -> apispec.SessionResponse:
                 Status(idle=True, idleSince=idle_since),
                 Culling(maxIdleDuration=max_idle),
             ) if idle_since and max_idle:
-                will_hibernate_at = idle_since + safe_parse_duration(max_idle)
+                will_hibernate_at = idle_since + max_idle
             case (
                 Status(state=State.Failed, failingSince=failing_since),
                 Culling(maxFailedDuration=max_failed),
             ) if failing_since and max_failed:
-                will_hibernate_at = failing_since + safe_parse_duration(max_failed)
+                will_hibernate_at = failing_since + max_failed
             case (
                 Status(state=State.NotReady),
                 Culling(maxAge=max_age),
             ) if max_age and self.metadata.creationTimestamp:
-                will_hibernate_at = self.metadata.creationTimestamp + safe_parse_duration(max_age)
+                will_hibernate_at = self.metadata.creationTimestamp + max_age
             case (
                 Status(state=State.Hibernated, hibernatedSince=hibernated_since),
                 Culling(maxHibernatedDuration=max_hibernated),
             ) if hibernated_since and max_hibernated:
-                will_delete_at = hibernated_since + safe_parse_duration(max_hibernated)
+                will_delete_at = hibernated_since + max_hibernated
 
         return apispec.SessionResponse(
             image=self.spec.session.image,
@@ -331,6 +382,13 @@ class AmaltheaSessionV1Alpha1SpecSessionPatch(BaseCRD):
     shmSize: int | str | None = None
     storage: Storage | None = None
     imagePullPolicy: ImagePullPolicy | None = None
+    extraVolumeMounts: list[ExtraVolumeMount] | None = None
+
+
+class AmaltheaSessionV1Alpha1MetadataPatch(BaseCRD):
+    """Patch for the metadata of an amalthea session."""
+
+    annotations: dict[str, str] | None = None
 
 
 class AmaltheaSessionV1Alpha1SpecPatch(BaseCRD):
@@ -352,6 +410,7 @@ class AmaltheaSessionV1Alpha1Patch(BaseCRD):
     """Patch for an amalthea session."""
 
+    metadata: AmaltheaSessionV1Alpha1MetadataPatch | None = None
     spec: AmaltheaSessionV1Alpha1SpecPatch
 
     def to_rfc7386(self) -> dict[str, Any]:
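A minimal sketch of what the parse_quantity normalization in get_compute_resources above achieves, assuming the kubernetes Python client already used by this repository; the request values below are hypothetical, not data from this patch:

    from decimal import Decimal

    from kubernetes.utils import parse_quantity

    # Kubernetes quantities mix plain numbers, SI suffixes ("250m" CPU) and
    # binary suffixes ("4Gi" memory); parse_quantity turns them all into Decimal.
    requests = {"cpu": "250m", "memory": "4Gi"}  # hypothetical input
    normalized = {k: parse_quantity(v) for k, v in requests.items()}
    assert normalized["cpu"] == Decimal("0.25")
    assert normalized["memory"] == Decimal(4 * 1024**3)
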
diff --git a/components/renku_data_services/notebooks/image_check.py b/components/renku_data_services/notebooks/image_check.py
new file mode 100644
index 000000000..88207227e
--- /dev/null
+++ b/components/renku_data_services/notebooks/image_check.py
@@ -0,0 +1,149 @@
+"""Functions for checking access to images.
+
+Access to Docker images falls into these cases:
+
+1. The image is public and exists. It can be accessed anonymously.
+2. The image cannot be found. It may be absent, or it may require credentials to access.
+
+For the latter case, try to find out as much as possible:
+- Look for credentials in the connected services.
+- If there are no connections defined for that user and registry, the image is not accessible.
+- Try to access it with the credentials; if it still fails, the token could be invalid.
+- Try to obtain the connected account, which checks the token validity.
+"""
+
+from __future__ import annotations
+
+from dataclasses import dataclass, field
+from typing import final
+
+import httpx
+from authlib.integrations.httpx_client import OAuthError
+
+from renku_data_services.app_config import logging
+from renku_data_services.base_models.core import APIUser
+from renku_data_services.connected_services.db import ConnectedServicesRepository
+from renku_data_services.connected_services.models import ImageProvider, OAuth2Client, OAuth2Connection
+from renku_data_services.errors import errors
+from renku_data_services.notebooks.api.classes.image import Image, ImageRepoDockerAPI
+from renku_data_services.notebooks.config import NotebooksConfig
+
+logger = logging.getLogger(__name__)
+
+
+@final
+@dataclass(frozen=True)
+class CheckResult:
+    """Result of checking access to an image."""
+
+    accessible: bool
+    response_code: int
+    image_provider: ImageProvider | None = None
+    token: str | None = field(default=None, repr=False)
+    error: errors.UnauthorizedError | None = None
+
+    def __str__(self) -> str:
+        token = "***" if self.token else "None"
+        error = "unauthorized" if self.error else "None"
+        return (
+            "CheckResult("
+            f"accessible={self.accessible}/{self.response_code}, "
+            f"provider={self.image_provider}, token={token}, error={error})"
+        )
+
+    @property
+    def connection(self) -> OAuth2Connection | None:
+        """Return the connection if present."""
+        if self.image_provider is None:
+            return None
+        if self.image_provider.connected_user is None:
+            return None
+        return self.image_provider.connected_user.connection
+
+    @property
+    def client(self) -> OAuth2Client | None:
+        """Return the OAuth2 client if present."""
+        if self.image_provider is None:
+            return None
+        return self.image_provider.provider
+
+    @property
+    def user(self) -> APIUser | None:
+        """Return the connected user if applicable."""
+        if self.image_provider is None:
+            return None
+        if self.image_provider.connected_user is None:
+            return None
+        return self.image_provider.connected_user.user
+
+
+@dataclass
+class InternalGitLabConfig:
+    """Required for internal gitlab, which will be shut down soon."""
+
+    gitlab_user: APIUser
+    nb_config: NotebooksConfig
+
+
+async def check_image_path(
+    image_path: str,
+    user: APIUser,
+    connected_services: ConnectedServicesRepository,
+    internal_gitlab_config: InternalGitLabConfig | None,
+) -> CheckResult:
+    """Check access to the given image."""
+    image = Image.from_path(image_path)
+    return await check_image(image, user, connected_services, internal_gitlab_config)
+
+
+async def check_image(
+    image: Image,
+    user: APIUser,
+    connected_services: ConnectedServicesRepository,
+    intern_gl_cfg: InternalGitLabConfig | None,
+) -> CheckResult:
+    """Check access to the given image."""
+
+    reg_api: ImageRepoDockerAPI = image.repo_api()  # public images
+    unauth_error: 
errors.UnauthorizedError | None = None + image_provider = await connected_services.get_provider_for_image(user, image) + connected_user = image_provider.connected_user if image_provider is not None else None + connection = connected_user.connection if connected_user is not None else None + if image_provider is not None: + try: + reg_api = await connected_services.get_image_repo_client(image_provider) + except errors.UnauthorizedError as e: + logger.info(f"Error getting image repo client for image {image}: {e}") + unauth_error = e + except OAuthError as e: + logger.info(f"Error getting image repo client for image {image}: {e}") + unauth_error = errors.UnauthorizedError(message=f"OAuth error when getting repo client for image: {image}") + unauth_error.__cause__ = e + elif ( + intern_gl_cfg + and image.hostname == intern_gl_cfg.nb_config.git.registry + and intern_gl_cfg.gitlab_user.access_token + ): + logger.debug(f"Using internal gitlab at {intern_gl_cfg.nb_config.git.registry}") + reg_api = reg_api.with_oauth2_token(intern_gl_cfg.gitlab_user.access_token) + + try: + result = await reg_api.image_check(image) + except httpx.HTTPError as e: + logger.info(f"Error connecting {reg_api.scheme}://{reg_api.hostname}: {e}") + result = 0 + + if result != 200 and connection is not None: + try: + await connected_services.get_oauth2_connected_account(connection.id, user) + except errors.UnauthorizedError as e: + logger.info(f"Error getting connected account: {e}") + unauth_error = e + + return CheckResult( + accessible=result == 200, + response_code=result, + image_provider=image_provider, + token=reg_api.oauth2_token, + error=unauth_error, + ) diff --git a/components/renku_data_services/notebooks/models.py b/components/renku_data_services/notebooks/models.py index 7720c6936..82cd414a2 100644 --- a/components/renku_data_services/notebooks/models.py +++ b/components/renku_data_services/notebooks/models.py @@ -2,15 +2,21 @@ from dataclasses import dataclass, field from pathlib import Path +from typing import cast -from kubernetes.client import V1Secret +from kubernetes.client import V1ObjectMeta, V1Secret from pydantic import AliasGenerator, BaseModel, Field, Json +from renku_data_services.data_connectors.models import DataConnectorSecret +from renku_data_services.errors import errors from renku_data_services.errors.errors import ProgrammingError from renku_data_services.notebooks.crs import ( AmaltheaSessionV1Alpha1, + DataSource, + ExtraContainer, ExtraVolume, ExtraVolumeMount, + InitContainer, SecretRef, ) @@ -92,7 +98,20 @@ class ExtraSecret: volume_mount: ExtraVolumeMount | None = None adopt: bool = True - def key_ref(self, key: str) -> SecretRef: + def __post_init__(self) -> None: + if not self.secret.metadata: + raise errors.ValidationError(message="The secret in Extra secret is missing its metadata.") + if isinstance(self.secret.metadata, V1ObjectMeta): + secret_name = cast(str | None, self.secret.metadata.name) + else: + secret_name = cast(str | None, self.secret.metadata.get("name")) + if not isinstance(secret_name, str): + raise errors.ValidationError(message="The secret name in Extra secret is not a string.") + if len(secret_name) == 0: + raise errors.ValidationError(message="The secret name in Extra secret is empty.") + self.__secret_name = secret_name + + def key_ref(self, key: str | None = None) -> SecretRef: """Get an amalthea secret key reference.""" meta = self.secret.metadata if not meta: @@ -102,7 +121,7 @@ def key_ref(self, key: str) -> SecretRef: raise ProgrammingError(message="Cannot 
get reference to a secret that does not have a name.") data = self.secret.data or {} string_data = self.secret.string_data or {} - if key not in data and key not in string_data: + if key is not None and key not in data and key not in string_data: raise KeyError(f"Cannot find the key {key} in the secret with name {secret_name}") return SecretRef(key=key, name=secret_name, adopt=self.adopt) @@ -115,3 +134,38 @@ def ref(self) -> SecretRef: if not secret_name: raise ProgrammingError(message="Cannot get reference to a secret that does not have a name.") return SecretRef(name=secret_name, adopt=self.adopt) + + @property + def name(self) -> str: + """Return the name of the secret.""" + return self.__secret_name + + +@dataclass(frozen=True, kw_only=True) +class SessionExtraResources: + """Represents extra resources to add to an amalthea session.""" + + containers: list[ExtraContainer] = field(default_factory=list) + data_connector_secrets: dict[str, list[DataConnectorSecret]] = field(default_factory=dict) + data_sources: list[DataSource] = field(default_factory=list) + init_containers: list[InitContainer] = field(default_factory=list) + secrets: list[ExtraSecret] = field(default_factory=list) + volume_mounts: list[ExtraVolumeMount] = field(default_factory=list) + volumes: list[ExtraVolume] = field(default_factory=list) + + def concat(self, added_extras: "SessionExtraResources | None") -> "SessionExtraResources": + """Concatenates these session extras with more session extras.""" + if added_extras is None: + return self + data_connector_secrets: dict[str, list[DataConnectorSecret]] = dict() + data_connector_secrets.update(self.data_connector_secrets) + data_connector_secrets.update(added_extras.data_connector_secrets) + return SessionExtraResources( + containers=self.containers + added_extras.containers, + data_connector_secrets=data_connector_secrets, + data_sources=self.data_sources + added_extras.data_sources, + init_containers=self.init_containers + added_extras.init_containers, + secrets=self.secrets + added_extras.secrets, + volume_mounts=self.volume_mounts + added_extras.volume_mounts, + volumes=self.volumes + added_extras.volumes, + ) diff --git a/components/renku_data_services/notebooks/util/authn.py b/components/renku_data_services/notebooks/util/authn.py deleted file mode 100644 index d01b169e0..000000000 --- a/components/renku_data_services/notebooks/util/authn.py +++ /dev/null @@ -1,41 +0,0 @@ -"""Authentication that is compatible with the tokens sent to the notebook service.""" - -from collections.abc import Callable, Coroutine -from functools import wraps -from typing import Any, Concatenate, ParamSpec, TypeVar - -from sanic import Request - -from renku_data_services.base_models import AnonymousAPIUser, APIUser, AuthenticatedAPIUser, Authenticator - -_T = TypeVar("_T") -_P = ParamSpec("_P") - - -def notebooks_internal_gitlab_authenticate( - authenticator: Authenticator, -) -> Callable[ - [Callable[Concatenate[Request, AuthenticatedAPIUser | AnonymousAPIUser, APIUser, _P], Coroutine[Any, Any, _T]]], - Callable[Concatenate[Request, AuthenticatedAPIUser | AnonymousAPIUser, _P], Coroutine[Any, Any, _T]], -]: - """Decorator for a Sanic handler that that adds a notebooks user.""" - - def decorator( - f: Callable[ - Concatenate[Request, AuthenticatedAPIUser | AnonymousAPIUser, APIUser, _P], Coroutine[Any, Any, _T] - ], - ) -> Callable[Concatenate[Request, AuthenticatedAPIUser | AnonymousAPIUser, _P], Coroutine[Any, Any, _T]]: - @wraps(f) - async def decorated_function( - request: Request, - 
user: AuthenticatedAPIUser | AnonymousAPIUser,
-            *args: _P.args,
-            **kwargs: _P.kwargs,
-        ) -> _T:
-            access_token = str(request.headers.get("Gitlab-Access-Token"))
-            internal_gitlab_user = await authenticator.authenticate(access_token, request)
-            return await f(request, user, internal_gitlab_user, *args, **kwargs)
-
-        return decorated_function
-
-    return decorator
diff --git a/components/renku_data_services/notebooks/util/kubernetes_.py b/components/renku_data_services/notebooks/util/kubernetes_.py
index f64a219a7..180b8bc05 100644
--- a/components/renku_data_services/notebooks/util/kubernetes_.py
+++ b/components/renku_data_services/notebooks/util/kubernetes_.py
@@ -19,6 +19,7 @@
 from __future__ import annotations
 
 import re
+from collections.abc import Sequence
 from enum import StrEnum
 from hashlib import md5
 from typing import Any, TypeAlias, cast
@@ -108,7 +109,7 @@ class PatchKind(StrEnum):
     merge = "application/merge-patch+json"
 
 
-def find_container(patches: list[Patch], container_name: str) -> dict[str, Any] | None:
+def find_container(patches: Sequence[Patch], container_name: str) -> dict[str, Any] | None:
     """Find the json patch corresponding a given container."""
     # rfc 7386 patches are dictionaries, i.e. merge patch or json merge patch
     # rfc 6902 patches are lists, i.e. json patch
diff --git a/components/renku_data_services/notebooks/utils.py b/components/renku_data_services/notebooks/utils.py
index 132abc136..e9cdd3b5d 100644
--- a/components/renku_data_services/notebooks/utils.py
+++ b/components/renku_data_services/notebooks/utils.py
@@ -13,44 +13,71 @@
 )
 
 
-def merge_node_affinities(
+def intersect_node_affinities(
     node_affinity1: NodeAffinity,
     node_affinity2: NodeAffinity,
 ) -> NodeAffinity:
     """Merge two node affinities into a brand new object."""
     output = NodeAffinity()
-    if node_affinity1.preferredDuringSchedulingIgnoredDuringExecution:
-        output.preferredDuringSchedulingIgnoredDuringExecution = (
-            node_affinity1.preferredDuringSchedulingIgnoredDuringExecution
-        )
-    if node_affinity2.preferredDuringSchedulingIgnoredDuringExecution:
-        if output.preferredDuringSchedulingIgnoredDuringExecution:
-            output.preferredDuringSchedulingIgnoredDuringExecution.extend(
-                node_affinity2.preferredDuringSchedulingIgnoredDuringExecution
-            )
-        else:
-            output.preferredDuringSchedulingIgnoredDuringExecution = (
-                node_affinity2.preferredDuringSchedulingIgnoredDuringExecution
-            )
+
     if (
-        node_affinity1.requiredDuringSchedulingIgnoredDuringExecution
-        and node_affinity1.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms
+        node_affinity1.preferredDuringSchedulingIgnoredDuringExecution
+        or node_affinity2.preferredDuringSchedulingIgnoredDuringExecution
     ):
-        output.requiredDuringSchedulingIgnoredDuringExecution = RequiredDuringSchedulingIgnoredDuringExecution(
-            nodeSelectorTerms=node_affinity1.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms
-        )
+        items = [
+            *(node_affinity1.preferredDuringSchedulingIgnoredDuringExecution or []),
+            *(node_affinity2.preferredDuringSchedulingIgnoredDuringExecution or []),
+        ]
+        if items:
+            output.preferredDuringSchedulingIgnoredDuringExecution = items
+
+    # node_affinity1 and node_affinity2 have nodeSelectorTerms, we perform a cross product
     if (
+        node_affinity1.requiredDuringSchedulingIgnoredDuringExecution
+        and node_affinity1.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms
+    ) and (
         node_affinity2.requiredDuringSchedulingIgnoredDuringExecution
        and node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms
     ):
-        if output.requiredDuringSchedulingIgnoredDuringExecution:
-            output.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms.extend(
-                node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms
+        terms_1 = [*node_affinity1.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms]
+        terms_2 = [*node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms]
+        terms_out: list[NodeSelectorTerm] = []
+        for term_1 in terms_1:
+            for term_2 in terms_2:
+                term_out = NodeSelectorTerm()
+                matchExpressions = [*(term_1.matchExpressions or []), *(term_2.matchExpressions or [])]
+                if matchExpressions:
+                    term_out.matchExpressions = matchExpressions
+                matchFields = [*(term_1.matchFields or []), *(term_2.matchFields or [])]
+                if matchFields:
+                    term_out.matchFields = matchFields
+                if term_out.matchExpressions or term_out.matchFields:
+                    terms_out.append(term_out)
+        if terms_out:
+            output.requiredDuringSchedulingIgnoredDuringExecution = RequiredDuringSchedulingIgnoredDuringExecution(
+                nodeSelectorTerms=terms_out
+            )
+    # only node_affinity1 has nodeSelectorTerms, we pick them unchanged
+    elif (
+        node_affinity1.requiredDuringSchedulingIgnoredDuringExecution
+        and node_affinity1.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms
+    ):
+        terms_1 = [*node_affinity1.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms]
+        if terms_1:
+            output.requiredDuringSchedulingIgnoredDuringExecution = RequiredDuringSchedulingIgnoredDuringExecution(
+                nodeSelectorTerms=terms_1
             )
-        else:
+    # only node_affinity2 has nodeSelectorTerms, we pick them unchanged
+    elif (
+        node_affinity2.requiredDuringSchedulingIgnoredDuringExecution
+        and node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms
+    ):
+        terms_2 = [*node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms]
+        if terms_2:
             output.requiredDuringSchedulingIgnoredDuringExecution = RequiredDuringSchedulingIgnoredDuringExecution(
-                nodeSelectorTerms=(node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms)
+                nodeSelectorTerms=terms_2
             )
+
     return output
 
 
@@ -95,7 +122,7 @@ def node_affinity_from_resource_class(
     affinity = default_affinity.model_copy(deep=True)
 
     if affinity.nodeAffinity:
-        affinity.nodeAffinity = merge_node_affinities(affinity.nodeAffinity, rc_node_affinity)
+        affinity.nodeAffinity = intersect_node_affinities(affinity.nodeAffinity, rc_node_affinity)
     else:
         affinity.nodeAffinity = rc_node_affinity
     return affinity
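A minimal sketch of the cross-product semantics implemented by intersect_node_affinities above, using hypothetical stand-in types rather than the generated CRD models: Kubernetes ORs the terms of a required node-affinity block and ANDs the expressions inside each term, so requiring both inputs means pairing every term of the first with every term of the second.

    from dataclasses import dataclass, field

    @dataclass
    class Term:
        # Stand-in for the generated NodeSelectorTerm model (hypothetical).
        matchExpressions: list[dict] = field(default_factory=list)

    def cross_product(terms_1: list[Term], terms_2: list[Term]) -> list[Term]:
        # A node satisfying one combined term satisfies one term from each input.
        return [
            Term(t1.matchExpressions + t2.matchExpressions)
            for t1 in terms_1
            for t2 in terms_2
        ]

    gpu = Term([{"key": "accelerator", "operator": "Exists"}])
    ssd = Term([{"key": "disk", "operator": "In", "values": ["ssd"]}])
    assert cross_product([gpu], [ssd]) == [
        Term(
            [
                {"key": "accelerator", "operator": "Exists"},
                {"key": "disk", "operator": "In", "values": ["ssd"]},
            ]
        )
    ]
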
node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms ): - if output.requiredDuringSchedulingIgnoredDuringExecution: - output.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms.extend( - node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms + terms_1 = [*node_affinity1.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms] + terms_2 = [*node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms] + terms_out: list[NodeSelectorTerm] = [] + for term_1 in terms_1: + for term_2 in terms_2: + term_out = NodeSelectorTerm() + matchExpressions = [*(term_1.matchExpressions or []), *(term_2.matchExpressions or [])] + if matchExpressions: + term_out.matchExpressions = matchExpressions + matchFields = [*(term_1.matchFields or []), *(term_2.matchFields or [])] + if matchFields: + term_out.matchFields = matchFields + if term_out.matchExpressions or term_out.matchFields: + terms_out.append(term_out) + if terms_out: + output.requiredDuringSchedulingIgnoredDuringExecution = RequiredDuringSchedulingIgnoredDuringExecution( + nodeSelectorTerms=terms_out + ) + # only node_affinity1 has nodeSelectorTerms, we pick them unchanged + elif ( + node_affinity1.requiredDuringSchedulingIgnoredDuringExecution + and node_affinity1.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms + ): + terms_1 = [*node_affinity1.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms] + if terms_1: + output.requiredDuringSchedulingIgnoredDuringExecution = RequiredDuringSchedulingIgnoredDuringExecution( + nodeSelectorTerms=terms_1 ) - else: + # only node_affinity2 has nodeSelectorTerms, we pick them unchanged + elif ( + node_affinity2.requiredDuringSchedulingIgnoredDuringExecution + and node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms + ): + terms_2 = [*node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms] + if terms_2: output.requiredDuringSchedulingIgnoredDuringExecution = RequiredDuringSchedulingIgnoredDuringExecution( - nodeSelectorTerms=(node_affinity2.requiredDuringSchedulingIgnoredDuringExecution.nodeSelectorTerms) + nodeSelectorTerms=terms_2 ) + return output @@ -95,7 +122,7 @@ def node_affinity_from_resource_class( affinity = default_affinity.model_copy(deep=True) if affinity.nodeAffinity: - affinity.nodeAffinity = merge_node_affinities(affinity.nodeAffinity, rc_node_affinity) + affinity.nodeAffinity = intersect_node_affinities(affinity.nodeAffinity, rc_node_affinity) else: affinity.nodeAffinity = rc_node_affinity return affinity diff --git a/components/renku_data_services/platform/api.spec.yaml b/components/renku_data_services/platform/api.spec.yaml index 7080df987..1605c5023 100644 --- a/components/renku_data_services/platform/api.spec.yaml +++ b/components/renku_data_services/platform/api.spec.yaml @@ -46,6 +46,118 @@ paths: $ref: "#/components/responses/Error" tags: - platform + /platform/redirects: + get: + summary: Return a list of redirects from old URLs to new locations + parameters: + - in: query + description: query parameters + name: params + style: form + explode: true + schema: + $ref: "#/components/schemas/UrlRedirectPlansGetQuery" + responses: + "200": + description: A list of redirect plans + content: + application/json: + schema: + $ref: "#/components/schemas/UrlRedirectPlanList" + default: + $ref: "#/components/responses/Error" + tags: + - platform + post: + summary: Create a new redirect plan + requestBody: + required: true + content: 
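+          # Example request body for POST /platform/redirects (values mirror the
+          # SourceUrl/TargetUrl schema examples defined below; illustrative only):
+          #   {"source_url": "/projects/ns/slug", "target_url": "/p/project-ulid"}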
+ application/json: + schema: + $ref: "#/components/schemas/UrlRedirectPlanPost" + responses: + "201": + description: The redirect info was created + content: + application/json: + schema: + $ref: "#/components/schemas/UrlRedirectPlan" + default: + $ref: "#/components/responses/Error" + tags: + - platform + /platform/redirects/{source_url}: + get: + summary: Return a redirect info for the specified source URL + parameters: + - in: path + name: source_url + required: true + description: The url-encoded source URL + schema: + type: string + responses: + "200": + description: The redirect plan + content: + application/json: + schema: + $ref: "#/components/schemas/UrlRedirectPlan" + "404": + description: No redirect for the source URL + content: + application/json: + schema: + $ref: "#/components/schemas/ErrorResponse" + default: + $ref: "#/components/responses/Error" + tags: + - platform + patch: + summary: Update a redirect plan + parameters: + - in: path + name: source_url + required: true + description: The url-encoded (original) source URL + schema: + type: string + - $ref: "#/components/parameters/If-Match" + requestBody: + required: true + content: + application/json: + schema: + $ref: "#/components/schemas/UrlRedirectPlanPatch" + responses: + "200": + description: The redirect info was updated + content: + application/json: + schema: + $ref: "#/components/schemas/UrlRedirectPlan" + default: + $ref: "#/components/responses/Error" + tags: + - platform + delete: + summary: Delete a redirect plan + parameters: + - in: path + name: source_url + required: true + description: The url-encoded (original) source URL + schema: + type: string + - $ref: "#/components/parameters/If-Match" + responses: + "204": + description: The redirect plan was removed or did not exist in the first place + default: + $ref: "#/components/responses/Error" + tags: + - platform components: schemas: PlatformConfig: @@ -67,6 +179,62 @@ components: incident_banner: $ref: "#/components/schemas/IncidentBanner" additionalProperties: false + SourceUrl: + description: | + The URL that should be redirected. Must begin with `/projects` or + `https://gitlab.renkulab.io/` + type: string + example: + "/projects/ns/slug" + TargetUrl: + description: | + The URL that should be redirected to. 
Must begin with `https://` or + have the structure `/p/[ULID]` + type: string + example: + "/p/project-ulid" + UrlRedirectPlanList: + description: A list of redirect info objects + type: array + items: + $ref: "#/components/schemas/UrlRedirectPlan" + UrlRedirectPlan: + description: A mapping from src URLs to targets, either Renku entities or URLs + type: object + properties: + etag: + $ref: "#/components/schemas/ETag" + source_url: + $ref: "#/components/schemas/SourceUrl" + target_url: + $ref: "#/components/schemas/TargetUrl" + required: + - etag + - source_url + - target_url + UrlRedirectPlansGetQuery: + description: Query params for redirect list get request + allOf: + - $ref: "#/components/schemas/PaginationRequest" + UrlRedirectPlanPatch: + description: Schema for updating a redirect info + type: object + properties: + target_url: + $ref: "#/components/schemas/TargetUrl" + additionalProperties: false + UrlRedirectPlanPost: + description: Schema for creating a new redirect info + type: object + properties: + source_url: + $ref: "#/components/schemas/SourceUrl" + target_url: + $ref: "#/components/schemas/TargetUrl" + required: + - source_url + - target_url + additionalProperties: false ETag: type: string description: Entity Tag @@ -77,6 +245,21 @@ components: The contents of the incident banner. This value accepts content written using Markdown. example: RenkuLab is experiencing issues, some features may be unavailable. + PaginationRequest: + type: object + additionalProperties: false + properties: + page: + description: Result's page number starting from 1 + type: integer + minimum: 1 + default: 1 + per_page: + description: The number of results per page + type: integer + minimum: 1 + maximum: 100 + default: 20 ErrorResponse: type: object properties: diff --git a/components/renku_data_services/platform/apispec.py b/components/renku_data_services/platform/apispec.py index 7fac7954c..72a06d07d 100644 --- a/components/renku_data_services/platform/apispec.py +++ b/components/renku_data_services/platform/apispec.py @@ -1,15 +1,52 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2025-03-19T10:21:15+00:00 +# timestamp: 2025-09-22T11:13:16+00:00 from __future__ import annotations -from typing import Optional +from typing import List, Optional from pydantic import ConfigDict, Field, RootModel from renku_data_services.platform.apispec_base import BaseAPISpec +class UrlRedirectPlanPatch(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + target_url: Optional[str] = Field( + None, + description="The URL that should be redirected to. Must begin with `https://` or\nhave the structure `/p/[ULID]`\n", + examples=["/p/project-ulid"], + ) + + +class UrlRedirectPlanPost(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + source_url: str = Field( + ..., + description="The URL that should be redirected. Must begin with `/projects` or\n`https://gitlab.renkulab.io/`\n", + examples=["/projects/ns/slug"], + ) + target_url: str = Field( + ..., + description="The URL that should be redirected to. 
Must begin with `https://` or\nhave the structure `/p/[ULID]`\n", + examples=["/p/project-ulid"], + ) + + +class PaginationRequest(BaseAPISpec): + model_config = ConfigDict( + extra="forbid", + ) + page: int = Field(1, description="Result's page number starting from 1", ge=1) + per_page: int = Field( + 20, description="The number of results per page", ge=1, le=100 + ) + + class Error(BaseAPISpec): code: int = Field(..., examples=[1404], gt=0) detail: Optional[str] = Field( @@ -47,3 +84,33 @@ class PlatformConfigPatch(BaseAPISpec): description="The contents of the incident banner.\nThis value accepts content written using Markdown.\n", examples=["RenkuLab is experiencing issues, some features may be unavailable."], ) + + +class UrlRedirectPlan(BaseAPISpec): + etag: str = Field( + ..., description="Entity Tag", examples=["9EE498F9D565D0C41E511377425F32F3"] + ) + source_url: str = Field( + ..., + description="The URL that should be redirected. Must begin with `/projects` or\n`https://gitlab.renkulab.io/`\n", + examples=["/projects/ns/slug"], + ) + target_url: str = Field( + ..., + description="The URL that should be redirected to. Must begin with `https://` or\nhave the structure `/p/[ULID]`\n", + examples=["/p/project-ulid"], + ) + + +class UrlRedirectPlansGetQuery(PaginationRequest): + pass + + +class PlatformRedirectsGetParametersQuery(BaseAPISpec): + params: Optional[UrlRedirectPlansGetQuery] = None + + +class UrlRedirectPlanList(RootModel[List[UrlRedirectPlan]]): + root: List[UrlRedirectPlan] = Field( + ..., description="A list of redirect info objects" + ) diff --git a/components/renku_data_services/platform/blueprints.py b/components/renku_data_services/platform/blueprints.py index ecb452fbb..4bbc5f638 100644 --- a/components/renku_data_services/platform/blueprints.py +++ b/components/renku_data_services/platform/blueprints.py @@ -1,6 +1,8 @@ """Platform configuration blueprint.""" +import urllib.parse from dataclasses import dataclass +from typing import Any from sanic import Request, empty from sanic.response import HTTPResponse, JSONResponse @@ -10,10 +12,17 @@ from renku_data_services.base_api.auth import authenticate, only_admins from renku_data_services.base_api.blueprint import BlueprintFactoryResponse, CustomBlueprint from renku_data_services.base_api.etag import extract_if_none_match, if_match_required -from renku_data_services.base_models.validation import validated_json +from renku_data_services.base_api.misc import validate_query +from renku_data_services.base_api.pagination import PaginationRequest, paginate +from renku_data_services.base_models.validation import validate_and_dump, validated_json from renku_data_services.platform import apispec -from renku_data_services.platform.core import validate_platform_config_patch -from renku_data_services.platform.db import PlatformRepository +from renku_data_services.platform.core import ( + validate_platform_config_patch, + validate_url_redirect_patch, + validate_url_redirect_post, +) +from renku_data_services.platform.db import PlatformRepository, UrlRedirectRepository +from renku_data_services.platform.models import UrlRedirectConfig @dataclass(kw_only=True) @@ -68,3 +77,106 @@ async def _patch_singleton_configuration( ) return "/platform/config", ["PATCH"], _patch_singleton_configuration + + +@dataclass(kw_only=True) +class PlatformUrlRedirectBP(CustomBlueprint): + """Handlers for the platform redirects.""" + + url_redirect_repo: UrlRedirectRepository + authenticator: base_models.Authenticator + + @staticmethod + def 
_dump_redirect(redirect: UrlRedirectConfig) -> dict[str, str]:
+        """Dumps a redirect config for API responses."""
+        result = dict(
+            etag=redirect.etag,
+            source_url=redirect.source_url,
+            target_url=redirect.target_url,
+        )
+        return result
+
+    def delete_url_redirect_config(self) -> BlueprintFactoryResponse:
+        """Delete a specific redirect config."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        @if_match_required
+        async def _delete_url_redirect_config(
+            _: Request, user: base_models.APIUser, url: str, etag: str
+        ) -> HTTPResponse:
+            source_url = urllib.parse.unquote(url)
+            await self.url_redirect_repo.delete_redirect_config(user=user, etag=etag, source_url=source_url)
+            return HTTPResponse(status=204)
+
+        return "/platform/redirects/<url>", ["DELETE"], _delete_url_redirect_config
+
+    def get_url_redirect_configs(self) -> BlueprintFactoryResponse:
+        """List all redirects."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        @validate_query(query=apispec.PaginationRequest)
+        @paginate
+        async def _get_all_redirects(
+            _: Request,
+            user: base_models.APIUser,
+            pagination: PaginationRequest,
+            query: apispec.UrlRedirectPlansGetQuery,
+        ) -> tuple[list[dict[str, Any]], int]:
+            redirects, total_num = await self.url_redirect_repo.get_redirect_configs(user=user, pagination=pagination)
+
+            redirects_list: list[dict[str, Any]] = [
+                validate_and_dump(apispec.UrlRedirectPlan, self._dump_redirect(r)) for r in redirects
+            ]
+            return redirects_list, total_num
+
+        return "/platform/redirects", ["GET"], _get_all_redirects
+
+    def get_url_redirect_config(self) -> BlueprintFactoryResponse:
+        """Get a specific redirect config."""
+
+        @authenticate(self.authenticator)
+        async def _get_url_redirect_config(_: Request, user: base_models.APIUser, url: str) -> JSONResponse:
+            source_url = urllib.parse.unquote(url)
+            redirect = await self.url_redirect_repo.get_redirect_config_by_source_url(user=user, source_url=source_url)
+            return validated_json(apispec.UrlRedirectPlan, redirect)
+
+        return "/platform/redirects/<url>", ["GET"], _get_url_redirect_config
+
+    def post_url_redirect_config(self) -> BlueprintFactoryResponse:
+        """Create a new redirect config."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        @validate(json=apispec.UrlRedirectPlanPost)
+        async def _post_redirect_config(
+            _: Request,
+            user: base_models.APIUser,
+            body: apispec.UrlRedirectPlanPost,
+        ) -> JSONResponse:
+            url_redirect_post = validate_url_redirect_post(body)
+            redirect = await self.url_redirect_repo.create_redirect_config(user=user, post=url_redirect_post)
+            return validated_json(apispec.UrlRedirectPlan, redirect, status=201)
+
+        return "/platform/redirects", ["POST"], _post_redirect_config
+
+    def patch_url_redirect_config(self) -> BlueprintFactoryResponse:
+        """Update a specific redirect config."""
+
+        @authenticate(self.authenticator)
+        @only_admins
+        @if_match_required
+        @validate(json=apispec.UrlRedirectPlanPatch)
+        async def _patch_url_redirect_config(
+            _: Request, user: base_models.APIUser, url: str, body: apispec.UrlRedirectPlanPatch, etag: str
+        ) -> JSONResponse:
+            source_url = urllib.parse.unquote(url)
+            url_redirect_patch = validate_url_redirect_patch(source_url, body)
+
+            updated_redirect = await self.url_redirect_repo.update_redirect_config(
+                user=user, etag=etag, patch=url_redirect_patch
+            )
+            return validated_json(apispec.UrlRedirectPlan, updated_redirect)
+
+        return "/platform/redirects/<url>", ["PATCH"], _patch_url_redirect_config
diff --git a/components/renku_data_services/platform/core.py
b/components/renku_data_services/platform/core.py index 6653bbbeb..3ffc2e676 100644 --- a/components/renku_data_services/platform/core.py +++ b/components/renku_data_services/platform/core.py @@ -1,8 +1,96 @@ """Business logic for the platform configuration.""" +import os +import re +from urllib.parse import ParseResult, urlparse + +from renku_data_services import errors from renku_data_services.platform import apispec, models +v2_project_pattern = re.compile(r"^/p/[0-7][0-9A-HJKMNP-TV-Z]{25}$") + def validate_platform_config_patch(patch: apispec.PlatformConfigPatch) -> models.PlatformConfigPatch: """Validate the update to the platform configuration.""" return models.PlatformConfigPatch(incident_banner=patch.incident_banner) + + +def _ensure_no_extras(parsed: ParseResult, position: str) -> None: + """Ensure that the parsed URL has no extra components.""" + if parsed.params or parsed.query or parsed.fragment: + raise errors.ValidationError(message=f"The {position} URL must not include parameters, a query, or a fragment.") + canonical_path = os.path.normpath(parsed.path) + if parsed.path != canonical_path: + raise errors.ValidationError(message=f"The {position} URL path is not canonical.") + + +def _validate_source_gitlab_url(parsed: ParseResult) -> str: + """Validate the GitLab URL.""" + if parsed.scheme != "https": + raise errors.ValidationError(message="The source URL must use HTTPS.") + if parsed.netloc != "gitlab.renkulab.io": + raise errors.ValidationError(message="The source URL host must be gitlab.renkulab.io.") + return parsed.geturl() + + +def _validate_source_v1_url(parsed: ParseResult) -> str: + """Validate the source V1 URL.""" + if parsed.scheme: + raise errors.ValidationError(message="The source URL should not include a scheme.") + if parsed.netloc: + raise errors.ValidationError(message="The source URL should not include a host.") + if not parsed.path.startswith("/projects/"): + raise errors.ValidationError(message="The source URL must start with /projects/.") + return parsed.geturl() + + +def _validate_target_external_url(parsed: ParseResult) -> str: + """Validate the external target URL.""" + if parsed.scheme != "https": + raise errors.ValidationError(message="The target URL must use HTTPS.") + return parsed.geturl() + + +def _validate_target_v2_project_url(parsed: ParseResult) -> str: + """Validate the target V2 project URL.""" + if parsed.scheme: + raise errors.ValidationError(message="The target URL should not include a scheme.") + if parsed.netloc: + raise errors.ValidationError(message="The target URL should not include a host.") + if not v2_project_pattern.match(parsed.path): + raise errors.ValidationError(message="The target URL path must match the pattern /p/ULID.") + return parsed.geturl() + + +def validate_source_url(url: str) -> str: + """Validate the source URL.""" + parsed = urlparse(url) + _ensure_no_extras(parsed, "source") + if parsed.scheme: + return _validate_source_gitlab_url(parsed) + return _validate_source_v1_url(parsed) + + +def validate_target_url(url: str) -> str: + """Validate the target URL.""" + parsed = urlparse(url) + _ensure_no_extras(parsed, "target") + if parsed.scheme or parsed.netloc: + return _validate_target_external_url(parsed) + return _validate_target_v2_project_url(parsed) + + +def validate_url_redirect_patch(source_url: str, patch: apispec.UrlRedirectPlanPatch) -> models.UrlRedirectUpdateConfig: + """Validate the update of a URL redirect.""" + return models.UrlRedirectUpdateConfig( + source_url=validate_source_url(source_url), + 
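+        # A patch without a target_url is kept as None here; the repository
+        # layer treats None as "leave the stored target_url unchanged".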
target_url=validate_target_url(patch.target_url) if patch.target_url is not None else None,
+    )
+
+
+def validate_url_redirect_post(post: apispec.UrlRedirectPlanPost) -> models.UnsavedUrlRedirectConfig:
+    """Validate the creation of a URL redirect."""
+    return models.UnsavedUrlRedirectConfig(
+        source_url=validate_source_url(post.source_url),
+        target_url=validate_target_url(post.target_url),
+    )
diff --git a/components/renku_data_services/platform/db.py b/components/renku_data_services/platform/db.py
index 60370bb43..1706524ed 100644
--- a/components/renku_data_services/platform/db.py
+++ b/components/renku_data_services/platform/db.py
@@ -2,10 +2,12 @@
 
 from collections.abc import Callable
 
-from sqlalchemy import select
+from sqlalchemy import func, select
 from sqlalchemy.ext.asyncio import AsyncSession
 
 from renku_data_services import base_models, errors
+from renku_data_services.authz.authz import Authz
+from renku_data_services.base_api.pagination import PaginationRequest
 from renku_data_services.platform import models
 from renku_data_services.platform import orm as schemas
 
@@ -56,3 +58,135 @@ async def update_config(
             await session.refresh(config)
 
         return config.dump()
+
+
+class UrlRedirectRepository:
+    """Repository for URL redirects."""
+
+    def __init__(
+        self,
+        session_maker: Callable[..., AsyncSession],
+        authz: Authz,
+    ) -> None:
+        self.session_maker = session_maker
+        self.authz = authz
+
+    async def _get_redirect_config_by_source_url(
+        self, session: AsyncSession, source_url: str
+    ) -> schemas.UrlRedirectsORM | None:
+        stmt = select(schemas.UrlRedirectsORM).where(schemas.UrlRedirectsORM.source_url == source_url)
+        result = await session.execute(stmt)
+        config: schemas.UrlRedirectsORM | None = result.scalar_one_or_none()
+        return config
+
+    async def get_redirect_configs(
+        self,
+        user: base_models.APIUser,
+        pagination: PaginationRequest,
+    ) -> tuple[list[models.UrlRedirectConfig], int]:
+        """Get all url redirect configs from the database."""
+        if user.id is None:
+            raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.")
+        if not user.is_admin:
+            raise errors.ForbiddenError(message="You do not have the required permissions for this operation.")
+
+        async with self.session_maker() as session:
+            stmt = select(schemas.UrlRedirectsORM)
+            stmt = stmt.limit(pagination.per_page).offset(pagination.offset)
+            stmt_count = select(func.count()).select_from(schemas.UrlRedirectsORM)
+            result = await session.stream_scalars(stmt)
+            count = await session.scalar(stmt_count)
+            redirects = await result.all()
+            return [r.dump() for r in redirects], count or 0
+
+    async def get_redirect_config_by_source_url(
+        self, user: base_models.APIUser, source_url: str
+    ) -> models.UrlRedirectConfig:
+        """Retrieve redirect config for a given source URL."""
+
+        if user.id is None:
+            raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.")
+
+        async with self.session_maker() as session:
+            url_redirect_orm = await self._get_redirect_config_by_source_url(session, source_url)
+
+            if not url_redirect_orm:
+                raise errors.MissingResourceError(
+                    message=f"A redirect for '{source_url}' does not exist or you do not have access to it."
+                )
+            return url_redirect_orm.dump()
+
+    async def create_redirect_config(
+        self, user: base_models.APIUser, post: models.UnsavedUrlRedirectConfig
+    ) -> models.UrlRedirectConfig:
+        """Create a new URL redirect config."""
+        if user.id is None:
+            raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.")
+        if not user.is_admin:
+            raise errors.ForbiddenError(message="You do not have the required permissions for this operation.")
+
+        async with self.session_maker() as session, session.begin():
+            existing = await self._get_redirect_config_by_source_url(session, post.source_url)
+            if existing is not None:
+                raise errors.ConflictError(message=f"A redirect for source URL '{post.source_url}' already exists.")
+
+            redirect_orm = schemas.UrlRedirectsORM(
+                source_url=post.source_url,
+                target_url=post.target_url,
+            )
+            session.add(redirect_orm)
+            await session.flush()
+            await session.refresh(redirect_orm)
+            return redirect_orm.dump()
+
+    async def delete_redirect_config(
+        self, user: base_models.APIUser, etag: str, source_url: str
+    ) -> models.UrlRedirectUpdateConfig:
+        """Delete a URL redirect configuration."""
+        if user.id is None:
+            raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.")
+        if not user.is_admin:
+            raise errors.ForbiddenError(message="You do not have the required permissions for this operation.")
+
+        async with self.session_maker() as session, session.begin():
+            existing = await self._get_redirect_config_by_source_url(session, source_url)
+            if existing is None:
+                return models.UrlRedirectUpdateConfig(
+                    source_url=source_url,
+                    target_url=None,
+                )
+
+            current_etag = existing.dump().etag
+            if current_etag != etag:
+                raise errors.ConflictError(message=f"Current ETag is {current_etag}, not {etag}.")
+
+            await session.delete(existing)
+            return models.UrlRedirectUpdateConfig(
+                source_url=source_url,
+                target_url=None,
+            )
+
+    async def update_redirect_config(
+        self, user: base_models.APIUser, etag: str, patch: models.UrlRedirectUpdateConfig
+    ) -> models.UrlRedirectConfig:
+        """Update a URL redirect configuration."""
+        if user.id is None:
+            raise errors.UnauthorizedError(message="You do not have the required permissions for this operation.")
+        if not user.is_admin:
+            raise errors.ForbiddenError(message="You do not have the required permissions for this operation.")
+
+        async with self.session_maker() as session, session.begin():
+            existing = await self._get_redirect_config_by_source_url(session, patch.source_url)
+            if existing is None:
+                raise errors.MissingResourceError(
+                    message=f"A redirect for source URL '{patch.source_url}' does not exist."
+                )
+
+            current_etag = existing.dump().etag
+            if current_etag != etag:
+                raise errors.ConflictError(message=f"Current ETag is {current_etag}, not {etag}.")
+            if patch.target_url is not None:
+                existing.target_url = patch.target_url
+            session.add(existing)
+            await session.flush()
+            await session.refresh(existing)
+            return existing.dump()
diff --git a/components/renku_data_services/platform/models.py b/components/renku_data_services/platform/models.py
index 59a933460..d9cf5cdfd 100644
--- a/components/renku_data_services/platform/models.py
+++ b/components/renku_data_services/platform/models.py
@@ -4,6 +4,8 @@
 from datetime import UTC, datetime
 from enum import StrEnum
 
+from ulid import ULID
+
 from renku_data_services.utils.etag import compute_etag_from_timestamp
 
 
@@ -33,3 +35,33 @@ class PlatformConfigPatch:
     """Model for changes requested on the platform configuration."""
 
     incident_banner: str | None = None
+
+
+@dataclass(frozen=True, eq=True, kw_only=True)
+class UnsavedUrlRedirectConfig:
+    """Model representing a URL redirect that has not been persisted."""
+
+    source_url: str
+    target_url: str
+
+
+@dataclass(frozen=True, eq=True, kw_only=True)
+class UrlRedirectUpdateConfig:
+    """Model representing an update to be applied to a URL redirect."""
+
+    source_url: str
+    target_url: str | None = None
+
+
+@dataclass(frozen=True, eq=True, kw_only=True)
+class UrlRedirectConfig(UnsavedUrlRedirectConfig):
+    """Model representing a redirect from a source URL to a target."""
+
+    id: ULID
+    creation_date: datetime = field(default_factory=lambda: datetime.now(UTC).replace(microsecond=0))
+    updated_at: datetime
+
+    @property
+    def etag(self) -> str:
+        """Entity tag value for this redirect object."""
+        return compute_etag_from_timestamp(self.updated_at)
diff --git a/components/renku_data_services/platform/orm.py b/components/renku_data_services/platform/orm.py
index e8b57e2ae..29186de09 100644
--- a/components/renku_data_services/platform/orm.py
+++ b/components/renku_data_services/platform/orm.py
@@ -2,10 +2,12 @@
 
 from datetime import datetime
 
-from sqlalchemy import DateTime, MetaData, String, func
+from sqlalchemy import DateTime, MetaData, String, func, text
 from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column
+from ulid import ULID
 
 from renku_data_services.platform import models
+from renku_data_services.utils.sqlalchemy import ULIDType
 
 metadata_obj = MetaData(schema="platform")
 
@@ -50,3 +52,49 @@ def dump(self) -> models.PlatformConfig:
             creation_date=self.creation_date,
             updated_at=self.updated_at,
         )
+
+
+class UrlRedirectsORM(BaseORM):
+    """The url redirects."""
+
+    __tablename__ = "url_redirects"
+
+    id: Mapped[ULID] = mapped_column(
+        "id",
+        ULIDType,
+        primary_key=True,
+        default_factory=lambda: str(ULID()),
+        init=False,
+        server_default=text("generate_ulid()"),
+    )
+
+    source_url: Mapped[str] = mapped_column("source_url", String(), unique=True, index=True)
+    """The source URL for the redirect."""
+
+    target_url: Mapped[str] = mapped_column("target_url", String(), index=True)
+    """The target URL for the redirect."""
+
+    creation_date: Mapped[datetime] = mapped_column(
+        "creation_date", DateTime(timezone=True), default=None, server_default=func.now(), nullable=False
+    )
+    """Creation date and time."""
+
+    updated_at: Mapped[datetime] = mapped_column(
+        "updated_at",
+        DateTime(timezone=True),
+        default=None,
+        server_default=func.now(),
+        onupdate=func.now(),
+        nullable=False,
+    )
+    """Date and time of the last update."""
+
+    def dump(self) ->
models.UrlRedirectConfig: + """Create a UrlRedirectConfig from the UrlRedirectsORM.""" + return models.UrlRedirectConfig( + id=self.id, + source_url=self.source_url, + target_url=self.target_url, + creation_date=self.creation_date, + updated_at=self.updated_at, + ) diff --git a/components/renku_data_services/project/db.py b/components/renku_data_services/project/db.py index 533998a3d..73f033d23 100644 --- a/components/renku_data_services/project/db.py +++ b/components/renku_data_services/project/db.py @@ -11,7 +11,7 @@ from typing import Concatenate, ParamSpec, TypeVar from cryptography.hazmat.primitives.asymmetric import rsa -from sqlalchemy import Select, delete, func, select, update +from sqlalchemy import ColumnElement, Select, delete, func, or_, select, update from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.orm import undefer from sqlalchemy.sql.functions import coalesce @@ -20,7 +20,7 @@ import renku_data_services.base_models as base_models from renku_data_services import errors from renku_data_services.authz.authz import Authz, AuthzOperation, ResourceType -from renku_data_services.authz.models import CheckPermissionItem, Member, MembershipChange, Scope +from renku_data_services.authz.models import CheckPermissionItem, Member, MembershipChange, Scope, Visibility from renku_data_services.base_api.pagination import PaginationRequest from renku_data_services.base_models import RESET from renku_data_services.base_models.core import Slug @@ -32,7 +32,6 @@ from renku_data_services.search.db import SearchUpdatesRepo from renku_data_services.search.decorators import update_search_document from renku_data_services.secrets import orm as secrets_schemas -from renku_data_services.secrets.core import encrypt_user_secret from renku_data_services.secrets.models import SecretKind from renku_data_services.session import apispec as session_apispec from renku_data_services.session.core import ( @@ -136,14 +135,17 @@ async def get_all_copied_projects( ) async with self.session_maker() as session: - stmt = select(schemas.ProjectORM).where(schemas.ProjectORM.template_id == project_id) - result = await session.execute(stmt) - project_orms = result.scalars().all() - # NOTE: Show only those projects that user has access to - scope = Scope.WRITE if only_writable else Scope.READ + scope = Scope.WRITE if only_writable else Scope.NON_PUBLIC_READ project_ids = await self.authz.resources_with_permission(user, user.id, ResourceType.project, scope=scope) - project_orms = [p for p in project_orms if p.id in project_ids] + + cond: ColumnElement[bool] = schemas.ProjectORM.id.in_(project_ids) + if scope == Scope.NON_PUBLIC_READ: + cond = or_(cond, schemas.ProjectORM.visibility == Visibility.PUBLIC.value) + + stmt = select(schemas.ProjectORM).where(schemas.ProjectORM.template_id == project_id).where(cond) + result = await session.execute(stmt) + project_orms = result.scalars().all() return [p.dump() for p in project_orms] @@ -832,8 +834,7 @@ async def patch_session_secrets( del existing_secrets_as_dict[slot_id] continue - encrypted_value, encrypted_key = await encrypt_user_secret( - user_repo=self.user_repo, + encrypted_value, encrypted_key = await self.user_repo.encrypt_user_secret( requested_by=user, secret_service_public_key=self.secret_service_public_key, secret_value=secret_update.value, @@ -967,6 +968,7 @@ async def migrate_v1_project( args=constants.MIGRATION_ARGS, is_archived=False, environment_image_source=session_apispec.EnvironmentImageSourceImage.image, + strip_path_prefix=False, ), 
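+            # strip_path_prefix=False above is the schema default: sessions of
+            # migrated v1 projects keep receiving the full session URL path prefix
+            # (see StripPathPrefix in session/api.spec.yaml).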
env_variables=None, ) diff --git a/components/renku_data_services/repositories/api.spec.yaml b/components/renku_data_services/repositories/api.spec.yaml index 4953d18a1..55e313e69 100644 --- a/components/renku_data_services/repositories/api.spec.yaml +++ b/components/renku_data_services/repositories/api.spec.yaml @@ -51,7 +51,7 @@ paths: type: string responses: "200": - description: The repository seems to be availabe. + description: The repository seems to be available. "404": description: There is no available provider for this repository. default: diff --git a/components/renku_data_services/repositories/blueprints.py b/components/renku_data_services/repositories/blueprints.py index c07e22c67..e1bc7b12a 100644 --- a/components/renku_data_services/repositories/blueprints.py +++ b/components/renku_data_services/repositories/blueprints.py @@ -32,7 +32,7 @@ def get_one_repository(self) -> BlueprintFactoryResponse: @authenticate_2(self.authenticator, self.internal_gitlab_authenticator) @extract_if_none_match async def _get_one_repository( - request: Request, + _: Request, user: base_models.APIUser, internal_gitlab_user: base_models.APIUser, repository_url: str, diff --git a/components/renku_data_services/repositories/db.py b/components/renku_data_services/repositories/db.py index f48607e13..42fb5957d 100644 --- a/components/renku_data_services/repositories/db.py +++ b/components/renku_data_services/repositories/db.py @@ -14,6 +14,7 @@ from renku_data_services import errors from renku_data_services.connected_services import orm as connected_services_schemas from renku_data_services.connected_services.db import ConnectedServicesRepository +from renku_data_services.connected_services.utils import GitHubProviderType, get_github_provider_type from renku_data_services.repositories import models from renku_data_services.repositories.provider_adapters import ( get_internal_gitlab_adapter, @@ -29,10 +30,18 @@ def __init__( session_maker: Callable[..., AsyncSession], connected_services_repo: ConnectedServicesRepository, internal_gitlab_url: str | None, + enable_internal_gitlab: bool, ): self.session_maker = session_maker self.connected_services_repo = connected_services_repo self.internal_gitlab_url = internal_gitlab_url + self.enable_internal_gitlab = enable_internal_gitlab + + def __include_repository_provider(self, c: connected_services_schemas.OAuth2ClientORM, repo_netloc: str) -> bool: + github_type = get_github_provider_type(c) + return urlparse(c.url).netloc == repo_netloc and ( + not github_type or github_type == GitHubProviderType.standard_app + ) async def get_repository( self, @@ -48,19 +57,19 @@ async def get_repository( result_clients = await session.scalars(select(connected_services_schemas.OAuth2ClientORM)) clients = result_clients.all() - matched_client = next(filter(lambda x: urlparse(x.url).netloc == repository_netloc, clients), None) - - if self.internal_gitlab_url: - internal_gitlab_netloc = urlparse(self.internal_gitlab_url).netloc - if matched_client is None and internal_gitlab_netloc == repository_netloc: - return await self._get_repository_from_internal_gitlab( - repository_url=repository_url, - user=internal_gitlab_user, - etag=etag, - internal_gitlab_url=self.internal_gitlab_url, - ) + matched_client = next(filter(lambda x: self.__include_repository_provider(x, repository_netloc), clients), None) if matched_client is None: + if self.enable_internal_gitlab and self.internal_gitlab_url: + internal_gitlab_netloc = urlparse(self.internal_gitlab_url).netloc + if 
internal_gitlab_netloc == repository_netloc: + return await self._get_repository_from_internal_gitlab( + repository_url=repository_url, + user=internal_gitlab_user, + etag=etag, + internal_gitlab_url=self.internal_gitlab_url, + ) + raise errors.MissingResourceError(message=f"No OAuth2 Client found for repository {repository_url}.") async with self.session_maker() as session: diff --git a/components/renku_data_services/repositories/provider_adapters.py b/components/renku_data_services/repositories/provider_adapters.py index 558ea3caf..61f3cbd85 100644 --- a/components/renku_data_services/repositories/provider_adapters.py +++ b/components/renku_data_services/repositories/provider_adapters.py @@ -8,7 +8,7 @@ from renku_data_services import errors from renku_data_services.app_config import logging from renku_data_services.connected_services import orm as connected_services_schemas -from renku_data_services.connected_services.apispec import ProviderKind +from renku_data_services.connected_services.models import ProviderKind from renku_data_services.repositories import external_models, models logger = logging.getLogger(__name__) @@ -73,12 +73,15 @@ def api_validate_repository_response( class GitHubAdapter(GitProviderAdapter): - """Adapter for GitLab OAuth2 clients.""" + """Adapter for GitHub OAuth2 clients.""" @property def api_url(self) -> str: """The URL used for API calls on the Resource Server.""" url = urlparse(self.client_url) + # See: https://docs.github.com/en/apps/sharing-github-apps/making-your-github-app-available-for-github-enterprise-server#the-app-code-must-use-the-correct-urls + if url.netloc != "github.com": + return urljoin(self.client_url, "api/v3/") url = url._replace(netloc=f"api.{url.netloc}") return urlunparse(url) diff --git a/components/renku_data_services/secrets/api.spec.yaml b/components/renku_data_services/secrets/api.spec.yaml index d45fde9f1..f86a08cde 100644 --- a/components/renku_data_services/secrets/api.spec.yaml +++ b/components/renku_data_services/secrets/api.spec.yaml @@ -71,6 +71,8 @@ components: uid: c9328118-8d32-41b4-b9bd-1437880c95a2 key_mapping: $ref: "#/components/schemas/KeyMapping" + cluster_id: + $ref: "#/components/schemas/Ulid" required: - name - namespace diff --git a/components/renku_data_services/secrets/apispec.py b/components/renku_data_services/secrets/apispec.py index c28af21bc..a34d34cc5 100644 --- a/components/renku_data_services/secrets/apispec.py +++ b/components/renku_data_services/secrets/apispec.py @@ -1,6 +1,6 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2025-03-19T10:21:09+00:00 +# timestamp: 2025-07-24T05:48:09+00:00 from __future__ import annotations @@ -68,3 +68,10 @@ class K8sSecret(BaseAPISpec): ], ) key_mapping: Optional[Dict[str, Union[str, List[str]]]] = None + cluster_id: Optional[str] = Field( + None, + description="ULID identifier", + max_length=26, + min_length=26, + pattern="^[0-7][0-9A-HJKMNP-TV-Z]{25}$", + ) diff --git a/components/renku_data_services/secrets/blueprints.py b/components/renku_data_services/secrets/blueprints.py index 22f95ed84..34ef0d6de 100644 --- a/components/renku_data_services/secrets/blueprints.py +++ b/components/renku_data_services/secrets/blueprints.py @@ -6,16 +6,15 @@ from sanic import Request, json from sanic.response import JSONResponse from sanic_ext import validate -from ulid import ULID -import renku_data_services.base_models as base_models +from renku_data_services import base_models from renku_data_services.base_api.auth import authenticate, 
only_authenticated from renku_data_services.base_api.blueprint import BlueprintFactoryResponse, CustomBlueprint -from renku_data_services.k8s.client_interfaces import K8sCoreClientInterface +from renku_data_services.errors import errors +from renku_data_services.k8s.client_interfaces import SecretClient from renku_data_services.secrets import apispec -from renku_data_services.secrets.core import create_k8s_secret +from renku_data_services.secrets.core import validate_secret from renku_data_services.secrets.db import LowLevelUserSecretsRepo -from renku_data_services.secrets.models import OwnerReference @dataclass(kw_only=True) @@ -26,7 +25,7 @@ class K8sSecretsBP(CustomBlueprint): user_secrets_repo: LowLevelUserSecretsRepo secret_service_private_key: rsa.RSAPrivateKey previous_secret_service_private_key: rsa.RSAPrivateKey | None - core_client: K8sCoreClientInterface + client: SecretClient def post(self) -> BlueprintFactoryResponse: """Create a new K8s secret from a user secret.""" @@ -35,23 +34,22 @@ def post(self) -> BlueprintFactoryResponse: @only_authenticated @validate(json=apispec.K8sSecret) async def _post(_: Request, user: base_models.APIUser, body: apispec.K8sSecret) -> JSONResponse: - owner_references = [] - if body.owner_references: - owner_references = [OwnerReference.from_dict(o) for o in body.owner_references] - secret_ids = [ULID.from_str(id.root) for id in body.secret_ids] - await create_k8s_secret( + secret = await validate_secret( user=user, - secret_name=body.name, - namespace=body.namespace, - secret_ids=secret_ids, - owner_references=owner_references, + body=body, secrets_repo=self.user_secrets_repo, secret_service_private_key=self.secret_service_private_key, previous_secret_service_private_key=self.previous_secret_service_private_key, - core_client=self.core_client, - key_mapping=body.key_mapping, ) - return json(body.name, 201) + try: + result = await self.client.create_secret(secret) + except Exception as e: + # don't wrap the error, we don't want secrets accidentally leaking. 
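+                # (only the exception type is reported below; the exception's own
+                # message could echo decrypted secret material)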
+ raise errors.SecretCreationError( + message=f"An error occurred creating secrets: {str(type(e))}" + ) from None + + return json(result.name, 201) return "/kubernetes", ["POST"], _post diff --git a/components/renku_data_services/secrets/config.py b/components/renku_data_services/secrets/config.py index a4cb2bfa6..7529ad490 100644 --- a/components/renku_data_services/secrets/config.py +++ b/components/renku_data_services/secrets/config.py @@ -25,13 +25,17 @@ def from_env(cls) -> Self: """Load config from environment variables.""" if os.environ.get("DUMMY_STORES", "false").lower() == "true": public_key_path = os.getenv("SECRETS_SERVICE_PUBLIC_KEY_PATH") - encryption_key = secrets.token_bytes(32) if public_key_path is not None: public_key = serialization.load_pem_public_key(Path(public_key_path).read_bytes()) else: # generate new random key private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) public_key = private_key.public_key() + encryption_key_path = os.getenv("ENCRYPTION_KEY_PATH") + if encryption_key_path is not None: + encryption_key = Path(encryption_key_path).read_bytes() + else: + encryption_key = secrets.token_bytes(32) else: public_key_path = os.getenv("SECRETS_SERVICE_PUBLIC_KEY_PATH", "/secret_service_public_key") encryption_key_path = os.getenv("ENCRYPTION_KEY_PATH", "encryption_key") diff --git a/components/renku_data_services/secrets/core.py b/components/renku_data_services/secrets/core.py index 452ff06bc..c6772934e 100644 --- a/components/renku_data_services/secrets/core.py +++ b/components/renku_data_services/secrets/core.py @@ -1,26 +1,26 @@ """Business logic for secrets storage.""" -import asyncio +from __future__ import annotations + +import logging from base64 import b64encode from typing import TYPE_CHECKING +from box import Box from cryptography.hazmat.primitives.asymmetric import rsa +from kr8s.objects import Secret from kubernetes import client as k8s_client -from prometheus_client import Counter, Enum from ulid import ULID from renku_data_services import base_models, errors -from renku_data_services.app_config import logging -from renku_data_services.base_models.core import InternalServiceAdmin -from renku_data_services.k8s.client_interfaces import K8sCoreClientInterface -from renku_data_services.secrets.models import OwnerReference, Secret -from renku_data_services.users.db import UserRepo +from renku_data_services.k8s.constants import DEFAULT_K8S_CLUSTER, ClusterId +from renku_data_services.k8s.models import GVK, K8sSecret, sanitizer +from renku_data_services.secrets import apispec +from renku_data_services.secrets.db import LowLevelUserSecretsRepo +from renku_data_services.secrets.models import OwnerReference from renku_data_services.utils.cryptography import ( decrypt_rsa, decrypt_string, - encrypt_rsa, - encrypt_string, - generate_random_encryption_key, ) logger = logging.getLogger(__name__) @@ -29,19 +29,21 @@ from renku_data_services.secrets.db import LowLevelUserSecretsRepo -async def create_k8s_secret( +async def validate_secret( user: base_models.APIUser, - secret_name: str, - namespace: str, - secret_ids: list[ULID], - owner_references: list[OwnerReference], - secrets_repo: "LowLevelUserSecretsRepo", + body: apispec.K8sSecret, + secrets_repo: LowLevelUserSecretsRepo, secret_service_private_key: rsa.RSAPrivateKey, previous_secret_service_private_key: rsa.RSAPrivateKey | None, - core_client: K8sCoreClientInterface, - key_mapping: dict[str, str | list[str]] | None, -) -> None: +) -> K8sSecret: """Creates a single k8s secret from a 
list of user secrets stored in the DB.""" + cluster_id = ClusterId(ULID.from_str(body.cluster_id)) if body.cluster_id is not None else DEFAULT_K8S_CLUSTER + + owner_references = [] + if body.owner_references: + owner_references = [OwnerReference.from_dict(o) for o in body.owner_references] + secret_ids = [ULID.from_str(id.root) for id in body.secret_ids] + secrets = await secrets_repo.get_secrets_by_ids(requested_by=user, secret_ids=secret_ids) found_secret_ids = {str(s.id) for s in secrets} requested_secret_ids = set(map(str, secret_ids)) @@ -49,10 +51,12 @@ async def create_k8s_secret( if len(missing_secret_ids) > 0: raise errors.MissingResourceError(message=f"Couldn't find secrets with ids {', '.join(missing_secret_ids)}") - def ensure_list(value: str | list[str]) -> list[str]: + def _ensure_list(value: str | list[str]) -> list[str]: return [value] if isinstance(value, str) else value - key_mapping_with_lists_only = {key: ensure_list(key_mapping[key]) for key in key_mapping} if key_mapping else None + key_mapping_with_lists_only = ( + {key: _ensure_list(value) for key, value in body.key_mapping.items()} if body.key_mapping else None + ) if key_mapping_with_lists_only: if key_mapping_with_lists_only.keys() != requested_secret_ids: @@ -90,115 +94,20 @@ def ensure_list(value: str | list[str]) -> list[str]: owner_refs = [] if owner_references: owner_refs = [o.to_k8s() for o in owner_references] - secret = k8s_client.V1Secret( + + v1_secret = k8s_client.V1Secret( data=decrypted_secrets, metadata=k8s_client.V1ObjectMeta( - name=secret_name, - namespace=namespace, + name=body.name, + namespace=body.namespace, owner_references=owner_refs, ), ) - try: - core_client.create_namespaced_secret(namespace, secret) - except k8s_client.ApiException as e: - if e.status == 409: - logger.info( - f"Found that secret {namespace}/{secret_name} already exists when trying to create it, " - "the existing secret will be patched" - ) - sanitized_secret = k8s_client.ApiClient().sanitize_for_serialization(secret) - core_client.patch_namespaced_secret( - namespace, - secret_name, - sanitized_secret, - ) - # don't wrap the error, we don't want secrets accidentally leaking. - raise errors.SecretCreationError(message=f"An error occurred creating secrets: {str(type(e))}") from None - - -async def rotate_encryption_keys( - requested_by: InternalServiceAdmin, - new_key: rsa.RSAPrivateKey, - old_key: rsa.RSAPrivateKey, - secrets_repo: "LowLevelUserSecretsRepo", - batch_size: int = 100, -) -> None: - """Rotate all secrets to a new private key. - - This method undoes the outer encryption and reencrypts with a new key, without touching the inner encryption. 
- """ - processed_secrets_metrics = Counter( - "secrets_rotation_count", - "Number of secrets rotated", - ) - running_metrics = Enum( - "secrets_rotation_state", "State of secrets rotation", states=["running", "finished", "errored"] + return K8sSecret( + name=v1_secret.metadata.name, + namespace=v1_secret.metadata.namespace, + cluster=cluster_id, + gvk=GVK(group="core", version=Secret.version, kind="Secret"), + manifest=Box(sanitizer(v1_secret)), ) - running_metrics.state("running") - try: - async for batch in secrets_repo.get_all_secrets_batched(requested_by, batch_size): - updated_secrets = [] - for secret, user_id in batch: - new_secret = await rotate_single_encryption_key(secret, user_id, new_key, old_key) - # we need to sleep, otherwise the async scheduler will never yield to other tasks like requests - await asyncio.sleep(0.000001) - if new_secret is not None: - updated_secrets.append(new_secret) - - await secrets_repo.update_secret_values(requested_by, updated_secrets) - processed_secrets_metrics.inc(len(updated_secrets)) - except: - running_metrics.state("errored") - raise - else: - running_metrics.state("finished") - - -async def rotate_single_encryption_key( - secret: Secret, user_id: str, new_key: rsa.RSAPrivateKey, old_key: rsa.RSAPrivateKey -) -> Secret | None: - """Rotate a single secret in place.""" - # try using new key first as a sanity check, in case it was already rotated - try: - _ = decrypt_rsa(new_key, secret.encrypted_key) - except ValueError: - pass - else: - return None # could decrypt with new key, nothing to do - - try: - decryption_key = decrypt_rsa(old_key, secret.encrypted_key) - decrypted_value = decrypt_string(decryption_key, user_id, secret.encrypted_value).encode() - new_encryption_key = generate_random_encryption_key() - encrypted_value = encrypt_string(new_encryption_key, user_id, decrypted_value.decode()) - encrypted_key = encrypt_rsa(new_key.public_key(), new_encryption_key) - return secret.update_encrypted_value(encrypted_value=encrypted_value, encrypted_key=encrypted_key) - except Exception as e: - logger.error(f"Couldn't decrypt secret {secret.name}({secret.id}): {e}") - return None - - -async def encrypt_user_secret( - user_repo: UserRepo, - requested_by: base_models.APIUser, - secret_service_public_key: rsa.RSAPublicKey, - secret_value: str, -) -> tuple[bytes, bytes]: - """Doubly encrypt a secret for a user. - - Since RSA cannot encrypt arbitrary length strings, we use symmetric encryption with a random key and encrypt the - random key with RSA to get it to the secret service. 
- """ - if requested_by.id is None: - raise errors.ValidationError(message="APIUser has no id") - - user_secret_key = await user_repo.get_or_create_user_secret_key(requested_by=requested_by) - - # encrypt once with user secret - encrypted_value = encrypt_string(user_secret_key.encode(), requested_by.id, secret_value) - # encrypt again with the secret service public key - secret_svc_encryption_key = generate_random_encryption_key() - doubly_encrypted_value = encrypt_string(secret_svc_encryption_key, requested_by.id, encrypted_value.decode()) - encrypted_key = encrypt_rsa(secret_service_public_key, secret_svc_encryption_key) - return doubly_encrypted_value, encrypted_key diff --git a/components/renku_data_services/secrets/db.py b/components/renku_data_services/secrets/db.py index f1003be7f..42c5dec9b 100644 --- a/components/renku_data_services/secrets/db.py +++ b/components/renku_data_services/secrets/db.py @@ -1,5 +1,8 @@ """Database repo for secrets.""" +from __future__ import annotations + +import asyncio import random import string from collections.abc import AsyncGenerator, Callable, Sequence @@ -7,6 +10,7 @@ from typing import cast from cryptography.hazmat.primitives.asymmetric import rsa +from prometheus_client import Counter, Enum from sqlalchemy import delete, select from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession @@ -15,7 +19,6 @@ from renku_data_services.base_api.auth import APIUser, only_authenticated from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId, Slug from renku_data_services.errors import errors -from renku_data_services.secrets.core import encrypt_user_secret from renku_data_services.secrets.models import Secret, SecretKind, SecretPatch, UnsavedSecret from renku_data_services.secrets.orm import SecretORM from renku_data_services.users.db import UserRepo @@ -91,6 +94,43 @@ async def update_secret_values(self, requested_by: InternalServiceAdmin, secrets await session.flush() + async def rotate_encryption_keys( + self, + requested_by: InternalServiceAdmin, + new_key: rsa.RSAPrivateKey, + old_key: rsa.RSAPrivateKey, + batch_size: int = 100, + ) -> None: + """Rotate all secrets to a new private key. + + This method undoes the outer encryption and reencrypts with a new key, without touching the inner encryption. 
+ """ + processed_secrets_metrics = Counter( + "secrets_rotation_count", + "Number of secrets rotated", + ) + running_metrics = Enum( + "secrets_rotation_state", "State of secrets rotation", states=["running", "finished", "errored"] + ) + running_metrics.state("running") + try: + async for batch in self.get_all_secrets_batched(requested_by, batch_size): + updated_secrets = [] + for secret, user_id in batch: + new_secret = await secret.rotate_single_encryption_key(user_id, new_key, old_key) + # we need to sleep, otherwise the async scheduler will never yield to other tasks like requests + await asyncio.sleep(0.000001) + if new_secret is not None: + updated_secrets.append(new_secret) + + await self.update_secret_values(requested_by, updated_secrets) + processed_secrets_metrics.inc(len(updated_secrets)) + except: + running_metrics.state("errored") + raise + else: + running_metrics.state("finished") + class UserSecretsRepo: """An adapter for accessing users secrets with encryption handling.""" @@ -138,8 +178,7 @@ async def insert_secret(self, requested_by: APIUser, secret: UnsavedSecret) -> S name_slug = Slug.from_name(secret.name).value default_filename = f"{name_slug[:200]}-{suffix}" - encrypted_value, encrypted_key = await encrypt_user_secret( - user_repo=self.user_repo, + encrypted_value, encrypted_key = await self.user_repo.encrypt_user_secret( requested_by=requested_by, secret_service_public_key=self.secret_service_public_key, secret_value=secret.secret_value, @@ -194,8 +233,7 @@ async def update_secret(self, requested_by: APIUser, secret_id: ULID, patch: Sec ) secret.default_filename = patch.default_filename if patch.secret_value is not None: - encrypted_value, encrypted_key = await encrypt_user_secret( - user_repo=self.user_repo, + encrypted_value, encrypted_key = await self.user_repo.encrypt_user_secret( requested_by=requested_by, secret_service_public_key=self.secret_service_public_key, secret_value=patch.secret_value, diff --git a/components/renku_data_services/secrets/models.py b/components/renku_data_services/secrets/models.py index 9c4186c52..78f82423e 100644 --- a/components/renku_data_services/secrets/models.py +++ b/components/renku_data_services/secrets/models.py @@ -1,12 +1,26 @@ """Base models for secrets.""" +from __future__ import annotations + from dataclasses import dataclass, field from datetime import datetime from enum import StrEnum +from cryptography.hazmat.primitives.asymmetric import rsa from kubernetes import client as k8s_client from ulid import ULID +from renku_data_services.app_config import logging +from renku_data_services.utils.cryptography import ( + decrypt_rsa, + decrypt_string, + encrypt_rsa, + encrypt_string, + generate_random_encryption_key, +) + +logger = logging.getLogger(__name__) + class SecretKind(StrEnum): """Kind of secret. 
This should have the same values as users.apispec.SecretKind.""" @@ -33,7 +47,7 @@ class Secret: data_connector_ids: list[ULID] """List of data connector IDs where this user secret is used.""" - def update_encrypted_value(self, encrypted_value: bytes, encrypted_key: bytes) -> "Secret": + def update_encrypted_value(self, encrypted_value: bytes, encrypted_key: bytes) -> Secret: """Returns a new secret instance with updated encrypted_value and encrypted_key.""" return Secret( id=self.id, @@ -47,6 +61,29 @@ def update_encrypted_value(self, encrypted_value: bytes, encrypted_key: bytes) - data_connector_ids=self.data_connector_ids, ) + async def rotate_single_encryption_key( + self, user_id: str, new_key: rsa.RSAPrivateKey, old_key: rsa.RSAPrivateKey + ) -> Secret | None: + """Rotate a single secret in place.""" + # try using new key first as a sanity check, in case it was already rotated + try: + _ = decrypt_rsa(new_key, self.encrypted_key) + except ValueError: + pass + else: + return None # could decrypt with new key, nothing to do + + try: + decryption_key = decrypt_rsa(old_key, self.encrypted_key) + decrypted_value = decrypt_string(decryption_key, user_id, self.encrypted_value).encode() + new_encryption_key = generate_random_encryption_key() + encrypted_value = encrypt_string(new_encryption_key, user_id, decrypted_value.decode()) + encrypted_key = encrypt_rsa(new_key.public_key(), new_encryption_key) + return self.update_encrypted_value(encrypted_value=encrypted_value, encrypted_key=encrypted_key) + except Exception as e: + logger.error(f"Couldn't decrypt secret {self.name}({self.id}): {e}") + return None + @dataclass class OwnerReference: @@ -58,7 +95,7 @@ class OwnerReference: uid: str @classmethod - def from_dict(cls, data: dict[str, str]) -> "OwnerReference": + def from_dict(cls, data: dict[str, str]) -> OwnerReference: """Create an owner reference from a dict.""" return cls(apiVersion=data["apiVersion"], kind=data["kind"], name=data["name"], uid=data["uid"]) diff --git a/components/renku_data_services/session/api.spec.yaml b/components/renku_data_services/session/api.spec.yaml index 49e3e8fa3..08d6c3ce5 100644 --- a/components/renku_data_services/session/api.spec.yaml +++ b/components/renku_data_services/session/api.spec.yaml @@ -391,6 +391,10 @@ components: $ref: "#/components/schemas/EnvironmentArgs" is_archived: $ref: "#/components/schemas/IsArchived" + strip_path_prefix: + allOf: + - $ref: "#/components/schemas/StripPathPrefix" + default: false required: - id - name @@ -498,6 +502,10 @@ components: default: false environment_image_source: $ref: "#/components/schemas/EnvironmentImageSourceImage" + strip_path_prefix: + allOf: + - $ref: "#/components/schemas/StripPathPrefix" + default: false required: - name - container_image @@ -542,6 +550,8 @@ components: $ref: "#/components/schemas/EnvironmentArgs" is_archived: $ref: "#/components/schemas/IsArchived" + strip_path_prefix: + $ref: "#/components/schemas/StripPathPrefix" SessionLaunchersList: description: A list of Renku session launchers type: array @@ -895,6 +905,22 @@ components: type: array items: $ref: "#/components/schemas/Build" + StripPathPrefix: + type: boolean + default: false + description: |- + If set to true the default url and the base path where sessions are + served will be removed from all URL paths before the requests reach + the server running in the session. So the server in the session will + receive HTTP requests whose base path will be "/". 
However this will + not work unless the server running inside the session can be made + aware that paths are rewritten. For example, if the application/server + running in the session serves a HTML page that then loads javascript + and CSS, the path where these assets should be loaded from in the browser + will not be "/" but it has to include the prefix that was stripped. And + the server from the session that generated the HTML page needs to know + what is the full base path (including the part that was stripped) so that + it can make the URLs to such assets be reachable from the browser. BuildPatch: description: The requested update of a container image build type: object diff --git a/components/renku_data_services/session/apispec.py b/components/renku_data_services/session/apispec.py index 48e1cdf5a..d5c8c7098 100644 --- a/components/renku_data_services/session/apispec.py +++ b/components/renku_data_services/session/apispec.py @@ -1,6 +1,6 @@ # generated by datamodel-codegen: # filename: api.spec.yaml -# timestamp: 2025-07-21T13:24:31+00:00 +# timestamp: 2025-10-08T20:52:11+00:00 from __future__ import annotations @@ -189,6 +189,10 @@ class EnvironmentWithoutContainerImage(BaseAPISpec): False, description="Whether this environment is archived and not for use in new projects or not", ) + strip_path_prefix: bool = Field( + False, + description='If set to true the default url and the base path where sessions are\nserved will be removed from all URL paths before the requests reach\nthe server running in the session. So the server in the session will\nreceive HTTP requests whose base path will be "/". However this will\nnot work unless the server running inside the session can be made\naware that paths are rewritten. For example, if the application/server\nrunning in the session serves a HTML page that then loads javascript\nand CSS, the path where these assets should be loaded from in the browser\nwill not be "/" but it has to include the prefix that was stripped. And\nthe server from the session that generated the HTML page needs to know\nwhat is the full base path (including the part that was stripped) so that\nit can make the URLs to such assets be reachable from the browser.', + ) class Environment(EnvironmentWithoutContainerImage): @@ -269,6 +273,10 @@ class EnvironmentPost(BaseAPISpec): description="Whether this environment is archived and not for use in new projects or not", ) environment_image_source: EnvironmentImageSourceImage + strip_path_prefix: bool = Field( + False, + description='If set to true the default url and the base path where sessions are\nserved will be removed from all URL paths before the requests reach\nthe server running in the session. So the server in the session will\nreceive HTTP requests whose base path will be "/". However this will\nnot work unless the server running inside the session can be made\naware that paths are rewritten. For example, if the application/server\nrunning in the session serves a HTML page that then loads javascript\nand CSS, the path where these assets should be loaded from in the browser\nwill not be "/" but it has to include the prefix that was stripped. 
And\nthe server from the session that generated the HTML page needs to know\nwhat is the full base path (including the part that was stripped) so that\nit can make the URLs to such assets be reachable from the browser.', + ) class EnvironmentPatch(BaseAPISpec): @@ -326,6 +334,10 @@ class EnvironmentPatch(BaseAPISpec): False, description="Whether this environment is archived and not for use in new projects or not", ) + strip_path_prefix: Optional[bool] = Field( + False, + description='If set to true the default url and the base path where sessions are\nserved will be removed from all URL paths before the requests reach\nthe server running in the session. So the server in the session will\nreceive HTTP requests whose base path will be "/". However this will\nnot work unless the server running inside the session can be made\naware that paths are rewritten. For example, if the application/server\nrunning in the session serves a HTML page that then loads javascript\nand CSS, the path where these assets should be loaded from in the browser\nwill not be "/" but it has to include the prefix that was stripped. And\nthe server from the session that generated the HTML page needs to know\nwhat is the full base path (including the part that was stripped) so that\nit can make the URLs to such assets be reachable from the browser.', + ) class EnvironmentIdOnlyPatch(BaseAPISpec): diff --git a/components/renku_data_services/session/constants.py b/components/renku_data_services/session/constants.py index a9fdee0c9..7b5956980 100644 --- a/components/renku_data_services/session/constants.py +++ b/components/renku_data_services/session/constants.py @@ -14,9 +14,9 @@ BUILD_OUTPUT_IMAGE_NAME: Final[str] = "renku-build" """The container image name created from Renku builds.""" -BUILD_BUILDER_IMAGE: Final[str] = "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/selector:0.0.6" +BUILD_BUILDER_IMAGE: Final[str] = "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/selector:0.1.0" -BUILD_RUN_IMAGE: Final[str] = "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/base-image:0.0.6" +BUILD_RUN_IMAGE: Final[str] = "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/base-image:0.1.0" BUILD_MOUNT_DIRECTORY: Final[PurePosixPath] = PurePosixPath("/home/renku/work") BUILD_WORKING_DIRECTORY: Final[PurePosixPath] = BUILD_MOUNT_DIRECTORY BUILD_UID: Final[int] = 1000 diff --git a/components/renku_data_services/session/core.py b/components/renku_data_services/session/core.py index bfbea2f15..a9c83227b 100644 --- a/components/renku_data_services/session/core.py +++ b/components/renku_data_services/session/core.py @@ -30,6 +30,7 @@ def validate_unsaved_environment( command=environment.command, is_archived=environment.is_archived, environment_image_source=models.EnvironmentImageSource.image, + strip_path_prefix=environment.strip_path_prefix or False, ) @@ -67,8 +68,8 @@ def validate_unsaved_build_parameters( repository=environment.repository, builder_variant=environment.builder_variant, frontend_variant=environment.frontend_variant, - repository_revision=environment.repository_revision, - context_dir=environment.context_dir, + repository_revision=environment.repository_revision if environment.repository_revision else None, + context_dir=environment.context_dir if environment.context_dir else None, ) @@ -130,6 +131,7 @@ def validate_environment_patch(patch: apispec.EnvironmentPatch) -> models.Enviro args=RESET if "args" in data_dict and data_dict["args"] is None else patch.args, command=RESET if "command" in 
data_dict and data_dict["command"] is None else patch.command, is_archived=patch.is_archived, + strip_path_prefix=patch.strip_path_prefix, ) @@ -210,6 +212,7 @@ def validate_session_launcher_patch( args=validated_env.args, command=validated_env.command, environment_image_source=models.EnvironmentImageSource.image, + strip_path_prefix=validated_env.strip_path_prefix or False, ) elif patch.environment.environment_image_source == apispec.EnvironmentImageSourceBuild.build: # NOTE: The environment type is changed to be built, so, all required fields should be passed (as in @@ -287,6 +290,7 @@ def validate_session_launcher_patch( args=validated_env.args, command=validated_env.command, environment_image_source=models.EnvironmentImageSource.image, + strip_path_prefix=validated_env.strip_path_prefix or False, ) else: environment = validate_environment_patch_in_launcher(patch.environment) diff --git a/components/renku_data_services/session/db.py b/components/renku_data_services/session/db.py index 71463a655..72323a402 100644 --- a/components/renku_data_services/session/db.py +++ b/components/renku_data_services/session/db.py @@ -100,6 +100,7 @@ def __insert_environment( args=new_environment.args, creation_date=datetime.now(UTC).replace(microsecond=0), is_archived=new_environment.is_archived, + strip_path_prefix=new_environment.strip_path_prefix, ) session.add(environment) @@ -132,6 +133,7 @@ def __copy_environment( creation_date=datetime.now(UTC).replace(microsecond=0), is_archived=environment.is_archived, environment_image_source=environment.environment_image_source, + strip_path_prefix=environment.strip_path_prefix, ) if environment.environment_image_source == models.EnvironmentImageSource.build: @@ -190,6 +192,7 @@ def __insert_build_parameters_environment( environment_image_source=models.EnvironmentImageSource.build, build_parameters_id=build_parameters_orm.id, build_parameters=build_parameters_orm, + strip_path_prefix=False, ) session.add(environment_orm) return environment_orm @@ -251,6 +254,9 @@ def __update_environment( if update.is_archived is not None: environment.is_archived = update.is_archived + if update.strip_path_prefix is not None: + environment.strip_path_prefix = update.strip_path_prefix + async def __update_environment_build_parameters( self, environment: schemas.EnvironmentORM, update: models.EnvironmentPatch ) -> None: @@ -411,6 +417,7 @@ async def insert_launcher( args=launcher.environment.args, creation_date=datetime.now(UTC).replace(microsecond=0), environment_image_source=models.EnvironmentImageSource.image, + strip_path_prefix=launcher.environment.strip_path_prefix, ) session.add(environment_orm) elif isinstance(launcher.environment, models.UnsavedBuildParameters): @@ -441,6 +448,7 @@ async def insert_launcher( environment_image_source=models.EnvironmentImageSource.build, build_parameters_id=build_parameters_orm.id, build_parameters=build_parameters_orm, + strip_path_prefix=False, # TODO: Should this maybe be adjustable? 
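+ # Built environments always keep the full path prefix for now; the buildpack
+ # frontends (vscodium, jupyterlab, ttyd) presumably serve correctly under the
+ # session base path, so the flag is not user-settable in this branch yet.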
) session.add(environment_orm) @@ -695,6 +703,7 @@ async def __update_launcher_environment( launcher.environment.args = update.args launcher.environment.environment_image_source = models.EnvironmentImageSource.image launcher.environment.build_parameters_id = None + launcher.environment.strip_path_prefix = update.strip_path_prefix # NOTE: Delete the build parameters since they are not used by any other environment await session.delete(build_parameters) @@ -734,6 +743,7 @@ async def __update_launcher_environment( launcher.environment.environment_image_source = models.EnvironmentImageSource.build launcher.environment.build_parameters_id = build_parameters_orm.id launcher.environment.build_parameters = build_parameters_orm + launcher.environment.strip_path_prefix = False await session.flush() case _: diff --git a/components/renku_data_services/session/k8s_client.py b/components/renku_data_services/session/k8s_client.py index 1341f5a6d..53aac655a 100644 --- a/components/renku_data_services/session/k8s_client.py +++ b/components/renku_data_services/session/k8s_client.py @@ -223,8 +223,30 @@ async def update_image_build_status(self, buildrun_name: str, user_id: str) -> m return models.ShipwrightBuildStatusUpdate(update=None) conditions = k8s_build_status.conditions + # NOTE: You can get a condition like this in some cases during autoscaling or for other reasons + # message: Not all Steps in the Task have finished executing + # reason: Running + # status: Unknown + # /type: Succeeded + # or + # message: TaskRun Pod exceeded available resources + # reason: ExceededNodeResources + # status: Unknown + # /type: Succeeded + # In this case we want to keep waiting - the buildrun is still running. + # A fully successful completion condition looks like this: + # reason: Succeeded + # status: True + # /type: Succeeded + # See https://shipwright.io/docs/build/buildrun/#understanding-the-state-of-a-buildrun + # NOTE: In the examples above I put / before the type field because mypy parses that and fails. + # So I needed something to keep mypy happy. 
The real name of the field is "type" condition = next(filter(lambda c: c.type == "Succeeded", conditions or []), None) + if condition is not None and condition.status not in ["True", "False"]: + # The buildrun is still running or pending + return models.ShipwrightBuildStatusUpdate(update=None) + buildSpec = k8s_build_status.buildSpec output = buildSpec.output if buildSpec else None result_image = output.image if output else "unknown" @@ -238,7 +260,7 @@ async def update_image_build_status(self, buildrun_name: str, user_id: str) -> m result_repository_git_commit_sha = git_obj_2.commitSha if git_obj_2 else None result_repository_git_commit_sha = result_repository_git_commit_sha or "unknown" - if condition is not None and condition.status == "True": + if condition is not None and condition.reason == "Succeeded" and condition.status == "True": return models.ShipwrightBuildStatusUpdate( update=models.ShipwrightBuildStatusUpdateContent( status=models.BuildStatus.succeeded, @@ -293,7 +315,7 @@ async def _get_pod_logs(self, name: str, max_log_lines: int | None = None) -> di logs: dict[str, str] = {} if result is None: return logs - cluster = self.client.cluster_by_id(result.cluster) + cluster = await self.client.cluster_by_id(result.cluster) obj = result.to_api_object(cluster.api) result = Pod(resource=obj, namespace=obj.namespace, api=cluster.api) diff --git a/components/renku_data_services/session/models.py b/components/renku_data_services/session/models.py index 03a24c2ba..956bbecf0 100644 --- a/components/renku_data_services/session/models.py +++ b/components/renku_data_services/session/models.py @@ -51,6 +51,7 @@ class FrontendVariant(StrEnum): vscodium = "vscodium" jupyterlab = "jupyterlab" + ttyd = "ttyd" @dataclass(kw_only=True, frozen=True, eq=True) @@ -89,6 +90,7 @@ class UnsavedEnvironment: args: list[str] | None = None command: list[str] | None = None is_archived: bool = False + strip_path_prefix: bool = False def __post_init__(self) -> None: if self.working_directory and not self.working_directory.is_absolute(): @@ -152,6 +154,7 @@ class EnvironmentPatch: is_archived: bool | None = None build_parameters: BuildParametersPatch | None = None environment_image_source: EnvironmentImageSource | None = None + strip_path_prefix: bool | None = None # TODO: Verify that these limits are compatible with k8s diff --git a/components/renku_data_services/session/orm.py b/components/renku_data_services/session/orm.py index f3cee6859..3f30f7a3b 100644 --- a/components/renku_data_services/session/orm.py +++ b/components/renku_data_services/session/orm.py @@ -80,6 +80,7 @@ class EnvironmentORM(BaseORM): default=None, ) build_parameters: Mapped["BuildParametersORM"] = relationship(lazy="joined", default=None) + strip_path_prefix: Mapped[bool] = mapped_column(default=False, server_default=false(), nullable=False) def dump(self) -> models.Environment: """Create a session environment model from the EnvironmentORM.""" @@ -103,6 +104,7 @@ def dump(self) -> models.Environment: environment_image_source=self.environment_image_source, build_parameters=self.build_parameters.dump() if self.build_parameters else None, build_parameters_id=self.build_parameters_id, + strip_path_prefix=self.strip_path_prefix, ) @@ -216,8 +218,8 @@ def dump(self) -> models.BuildParameters: repository=self.repository, builder_variant=self.builder_variant, frontend_variant=self.frontend_variant, - repository_revision=self.repository_revision, - context_dir=self.context_dir, + repository_revision=self.repository_revision or None, + 
context_dir=self.context_dir or None, ) diff --git a/components/renku_data_services/storage/rclone.py b/components/renku_data_services/storage/rclone.py index 48d00bce1..9c63eef19 100644 --- a/components/renku_data_services/storage/rclone.py +++ b/components/renku_data_services/storage/rclone.py @@ -11,7 +11,7 @@ from renku_data_services import errors from renku_data_services.app_config import logging -from renku_data_services.storage.rclone_patches import BANNED_STORAGE, apply_patches +from renku_data_services.storage.rclone_patches import BANNED_SFTP_OPTIONS, BANNED_STORAGE, apply_patches logger = logging.getLogger(__name__) @@ -375,6 +375,14 @@ def get_option_for_provider(self, name: str, provider: str | None) -> RCloneOpti return None + def check_unsafe_option(self, name: str) -> None: + """Check that the option is safe.""" + if self.prefix != "sftp": + return None + if name in BANNED_SFTP_OPTIONS: + raise errors.ValidationError(message=f"The {name} option is not allowed.") + return None + def validate_config( self, configuration: Union["RCloneConfig", dict[str, Any]], keep_sensitive: bool = False ) -> None: @@ -399,6 +407,8 @@ def validate_config( raise errors.ValidationError(message=f"The following fields are required but missing:\n{missing_str}") for key in keys: + self.check_unsafe_option(key) + value = configuration[key] option: RCloneOption | None = self.get_option_for_provider(key, provider) diff --git a/components/renku_data_services/storage/rclone_patches.py b/components/renku_data_services/storage/rclone_patches.py index c563491fb..8930d75c3 100644 --- a/components/renku_data_services/storage/rclone_patches.py +++ b/components/renku_data_services/storage/rclone_patches.py @@ -31,13 +31,20 @@ "onedrive", "pcloud", "pikpak", - "premiumzeme", + "premiumizeme", "putio", "sharefile", "yandex", "zoho", } +BANNED_SFTP_OPTIONS: Final[set[str]] = { + "key_file", # path to a local file + "pubkey_file", # path to a local file + "known_hosts_file", # path to a local file + "ssh", # arbitrary command to be executed +} + def find_storage(spec: list[dict[str, Any]], prefix: str) -> dict[str, Any]: """Find and return the storage schema from the spec. 
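
The four options banned above are exactly the ones that would let a storage
configuration reach outside the remote: three point rclone at files on the
local filesystem of the service, and "ssh" runs an arbitrary command. A
standalone sketch of the resulting validation behaviour (an approximation of
the check_unsafe_option hook added to rclone.py in the hunk further up; the
real code raises renku_data_services.errors.ValidationError, stubbed here):

    from typing import Final

    BANNED_SFTP_OPTIONS: Final[set[str]] = {"key_file", "pubkey_file", "known_hosts_file", "ssh"}

    class ValidationError(Exception):
        """Stand-in for renku_data_services.errors.ValidationError."""

    def check_unsafe_option(storage_prefix: str, option_name: str) -> None:
        """Reject SFTP options that read local files or execute commands."""
        if storage_prefix == "sftp" and option_name in BANNED_SFTP_OPTIONS:
            raise ValidationError(f"The {option_name} option is not allowed.")

    check_unsafe_option("sftp", "host")  # fine: ordinary connection option
    check_unsafe_option("s3", "ssh")     # fine: the ban only applies to sftp
    try:
        check_unsafe_option("sftp", "ssh")  # banned: would execute a command
    except ValidationError as err:
        print(err)  # The ssh option is not allowed.
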
@@ -241,6 +248,16 @@ def __patch_switchdrive_storage(spec: list[dict[str, Any]]) -> None: ) +def __patch_schema_remove_banned_sftp_options(spec: list[dict[str, Any]]) -> None: + """Remove unsafe SFTP options.""" + sftp = find_storage(spec, "sftp") + options = [] + for option in sftp["Options"]: + if option["Name"] not in BANNED_SFTP_OPTIONS: + options.append(option) + sftp["Options"] = options + + def apply_patches(spec: list[dict[str, Any]]) -> None: """Apply patches to RClone schema.""" patches = [ @@ -252,6 +269,7 @@ def apply_patches(spec: list[dict[str, Any]]) -> None: __patch_polybox_storage, __patch_switchdrive_storage, __patch_schema_add_openbis_type, + __patch_schema_remove_banned_sftp_options, ] for patch in patches: diff --git a/components/renku_data_services/users/db.py b/components/renku_data_services/users/db.py index 496c02194..c807e8b78 100644 --- a/components/renku_data_services/users/db.py +++ b/components/renku_data_services/users/db.py @@ -9,6 +9,7 @@ from datetime import UTC, datetime, timedelta from typing import Any, Protocol, cast +from cryptography.hazmat.primitives.asymmetric import rsa from sqlalchemy import delete, func, select from sqlalchemy.ext.asyncio import AsyncSession @@ -38,7 +39,12 @@ ) from renku_data_services.users.orm import LastKeycloakEventTimestamp, UserORM, UserPreferencesORM from renku_data_services.utils.core import with_db_transaction -from renku_data_services.utils.cryptography import decrypt_string, encrypt_string +from renku_data_services.utils.cryptography import ( + decrypt_string, + encrypt_rsa, + encrypt_string, + generate_random_encryption_key, +) logger = logging.getLogger(__name__) @@ -220,6 +226,30 @@ async def get_or_create_user_secret_key(self, requested_by: APIUser) -> str: return secret_key + async def encrypt_user_secret( + self, + requested_by: base_models.APIUser, + secret_service_public_key: rsa.RSAPublicKey, + secret_value: str, + ) -> tuple[bytes, bytes]: + """Doubly encrypt a secret for a user. + + Since RSA cannot encrypt arbitrary length strings, we use symmetric encryption with a random key and encrypt the + random key with RSA to get it to the secret service. 
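+
+ Concretely, the returned tuple is (doubly encrypted value, RSA-encrypted
+ random key): the value is encrypted once with the user's own secret key,
+ then wrapped again with a fresh random key that only the secret service
+ can recover with its private key.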
+ """ + if requested_by.id is None: + raise errors.ValidationError(message="APIUser has no id") + + user_secret_key = await self.get_or_create_user_secret_key(requested_by=requested_by) + + # encrypt once with user secret + encrypted_value = encrypt_string(user_secret_key.encode(), requested_by.id, secret_value) + # encrypt again with the secret service public key + secret_svc_encryption_key = generate_random_encryption_key() + doubly_encrypted_value = encrypt_string(secret_svc_encryption_key, requested_by.id, encrypted_value.decode()) + encrypted_key = encrypt_rsa(secret_service_public_key, secret_svc_encryption_key) + return doubly_encrypted_value, encrypted_key + class UsersSync: """Sync users from Keycloak to the database.""" diff --git a/components/renku_data_services/users/models.py b/components/renku_data_services/users/models.py index 8a77fc78e..d30dd38d7 100644 --- a/components/renku_data_services/users/models.py +++ b/components/renku_data_services/users/models.py @@ -1,5 +1,7 @@ """Base models for users.""" +from __future__ import annotations + import json import re from collections.abc import Iterable @@ -42,7 +44,7 @@ class UserInfoFieldUpdate: old_value: str | None = None @classmethod - def from_json_user_events(cls, val: Iterable[dict[str, Any]]) -> list["UserInfoFieldUpdate"]: + def from_json_user_events(cls, val: Iterable[dict[str, Any]]) -> list[UserInfoFieldUpdate]: """Generate a list of updates from a json response from Keycloak.""" output: list[UserInfoFieldUpdate] = [] for event in val: @@ -133,7 +135,7 @@ def from_json_user_events(cls, val: Iterable[dict[str, Any]]) -> list["UserInfoF return output @classmethod - def from_json_admin_events(cls, val: Iterable[dict[str, Any]]) -> list["UserInfoFieldUpdate"]: + def from_json_admin_events(cls, val: Iterable[dict[str, Any]]) -> list[UserInfoFieldUpdate]: """Generate a list of updates from a json response from Keycloak.""" output: list[UserInfoFieldUpdate] = [] for event in val: @@ -218,7 +220,7 @@ class UnsavedUserInfo: email: str | None = None @classmethod - def from_kc_user_payload(cls, payload: dict[str, Any]) -> "UnsavedUserInfo": + def from_kc_user_payload(cls, payload: dict[str, Any]) -> UnsavedUserInfo: """Create a user object from the user payload from the Keycloak admin API.""" return UnsavedUserInfo( id=payload["id"], @@ -271,7 +273,7 @@ class UserPatch: email: str | None = None @classmethod - def from_unsaved_user_info(cls, user: UnsavedUserInfo) -> "UserPatch": + def from_unsaved_user_info(cls, user: UnsavedUserInfo) -> UserPatch: """Create a user patch from a UnsavedUserInfo instance.""" return UserPatch( first_name=user.first_name, @@ -300,7 +302,7 @@ class PinnedProjects(BaseModel): project_slugs: list[str] | None = None @classmethod - def from_dict(cls, data: dict) -> "PinnedProjects": + def from_dict(cls, data: dict) -> PinnedProjects: """Create model from a dict object.""" return cls(project_slugs=data.get("project_slugs")) diff --git a/components/renku_data_services/users/orm.py b/components/renku_data_services/users/orm.py index a53b45f51..9536ab3f8 100644 --- a/components/renku_data_services/users/orm.py +++ b/components/renku_data_services/users/orm.py @@ -1,5 +1,7 @@ """SQLAlchemy schemas for the CRC database.""" +from __future__ import annotations + from datetime import datetime from typing import TYPE_CHECKING, Any, Optional @@ -28,7 +30,7 @@ class UserORM(BaseORM): __tablename__ = "users" keycloak_id: Mapped[str] = mapped_column(String(36), unique=True, index=True) - namespace: 
Mapped["NamespaceORM"] = relationship(repr=False, back_populates="user", lazy="selectin") + namespace: Mapped[NamespaceORM] = relationship(repr=False, back_populates="user", lazy="selectin") first_name: Mapped[Optional[str]] = mapped_column(String(256), default=None) last_name: Mapped[Optional[str]] = mapped_column(String(256), default=None) email: Mapped[Optional[str]] = mapped_column(String(320), default=None, index=True) @@ -46,7 +48,7 @@ def dump(self) -> UserInfo: ) @classmethod - def load(cls, user: UserInfo) -> "UserORM": + def load(cls, user: UserInfo) -> UserORM: """Create an ORM object from the user object.""" return cls( keycloak_id=user.id, @@ -87,7 +89,7 @@ class UserPreferencesORM(BaseORM): """Show project migration banner.""" @classmethod - def load(cls, user_preferences: UserPreferences) -> "UserPreferencesORM": + def load(cls, user_preferences: UserPreferences) -> UserPreferencesORM: """Create UserPreferencesORM from the user preferences model.""" return cls( user_id=user_preferences.user_id, diff --git a/components/renku_data_services/utils/core.py b/components/renku_data_services/utils/core.py index 587f5528d..a565a3793 100644 --- a/components/renku_data_services/utils/core.py +++ b/components/renku_data_services/utils/core.py @@ -10,21 +10,10 @@ import httpx from deepmerge import Merger from sqlalchemy.ext.asyncio import AsyncSession -from tenacity import retry, stop_after_attempt, stop_after_delay, wait_fixed from renku_data_services import errors -@retry(stop=(stop_after_attempt(20) | stop_after_delay(300)), wait=wait_fixed(2), reraise=True) -def oidc_discovery(url: str, realm: str) -> dict[str, Any]: - """Get OIDC configuration.""" - url = f"{url}/realms/{realm}/.well-known/openid-configuration" - res = httpx.get(url, verify=get_ssl_context(), timeout=5) - if res.status_code == 200: - return cast(dict[str, Any], res.json()) - raise errors.ConfigurationError(message=f"Cannot successfully do OIDC discovery with url {url}.") - - @functools.lru_cache(1) def get_ssl_context() -> ssl.SSLContext: """Get an SSL context supporting mounted custom certificates.""" diff --git a/components/renku_pack_builder/manifests/buildstrategy.yaml b/components/renku_pack_builder/manifests/buildstrategy.yaml index e5d4b45a1..eecfb90c3 100644 --- a/components/renku_pack_builder/manifests/buildstrategy.yaml +++ b/components/renku_pack_builder/manifests/buildstrategy.yaml @@ -10,10 +10,10 @@ spec: default: "0.12" - name: run-image description: The image to use as the base for all session images built with this strategy - default: "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/base-image:0.0.6" + default: "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/base-image:0.1.0" - name: builder-image description: The buildpack builder image to use - default: "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/selector:0.0.6" + default: "ghcr.io/swissdatasciencecenter/renku-frontend-buildpacks/selector:0.1.0" - name: frontend description: Which frontend should be used in the image. Either "jupyterlab" or "vscodium". 
default: vscodium diff --git a/flake.lock b/flake.lock index a5efc080f..a109fe701 100644 --- a/flake.lock +++ b/flake.lock @@ -8,11 +8,11 @@ ] }, "locked": { - "lastModified": 1750151194, - "narHash": "sha256-2/U7Ifd46PFr7ZT0wWWAVY2usfqfVqN+/a5F0sXFsWQ=", + "lastModified": 1755530935, + "narHash": "sha256-i54nyC64jt/Weh5S0tWemGpiOp1r8fZGUpYE8WgtaJs=", "owner": "eikek", "repo": "devshell-tools", - "rev": "1db1256aba5f93e13f519c80bcbc51368a358491", + "rev": "5ac351010d3af467e25374be3a471aef7dae8748", "type": "github" }, "original": { @@ -59,11 +59,11 @@ }, "nixpkgs": { "locked": { - "lastModified": 1750776420, - "narHash": "sha256-/CG+w0o0oJ5itVklOoLbdn2dGB0wbZVOoDm4np6w09A=", + "lastModified": 1757487488, + "narHash": "sha256-zwE/e7CuPJUWKdvvTCB7iunV4E/+G0lKfv4kk/5Izdg=", "owner": "NixOS", "repo": "nixpkgs", - "rev": "30a61f056ac492e3b7cdcb69c1e6abdcf00e39cf", + "rev": "ab0f3607a6c7486ea22229b92ed2d355f1482ee0", "type": "github" }, "original": { diff --git a/flake.nix b/flake.nix index 15f71476a..00132de5b 100644 --- a/flake.nix +++ b/flake.nix @@ -119,11 +119,17 @@ SOLR_BIN_PATH = "${devshellToolsPkgs.solr}/bin/solr"; shellHook = '' + PYENV_PATH=$(poetry env info --path) export FLAKE_ROOT="$(git rev-parse --show-toplevel)" - export PATH="$FLAKE_ROOT/.venv/bin:$PATH" + export PATH="$PYENV_PATH/bin:$PATH" export ALEMBIC_CONFIG="$FLAKE_ROOT/components/renku_data_services/migrations/alembic.ini" export NB_SERVER_OPTIONS__DEFAULTS_PATH="$FLAKE_ROOT/server_defaults.json" export NB_SERVER_OPTIONS__UI_CHOICES_PATH="$FLAKE_ROOT/server_options.json" + export ENCRYPTION_KEY_PATH="$FLAKE_ROOT/.encryption_key" + + if [ ! -e "$FLAKE_ROOT/.encryption_key" ]; then + head -c30 /dev/random > "$FLAKE_ROOT/.encryption_key" + fi ''; }; @@ -143,6 +149,8 @@ python313 basedpyright rclone-sdsc + azure-cli + k3d ( writeShellScriptBin "pg" '' psql -h $DB_HOST -p $DB_PORT -U dev $DB_NAME @@ -173,6 +181,11 @@ ${spicedb-zed}/bin/zed --no-verify-ca --insecure --endpoint ''$ZED_ENDPOINT --token ''$ZED_TOKEN $@ '' ) + ( + writeShellScriptBin "ptest" '' + pytest --disable-warnings --no-cov -s -p no:warnings $@ + '' + ) ]; in { formatter = pkgs.alejandra; @@ -225,8 +238,9 @@ ]; shellHook = '' + PYENV_PATH=$(poetry env info --path) export FLAKE_ROOT="$(git rev-parse --show-toplevel)" - export PATH="$FLAKE_ROOT/.venv/bin:$PATH" + export PATH="$PYENV_PATH/bin:$PATH" ''; }); vm = pkgs.mkShell (devSettings diff --git a/poetry.lock b/poetry.lock index 1c941b63b..84a4e1359 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
[[package]] name = "aiofiles" @@ -1436,7 +1436,7 @@ description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.9" groups = ["main"] -markers = "python_version == \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" +markers = "python_version < \"3.14\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")" files = [ {file = "greenlet-3.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:777c1281aa7c786738683e302db0f55eb4b0077c20f1dc53db8852ffaea0a6b0"}, {file = "greenlet-3.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3059c6f286b53ea4711745146ffe5a5c5ff801f62f6c56949446e0f6461f8157"}, @@ -4158,7 +4158,7 @@ description = "C version of reader, parser and emitter for ruamel.yaml derived f optional = false python-versions = ">=3.9" groups = ["dev"] -markers = "platform_python_implementation == \"CPython\" and python_version == \"3.13\"" +markers = "platform_python_implementation == \"CPython\" and python_version < \"3.14\"" files = [ {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"}, {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"}, diff --git a/projects/k8s_watcher/poetry.lock b/projects/k8s_watcher/poetry.lock index 30dc1ce9a..c7386638b 100644 --- a/projects/k8s_watcher/poetry.lock +++ b/projects/k8s_watcher/poetry.lock @@ -1,19 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. - -[[package]] -name = "aiofile" -version = "3.9.0" -description = "Asynchronous file operations." -optional = false -python-versions = "<4,>=3.8" -groups = ["main"] -files = [ - {file = "aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa"}, - {file = "aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b"}, -] - -[package.dependencies] -caio = ">=0.9.0,<0.10.0" +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. [[package]] name = "aiofiles" @@ -415,35 +400,6 @@ files = [ {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] -[[package]] -name = "caio" -version = "0.9.24" -description = "Asynchronous file IO for Linux MacOS or Windows." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "caio-0.9.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d80322126a97ba572412b17b2f086ff95195de2c4261deb19db6bfcdc9ef7540"}, - {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:37bc172349686139e8dc97fff7662c67b1837e18a67b99e8ef25585f2893d013"}, - {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:ad7f0902bf952237e120606252c14ab3cb05995c9f79f39154b5248744864832"}, - {file = "caio-0.9.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:925b9e3748ce1a79386dfb921c0aee450e43225534551abd1398b1c08f9ba29f"}, - {file = "caio-0.9.24-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:3b4dc0a8fb9a58ab40f967ad5a8a858cc0bfb2348a580b4142595849457f9c9a"}, - {file = "caio-0.9.24-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fa74d111b3b165bfad2e333367976bdf118bcf505a1cb44d3bcddea2849e3297"}, - {file = "caio-0.9.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae3566228383175265a7583107f21a7cb044a752ea29ba84fce7c1a49a05903"}, - {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:a306b0dda91cb4ca3170f066c114597f8ea41b3da578574a9d2b54f86963de68"}, - {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:8ee158e56128d865fb7d57a9c9c22fca4e8aa8d8664859c977a36fff3ccb3609"}, - {file = "caio-0.9.24-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d47ef8d76aca74c17cb07339a441c5530fc4b8dd9222dfb1e1abd7f9f9b814f"}, - {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:d15fc746c4bf0077d75df05939d1e97c07ccaa8e580681a77021d6929f65d9f4"}, - {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:9368eae0a9badd5f31264896c51b47431d96c0d46f1979018fb1d20c49f56156"}, - {file = "caio-0.9.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f0e5a645ef4e7bb7a81e10ae2a7aef14988cb2cb4354588c6bf6f6f3f6de72a"}, - {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_aarch64.whl", hash = "sha256:08304fa80af7771c78a5bcc923449c7ec8134d589b50d48c66320f85552c7ae2"}, - {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:5339ced0764e10242a50ccb21db7f0d9c359881db0f72fa2c5e45ed828ffacf7"}, - {file = "caio-0.9.24.tar.gz", hash = "sha256:5bcdecaea02a9aa8e3acf0364eff8ad9903d57d70cdb274a42270126290a77f1"}, -] - -[package.extras] -develop = ["aiomisc-pytest", "coveralls", "pylama[toml]", "pytest", "pytest-cov", "setuptools"] - [[package]] name = "casefy" version = "1.0.0" @@ -3417,4 +3373,4 @@ propcache = ">=0.2.1" [metadata] lock-version = "2.1" python-versions = "^3.13" -content-hash = "7938b536cdef40647ae41faeedb2f265396d84d411553f3f09244775f90f61b7" +content-hash = "a67e8079ef804510f274d7bf28564da7913094b261f04c14b8abc55045d03595" diff --git a/projects/k8s_watcher/pyproject.toml b/projects/k8s_watcher/pyproject.toml index 05d127a62..fef8ca23d 100644 --- a/projects/k8s_watcher/pyproject.toml +++ b/projects/k8s_watcher/pyproject.toml @@ -24,7 +24,6 @@ packages = [ { include = "renku_data_services/errors", from = "../../components" }, { include = "renku_data_services/git", from = "../../components" }, { include = "renku_data_services/k8s", from = "../../components" }, - { include = "renku_data_services/k8s_watcher", from = "../../components" }, { include = "renku_data_services/message_queue", from = "../../components" }, { include = "renku_data_services/namespace", from = "../../components" }, { include = "renku_data_services/platform", from = 
"../../components" }, @@ -69,7 +68,7 @@ sentry-sdk = { version = "^2.22.0", extras = ["sanic"] } authzed = "^1.20.0" # see https://github.com/sanic-org/sanic/issues/2828 for setuptools dependency, remove when not needed anymore setuptools = { version = "^75.8.2" } -aiofile = "^3.9.0" +aiofiles = "^24.1.0" # Not a direct dependency, it is needed by authzed. Was causing things to crash at startup beacuse of # google.protobuf.runtime_version.VersionError: # Detected incompatible Protobuf Gencode/Runtime versions when loading authzed/api/v1/core.proto: gencode 5.28.2 runtime 5.27.3. diff --git a/projects/renku_data_service/Dockerfile b/projects/renku_data_service/Dockerfile index 3d191d2e3..edb923c89 100644 --- a/projects/renku_data_service/Dockerfile +++ b/projects/renku_data_service/Dockerfile @@ -47,4 +47,4 @@ USER $USER_UID:$USER_GID WORKDIR /app COPY --from=builder /app/env ./env ENV DB_POOL_SIZE=10 -ENTRYPOINT ["tini", "-g", "--", "env/bin/python", "-m", "renku_data_services.data_api.main"] +ENTRYPOINT ["tini", "-g", "--", "env/bin/python", "-m", "renku_data_services.data_api.main", "--single-process"] diff --git a/projects/renku_data_service/poetry.lock b/projects/renku_data_service/poetry.lock index cd62d2d9a..329d109b3 100644 --- a/projects/renku_data_service/poetry.lock +++ b/projects/renku_data_service/poetry.lock @@ -1,19 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. - -[[package]] -name = "aiofile" -version = "3.9.0" -description = "Asynchronous file operations." -optional = false -python-versions = "<4,>=3.8" -groups = ["main"] -files = [ - {file = "aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa"}, - {file = "aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b"}, -] - -[package.dependencies] -caio = ">=0.9.0,<0.10.0" +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. [[package]] name = "aiofiles" @@ -427,35 +412,6 @@ files = [ {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] -[[package]] -name = "caio" -version = "0.9.24" -description = "Asynchronous file IO for Linux MacOS or Windows." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "caio-0.9.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d80322126a97ba572412b17b2f086ff95195de2c4261deb19db6bfcdc9ef7540"}, - {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:37bc172349686139e8dc97fff7662c67b1837e18a67b99e8ef25585f2893d013"}, - {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:ad7f0902bf952237e120606252c14ab3cb05995c9f79f39154b5248744864832"}, - {file = "caio-0.9.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:925b9e3748ce1a79386dfb921c0aee450e43225534551abd1398b1c08f9ba29f"}, - {file = "caio-0.9.24-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:3b4dc0a8fb9a58ab40f967ad5a8a858cc0bfb2348a580b4142595849457f9c9a"}, - {file = "caio-0.9.24-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fa74d111b3b165bfad2e333367976bdf118bcf505a1cb44d3bcddea2849e3297"}, - {file = "caio-0.9.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae3566228383175265a7583107f21a7cb044a752ea29ba84fce7c1a49a05903"}, - {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:a306b0dda91cb4ca3170f066c114597f8ea41b3da578574a9d2b54f86963de68"}, - {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:8ee158e56128d865fb7d57a9c9c22fca4e8aa8d8664859c977a36fff3ccb3609"}, - {file = "caio-0.9.24-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d47ef8d76aca74c17cb07339a441c5530fc4b8dd9222dfb1e1abd7f9f9b814f"}, - {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:d15fc746c4bf0077d75df05939d1e97c07ccaa8e580681a77021d6929f65d9f4"}, - {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:9368eae0a9badd5f31264896c51b47431d96c0d46f1979018fb1d20c49f56156"}, - {file = "caio-0.9.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f0e5a645ef4e7bb7a81e10ae2a7aef14988cb2cb4354588c6bf6f6f3f6de72a"}, - {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_aarch64.whl", hash = "sha256:08304fa80af7771c78a5bcc923449c7ec8134d589b50d48c66320f85552c7ae2"}, - {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:5339ced0764e10242a50ccb21db7f0d9c359881db0f72fa2c5e45ed828ffacf7"}, - {file = "caio-0.9.24.tar.gz", hash = "sha256:5bcdecaea02a9aa8e3acf0364eff8ad9903d57d70cdb274a42270126290a77f1"}, -] - -[package.extras] -develop = ["aiomisc-pytest", "coveralls", "pylama[toml]", "pytest", "pytest-cov", "setuptools"] - [[package]] name = "cel-python" version = "0.2.0" @@ -3367,4 +3323,4 @@ propcache = ">=0.2.1" [metadata] lock-version = "2.1" python-versions = "^3.13" -content-hash = "7f5f30247d45caad40a3326283b944904847ca7433a4558e9029182620486073" +content-hash = "8b0e7b2ac2ccc195ad579ad09502d47ba20cb622c47c067b2998c80d3951a1b7" diff --git a/projects/renku_data_service/pyproject.toml b/projects/renku_data_service/pyproject.toml index 693811cf8..4180e416b 100644 --- a/projects/renku_data_service/pyproject.toml +++ b/projects/renku_data_service/pyproject.toml @@ -24,7 +24,6 @@ packages = [ { include = "renku_data_services/errors", from = "../../components" }, { include = "renku_data_services/git", from = "../../components" }, { include = "renku_data_services/k8s", from = "../../components" }, - { include = "renku_data_services/k8s_watcher", from = "../../components" }, { include = "renku_data_services/message_queue", from = "../../components" }, { include = "renku_data_services/namespace", from = "../../components" }, { include = 
"renku_data_services/platform", from = "../../components" }, @@ -68,7 +67,7 @@ sentry-sdk = { version = "^2.22.0", extras = ["sanic"] } authzed = "^1.20.0" # see https://github.com/sanic-org/sanic/issues/2828 for setuptools dependency, remove when not needed anymore setuptools = { version = "^75.8.2" } -aiofile = "^3.9.0" +aiofiles = "^24.1.0" # Not a direct dependency, it is needed by authzed. Was causing things to crash at startup beacuse of # google.protobuf.runtime_version.VersionError: # Detected incompatible Protobuf Gencode/Runtime versions when loading authzed/api/v1/core.proto: gencode 5.28.2 runtime 5.27.3. diff --git a/projects/renku_data_tasks/poetry.lock b/projects/renku_data_tasks/poetry.lock index 94226e83a..442fc8edb 100644 --- a/projects/renku_data_tasks/poetry.lock +++ b/projects/renku_data_tasks/poetry.lock @@ -1,19 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. - -[[package]] -name = "aiofile" -version = "3.9.0" -description = "Asynchronous file operations." -optional = false -python-versions = "<4,>=3.8" -groups = ["main"] -files = [ - {file = "aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa"}, - {file = "aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b"}, -] - -[package.dependencies] -caio = ">=0.9.0,<0.10.0" +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. [[package]] name = "aiofiles" @@ -427,38 +412,6 @@ files = [ {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] -[[package]] -name = "caio" -version = "0.9.22" -description = "Asynchronous file IO for Linux MacOS or Windows." 
-optional = false -python-versions = "<4,>=3.7" -groups = ["main"] -files = [ - {file = "caio-0.9.22-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:979bef84869822a0b1d10c99f7240e2ca8b00c138a54bec1fcbef1163a6bc976"}, - {file = "caio-0.9.22-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:bef2533b1444ce80df47ecce25ad8def6eb76a5ba8c1457074a16f5ab6e12670"}, - {file = "caio-0.9.22-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:76fe5d98ff55099ec61a1863eed53ef353ea815e529c19a284865af73b3c84a3"}, - {file = "caio-0.9.22-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:dd99c28a85809d0954211d637dd0e2e5c0e385dcdfbdbc4ed914b549d7e0fb69"}, - {file = "caio-0.9.22-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:6ee67c4d63b77b3d07715c41939b71fee1bbb986219e398f3420ac31b43e65a8"}, - {file = "caio-0.9.22-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fb335891181107bdb8f96dc133fc8e2612b62cd270219333659d6d53f75b1770"}, - {file = "caio-0.9.22-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:25aae4c3b846eeca63c7d75d63b3fd8deaba9650ef1168d8a239e6a927299ded"}, - {file = "caio-0.9.22-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:ac332b8e2c8e0840fe10ee6971b38f8dac6ad64ecf6087ee3f70cd376f511699"}, - {file = "caio-0.9.22-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:6f991e4812fd2d6e87f91ff78fcc7d4f299bd98765334756580d4ea42cad89f1"}, - {file = "caio-0.9.22-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:c3d29f0f2f3a577b58794960dbfc78ef124a8b3e238d653a4c11c57f20651e34"}, - {file = "caio-0.9.22-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:a3307e064f423675c4ffd2608ecc86bc47652ec1547dab5f94b72d9599c23201"}, - {file = "caio-0.9.22-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:160a1dad60bbb4268811f88f817205fab6cba8fb50801fdd3d865b69999ea924"}, - {file = "caio-0.9.22-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:ceb32307d6d3cfa512220b53b578833ebd27f8ed0b534f15e3e88e6d598bffe7"}, - {file = "caio-0.9.22-cp38-cp38-manylinux_2_34_aarch64.whl", hash = "sha256:7145f2c47233af12cc0194a7bfe584c632b16fa00d3f900806eda527f89ce93b"}, - {file = "caio-0.9.22-cp38-cp38-manylinux_2_34_x86_64.whl", hash = "sha256:3bedd3e8ccc485cb20d4d56b0af1471e8f8b5ca3f0b5f1f21ebd087bfdcd21a7"}, - {file = "caio-0.9.22-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:ae19a37b7294dc15c8775ec657464a4370d1f8351d0fc3a284a9f014514b9be5"}, - {file = "caio-0.9.22-cp39-cp39-manylinux_2_34_aarch64.whl", hash = "sha256:70e6a12c9d1db8fb7a07c5193ca352df1488787f1cf1c3814879dec754135031"}, - {file = "caio-0.9.22-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:3f2f7f22c60844165359a285c5d38ca0d4ab10ca6f6def79abf6243d019df64b"}, - {file = "caio-0.9.22.tar.gz", hash = "sha256:7ea533d90e5fa0bba33bc8f4805b4c90f19e8d8ac5139a8033b92a6ab5c4012b"}, -] - -[package.extras] -develop = ["aiomisc-pytest", "pytest", "pytest-cov"] - [[package]] name = "cel-python" version = "0.2.0" @@ -3370,4 +3323,4 @@ propcache = ">=0.2.1" [metadata] lock-version = "2.1" python-versions = "^3.13" -content-hash = "61d5d168a102d274f2557b8dcccca6a4ac9f69825b2647ed52bbd62858d4d0c7" +content-hash = "3ff97fe1bee28ff24e8a19331e646a2d66a5800cfef2e02104ea0dbd4fae7404" diff --git a/projects/renku_data_tasks/pyproject.toml b/projects/renku_data_tasks/pyproject.toml index 4bc5e716a..8130bf3cf 100644 --- a/projects/renku_data_tasks/pyproject.toml +++ b/projects/renku_data_tasks/pyproject.toml @@ -24,7 +24,6 @@ packages = [ { include = "renku_data_services/errors", from = "../../components" }, { 
include = "renku_data_services/git", from = "../../components" }, { include = "renku_data_services/k8s", from = "../../components" }, - { include = "renku_data_services/k8s_watcher", from = "../../components" }, { include = "renku_data_services/message_queue", from = "../../components" }, { include = "renku_data_services/namespace", from = "../../components" }, { include = "renku_data_services/platform", from = "../../components" }, @@ -68,7 +67,7 @@ sentry-sdk = { version = "^2.22.0", extras = ["sanic"] } authzed = "^1.20.0" # see https://github.com/sanic-org/sanic/issues/2828 for setuptools dependency, remove when not needed anymore setuptools = { version = "^75.8.2" } -aiofile = "^3.9.0" +aiofiles = "^24.1.0" # Not a direct dependency, it is needed by authzed. Was causing things to crash at startup beacuse of # google.protobuf.runtime_version.VersionError: # Detected incompatible Protobuf Gencode/Runtime versions when loading authzed/api/v1/core.proto: gencode 5.28.2 runtime 5.27.3. diff --git a/projects/secrets_storage/poetry.lock b/projects/secrets_storage/poetry.lock index d0dfee28f..b16dae5c9 100644 --- a/projects/secrets_storage/poetry.lock +++ b/projects/secrets_storage/poetry.lock @@ -1,32 +1,152 @@ -# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. [[package]] -name = "aiofile" -version = "3.9.0" -description = "Asynchronous file operations." +name = "aiofiles" +version = "24.1.0" +description = "File support for asyncio." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"}, + {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"}, +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +description = "Happy Eyeballs for asyncio" optional = false -python-versions = "<4,>=3.8" +python-versions = ">=3.9" groups = ["main"] files = [ - {file = "aiofile-3.9.0-py3-none-any.whl", hash = "sha256:ce2f6c1571538cbdfa0143b04e16b208ecb0e9cb4148e528af8a640ed51cc8aa"}, - {file = "aiofile-3.9.0.tar.gz", hash = "sha256:e5ad718bb148b265b6df1b3752c4d1d83024b93da9bd599df74b9d9ffcf7919b"}, + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, +] + +[[package]] +name = "aiohttp" +version = "3.12.14" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:906d5075b5ba0dd1c66fcaaf60eb09926a9fef3ca92d912d2a0bbdbecf8b1248"}, + {file = "aiohttp-3.12.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c875bf6fc2fd1a572aba0e02ef4e7a63694778c5646cdbda346ee24e630d30fb"}, + {file = "aiohttp-3.12.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbb284d15c6a45fab030740049d03c0ecd60edad9cd23b211d7e11d3be8d56fd"}, + {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e360381e02e1a05d36b223ecab7bc4a6e7b5ab15760022dc92589ee1d4238c"}, + {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", 
hash = "sha256:aaf90137b5e5d84a53632ad95ebee5c9e3e7468f0aab92ba3f608adcb914fa95"}, + {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e532a25e4a0a2685fa295a31acf65e027fbe2bea7a4b02cdfbbba8a064577663"}, + {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eab9762c4d1b08ae04a6c77474e6136da722e34fdc0e6d6eab5ee93ac29f35d1"}, + {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abe53c3812b2899889a7fca763cdfaeee725f5be68ea89905e4275476ffd7e61"}, + {file = "aiohttp-3.12.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5760909b7080aa2ec1d320baee90d03b21745573780a072b66ce633eb77a8656"}, + {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:02fcd3f69051467bbaa7f84d7ec3267478c7df18d68b2e28279116e29d18d4f3"}, + {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4dcd1172cd6794884c33e504d3da3c35648b8be9bfa946942d353b939d5f1288"}, + {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:224d0da41355b942b43ad08101b1b41ce633a654128ee07e36d75133443adcda"}, + {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e387668724f4d734e865c1776d841ed75b300ee61059aca0b05bce67061dcacc"}, + {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:dec9cde5b5a24171e0b0a4ca064b1414950904053fb77c707efd876a2da525d8"}, + {file = "aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3"}, + {file = "aiohttp-3.12.14-cp310-cp310-win32.whl", hash = "sha256:ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c"}, + {file = "aiohttp-3.12.14-cp310-cp310-win_amd64.whl", hash = "sha256:cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db"}, + {file = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f4552ff7b18bcec18b60a90c6982049cdb9dac1dba48cf00b97934a06ce2e597"}, + {file = "aiohttp-3.12.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8283f42181ff6ccbcf25acaae4e8ab2ff7e92b3ca4a4ced73b2c12d8cd971393"}, + {file = "aiohttp-3.12.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:040afa180ea514495aaff7ad34ec3d27826eaa5d19812730fe9e529b04bb2179"}, + {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b413c12f14c1149f0ffd890f4141a7471ba4b41234fe4fd4a0ff82b1dc299dbb"}, + {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1d6f607ce2e1a93315414e3d448b831238f1874b9968e1195b06efaa5c87e245"}, + {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:565e70d03e924333004ed101599902bba09ebb14843c8ea39d657f037115201b"}, + {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4699979560728b168d5ab63c668a093c9570af2c7a78ea24ca5212c6cdc2b641"}, + {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad5fdf6af93ec6c99bf800eba3af9a43d8bfd66dce920ac905c817ef4a712afe"}, + {file = "aiohttp-3.12.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ac76627c0b7ee0e80e871bde0d376a057916cb008a8f3ffc889570a838f5cc7"}, + {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_aarch64.whl", 
hash = "sha256:798204af1180885651b77bf03adc903743a86a39c7392c472891649610844635"}, + {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4f1205f97de92c37dd71cf2d5bcfb65fdaed3c255d246172cce729a8d849b4da"}, + {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:76ae6f1dd041f85065d9df77c6bc9c9703da9b5c018479d20262acc3df97d419"}, + {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a194ace7bc43ce765338ca2dfb5661489317db216ea7ea700b0332878b392cab"}, + {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:16260e8e03744a6fe3fcb05259eeab8e08342c4c33decf96a9dad9f1187275d0"}, + {file = "aiohttp-3.12.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c779e5ebbf0e2e15334ea404fcce54009dc069210164a244d2eac8352a44b28"}, + {file = "aiohttp-3.12.14-cp311-cp311-win32.whl", hash = "sha256:a289f50bf1bd5be227376c067927f78079a7bdeccf8daa6a9e65c38bae14324b"}, + {file = "aiohttp-3.12.14-cp311-cp311-win_amd64.whl", hash = "sha256:0b8a69acaf06b17e9c54151a6c956339cf46db4ff72b3ac28516d0f7068f4ced"}, + {file = "aiohttp-3.12.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a0ecbb32fc3e69bc25efcda7d28d38e987d007096cbbeed04f14a6662d0eee22"}, + {file = "aiohttp-3.12.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0400f0ca9bb3e0b02f6466421f253797f6384e9845820c8b05e976398ac1d81a"}, + {file = "aiohttp-3.12.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a56809fed4c8a830b5cae18454b7464e1529dbf66f71c4772e3cfa9cbec0a1ff"}, + {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f2e373276e4755691a963e5d11756d093e346119f0627c2d6518208483fb6d"}, + {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ca39e433630e9a16281125ef57ece6817afd1d54c9f1bf32e901f38f16035869"}, + {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c748b3f8b14c77720132b2510a7d9907a03c20ba80f469e58d5dfd90c079a1c"}, + {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a568abe1b15ce69d4cc37e23020720423f0728e3cb1f9bcd3f53420ec3bfe7"}, + {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9888e60c2c54eaf56704b17feb558c7ed6b7439bca1e07d4818ab878f2083660"}, + {file = "aiohttp-3.12.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3006a1dc579b9156de01e7916d38c63dc1ea0679b14627a37edf6151bc530088"}, + {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa8ec5c15ab80e5501a26719eb48a55f3c567da45c6ea5bb78c52c036b2655c7"}, + {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:39b94e50959aa07844c7fe2206b9f75d63cc3ad1c648aaa755aa257f6f2498a9"}, + {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:04c11907492f416dad9885d503fbfc5dcb6768d90cad8639a771922d584609d3"}, + {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:88167bd9ab69bb46cee91bd9761db6dfd45b6e76a0438c7e884c3f8160ff21eb"}, + {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:791504763f25e8f9f251e4688195e8b455f8820274320204f7eafc467e609425"}, + {file = "aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0"}, + {file = 
"aiohttp-3.12.14-cp312-cp312-win32.whl", hash = "sha256:15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729"}, + {file = "aiohttp-3.12.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338"}, + {file = "aiohttp-3.12.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3143a7893d94dc82bc409f7308bc10d60285a3cd831a68faf1aa0836c5c3c767"}, + {file = "aiohttp-3.12.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3d62ac3d506cef54b355bd34c2a7c230eb693880001dfcda0bf88b38f5d7af7e"}, + {file = "aiohttp-3.12.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48e43e075c6a438937c4de48ec30fa8ad8e6dfef122a038847456bfe7b947b63"}, + {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:077b4488411a9724cecc436cbc8c133e0d61e694995b8de51aaf351c7578949d"}, + {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d8c35632575653f297dcbc9546305b2c1133391089ab925a6a3706dfa775ccab"}, + {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b8ce87963f0035c6834b28f061df90cf525ff7c9b6283a8ac23acee6502afd4"}, + {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a2cf66e32a2563bb0766eb24eae7e9a269ac0dc48db0aae90b575dc9583026"}, + {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdea089caf6d5cde975084a884c72d901e36ef9c2fd972c9f51efbbc64e96fbd"}, + {file = "aiohttp-3.12.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7865f27db67d49e81d463da64a59365ebd6b826e0e4847aa111056dcb9dc88"}, + {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0ab5b38a6a39781d77713ad930cb5e7feea6f253de656a5f9f281a8f5931b086"}, + {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b3b15acee5c17e8848d90a4ebc27853f37077ba6aec4d8cb4dbbea56d156933"}, + {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e4c972b0bdaac167c1e53e16a16101b17c6d0ed7eac178e653a07b9f7fad7151"}, + {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7442488b0039257a3bdbc55f7209587911f143fca11df9869578db6c26feeeb8"}, + {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f68d3067eecb64c5e9bab4a26aa11bd676f4c70eea9ef6536b0a4e490639add3"}, + {file = "aiohttp-3.12.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f88d3704c8b3d598a08ad17d06006cb1ca52a1182291f04979e305c8be6c9758"}, + {file = "aiohttp-3.12.14-cp313-cp313-win32.whl", hash = "sha256:a3c99ab19c7bf375c4ae3debd91ca5d394b98b6089a03231d4c580ef3c2ae4c5"}, + {file = "aiohttp-3.12.14-cp313-cp313-win_amd64.whl", hash = "sha256:3f8aad695e12edc9d571f878c62bedc91adf30c760c8632f09663e5f564f4baa"}, + {file = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b8cc6b05e94d837bcd71c6531e2344e1ff0fb87abe4ad78a9261d67ef5d83eae"}, + {file = "aiohttp-3.12.14-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1dcb015ac6a3b8facd3677597edd5ff39d11d937456702f0bb2b762e390a21b"}, + {file = "aiohttp-3.12.14-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3779ed96105cd70ee5e85ca4f457adbce3d9ff33ec3d0ebcdf6c5727f26b21b3"}, + {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:717a0680729b4ebd7569c1dcd718c46b09b360745fd8eb12317abc74b14d14d0"}, + {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b5dd3a2ef7c7e968dbbac8f5574ebeac4d2b813b247e8cec28174a2ba3627170"}, + {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4710f77598c0092239bc12c1fcc278a444e16c7032d91babf5abbf7166463f7b"}, + {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f3e9f75ae842a6c22a195d4a127263dbf87cbab729829e0bd7857fb1672400b2"}, + {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f9c8d55d6802086edd188e3a7d85a77787e50d56ce3eb4757a3205fa4657922"}, + {file = "aiohttp-3.12.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79b29053ff3ad307880d94562cca80693c62062a098a5776ea8ef5ef4b28d140"}, + {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:23e1332fff36bebd3183db0c7a547a1da9d3b4091509f6d818e098855f2f27d3"}, + {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:a564188ce831fd110ea76bcc97085dd6c625b427db3f1dbb14ca4baa1447dcbc"}, + {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a7a1b4302f70bb3ec40ca86de82def532c97a80db49cac6a6700af0de41af5ee"}, + {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:1b07ccef62950a2519f9bfc1e5b294de5dd84329f444ca0b329605ea787a3de5"}, + {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:938bd3ca6259e7e48b38d84f753d548bd863e0c222ed6ee6ace3fd6752768a84"}, + {file = "aiohttp-3.12.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8bc784302b6b9f163b54c4e93d7a6f09563bd01ff2b841b29ed3ac126e5040bf"}, + {file = "aiohttp-3.12.14-cp39-cp39-win32.whl", hash = "sha256:a3416f95961dd7d5393ecff99e3f41dc990fb72eda86c11f2a60308ac6dcd7a0"}, + {file = "aiohttp-3.12.14-cp39-cp39-win_amd64.whl", hash = "sha256:196858b8820d7f60578f8b47e5669b3195c21d8ab261e39b1d705346458f445f"}, + {file = "aiohttp-3.12.14.tar.gz", hash = "sha256:6e06e120e34d93100de448fd941522e11dafa78ef1a893c179901b7d66aa29f2"}, ] [package.dependencies] -caio = ">=0.9.0,<0.10.0" +aiohappyeyeballs = ">=2.5.0" +aiosignal = ">=1.4.0" +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" + +[package.extras] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.3.0)", "brotlicffi ; platform_python_implementation != \"CPython\""] [[package]] -name = "aiofiles" -version = "24.1.0" -description = "File support for asyncio." 
+name = "aiosignal"
+version = "1.4.0"
+description = "aiosignal: a list of registered asynchronous callbacks"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "aiofiles-24.1.0-py3-none-any.whl", hash = "sha256:b4ec55f4195e3eb5d7abd1bf7e061763e864dd4954231fb8539a0ef8bb8260e5"},
-    {file = "aiofiles-24.1.0.tar.gz", hash = "sha256:22a075c9e5a3810f0c2e48f3008c94d68c65d763b9b03857924c99e57355166c"},
+    {file = "aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e"},
+    {file = "aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7"},
 ]
+[package.dependencies]
+frozenlist = ">=1.1.0"
+
 [[package]]
 name = "alembic"
 version = "1.15.2"
@@ -174,6 +294,26 @@
 docs = ["Sphinx (>=8.1.3,<8.2.0)", "sphinx-rtd-theme (>=1.2.2)"]
 gssauth = ["gssapi ; platform_system != \"Windows\"", "sspilib ; platform_system == \"Windows\""]
 test = ["distro (>=1.9.0,<1.10.0)", "flake8 (>=6.1,<7.0)", "flake8-pyi (>=24.1.0,<24.2.0)", "gssapi ; platform_system == \"Linux\"", "k5test ; platform_system == \"Linux\"", "mypy (>=1.8.0,<1.9.0)", "sspilib ; platform_system == \"Windows\"", "uvloop (>=0.15.3) ; platform_system != \"Windows\" and python_version < \"3.14.0\""]
+[[package]]
+name = "attrs"
+version = "25.3.0"
+description = "Classes Without Boilerplate"
+optional = false
+python-versions = ">=3.8"
+groups = ["main"]
+files = [
+    {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"},
+    {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"},
+]
+
+[package.extras]
+benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"]
+tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"]
+tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""]
+
 [[package]]
 name = "authlib"
 version =
"1.6.0" @@ -277,35 +417,6 @@ files = [ {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"}, ] -[[package]] -name = "caio" -version = "0.9.24" -description = "Asynchronous file IO for Linux MacOS or Windows." -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "caio-0.9.24-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d80322126a97ba572412b17b2f086ff95195de2c4261deb19db6bfcdc9ef7540"}, - {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_aarch64.whl", hash = "sha256:37bc172349686139e8dc97fff7662c67b1837e18a67b99e8ef25585f2893d013"}, - {file = "caio-0.9.24-cp310-cp310-manylinux_2_34_x86_64.whl", hash = "sha256:ad7f0902bf952237e120606252c14ab3cb05995c9f79f39154b5248744864832"}, - {file = "caio-0.9.24-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:925b9e3748ce1a79386dfb921c0aee450e43225534551abd1398b1c08f9ba29f"}, - {file = "caio-0.9.24-cp311-cp311-manylinux_2_34_aarch64.whl", hash = "sha256:3b4dc0a8fb9a58ab40f967ad5a8a858cc0bfb2348a580b4142595849457f9c9a"}, - {file = "caio-0.9.24-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:fa74d111b3b165bfad2e333367976bdf118bcf505a1cb44d3bcddea2849e3297"}, - {file = "caio-0.9.24-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ae3566228383175265a7583107f21a7cb044a752ea29ba84fce7c1a49a05903"}, - {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_aarch64.whl", hash = "sha256:a306b0dda91cb4ca3170f066c114597f8ea41b3da578574a9d2b54f86963de68"}, - {file = "caio-0.9.24-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:8ee158e56128d865fb7d57a9c9c22fca4e8aa8d8664859c977a36fff3ccb3609"}, - {file = "caio-0.9.24-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d47ef8d76aca74c17cb07339a441c5530fc4b8dd9222dfb1e1abd7f9f9b814f"}, - {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_aarch64.whl", hash = "sha256:d15fc746c4bf0077d75df05939d1e97c07ccaa8e580681a77021d6929f65d9f4"}, - {file = "caio-0.9.24-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:9368eae0a9badd5f31264896c51b47431d96c0d46f1979018fb1d20c49f56156"}, - {file = "caio-0.9.24-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f0e5a645ef4e7bb7a81e10ae2a7aef14988cb2cb4354588c6bf6f6f3f6de72a"}, - {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_aarch64.whl", hash = "sha256:08304fa80af7771c78a5bcc923449c7ec8134d589b50d48c66320f85552c7ae2"}, - {file = "caio-0.9.24-cp39-cp39-manylinux_2_34_x86_64.whl", hash = "sha256:5339ced0764e10242a50ccb21db7f0d9c359881db0f72fa2c5e45ed828ffacf7"}, - {file = "caio-0.9.24.tar.gz", hash = "sha256:5bcdecaea02a9aa8e3acf0364eff8ad9903d57d70cdb274a42270126290a77f1"}, -] - -[package.extras] -develop = ["aiomisc-pytest", "coveralls", "pylama[toml]", "pytest", "pytest-cov", "setuptools"] - [[package]] name = "cel-python" version = "0.2.0" @@ -723,6 +834,120 @@ files = [ {file = "escapism-1.0.1.tar.gz", hash = "sha256:73256bdfb4f22230f0428fc6efecee61cdc4fad531b6f98b849cb9c80711e4ec"}, ] +[[package]] +name = "frozenlist" +version = "1.7.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a"}, + {file = "frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61"}, + {file = 
"frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615"}, + {file = "frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd"}, + {file = "frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718"}, + {file = "frozenlist-1.7.0-cp310-cp310-win32.whl", hash = "sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e"}, + {file = "frozenlist-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464"}, + {file = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a"}, + {file = "frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750"}, + {file = "frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86"}, + {file = "frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898"}, + {file = "frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56"}, + {file = "frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7"}, + {file = "frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d"}, + {file = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2"}, + {file = "frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb"}, + {file = "frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e"}, + {file = "frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5"}, + {file = 
"frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08"}, + {file = "frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43"}, + {file = "frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3"}, + {file = "frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a"}, + {file = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee"}, + {file = "frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d"}, + {file = "frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60"}, + {file = "frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b"}, + {file = "frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e"}, + {file = "frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1"}, + {file = "frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = 
"sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba"}, + {file = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d"}, + {file = "frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d"}, + {file = "frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384"}, + {file = "frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104"}, + {file = "frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf"}, + {file = "frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81"}, + {file = "frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e"}, + {file = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:cea3dbd15aea1341ea2de490574a4a37ca080b2ae24e4b4f4b51b9057b4c3630"}, + {file = "frozenlist-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d536ee086b23fecc36c2073c371572374ff50ef4db515e4e503925361c24f71"}, + {file = "frozenlist-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dfcebf56f703cb2e346315431699f00db126d158455e513bd14089d992101e44"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974c5336e61d6e7eb1ea5b929cb645e882aadab0095c5a6974a111e6479f8878"}, + {file = 
"frozenlist-1.7.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c70db4a0ab5ab20878432c40563573229a7ed9241506181bba12f6b7d0dc41cb"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1137b78384eebaf70560a36b7b229f752fb64d463d38d1304939984d5cb887b6"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e793a9f01b3e8b5c0bc646fb59140ce0efcc580d22a3468d70766091beb81b35"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:74739ba8e4e38221d2c5c03d90a7e542cb8ad681915f4ca8f68d04f810ee0a87"}, + {file = "frozenlist-1.7.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e63344c4e929b1a01e29bc184bbb5fd82954869033765bfe8d65d09e336a677"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2ea2a7369eb76de2217a842f22087913cdf75f63cf1307b9024ab82dfb525938"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:836b42f472a0e006e02499cef9352ce8097f33df43baaba3e0a28a964c26c7d2"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e22b9a99741294b2571667c07d9f8cceec07cb92aae5ccda39ea1b6052ed4319"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:9a19e85cc503d958abe5218953df722748d87172f71b73cf3c9257a91b999890"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:f22dac33bb3ee8fe3e013aa7b91dc12f60d61d05b7fe32191ffa84c3aafe77bd"}, + {file = "frozenlist-1.7.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ccec739a99e4ccf664ea0775149f2749b8a6418eb5b8384b4dc0a7d15d304cb"}, + {file = "frozenlist-1.7.0-cp39-cp39-win32.whl", hash = "sha256:b3950f11058310008a87757f3eee16a8e1ca97979833239439586857bc25482e"}, + {file = "frozenlist-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:43a82fce6769c70f2f5a06248b614a7d268080a9d20f7457ef10ecee5af82b63"}, + {file = "frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e"}, + {file = "frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f"}, +] + [[package]] name = "genson" version = "1.3.0" @@ -1208,6 +1433,26 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] +[[package]] +name = "kubernetes-asyncio" +version = "32.3.2" +description = "Kubernetes asynchronous python client" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "kubernetes_asyncio-32.3.2-py3-none-any.whl", hash = "sha256:3584e6358571e686ea1396fc310890263c58fa41c084a841a9609a54ad05de62"}, + {file = "kubernetes_asyncio-32.3.2.tar.gz", hash = "sha256:a4b6da1dbfa16e87ab8df6898f54fa03f651591a502bd6480a647d5513a580bf"}, +] + +[package.dependencies] +aiohttp = ">=3.9.0,<4.0.0" +certifi = ">=14.05.14" +python-dateutil = ">=2.5.3" +pyyaml = ">=3.12" +six = ">=1.9.0" +urllib3 = ">=1.24.2" + [[package]] name = "lark" version = "0.12.0" @@ -1615,6 +1860,114 @@ files = [ prometheus-client = ">=0.7.1,<0.8.0" sanic = ">=22.0.0" +[[package]] +name = "propcache" +version = "0.3.2" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770"}, + {file = "propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3"}, + {file = "propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614"}, + {file = "propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b"}, + {file = "propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c"}, + {file = "propcache-0.3.2-cp310-cp310-win32.whl", hash = "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70"}, + {file = "propcache-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9"}, + {file = "propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be"}, + {file = "propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f"}, + {file = "propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66"}, + {file = "propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df"}, + 
{file = "propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf"}, + {file = "propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e"}, + {file = "propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897"}, + {file = "propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39"}, + {file = "propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10"}, + {file = "propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154"}, + {file = "propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67"}, + {file = "propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06"}, + {file = "propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1"}, + {file = "propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1"}, + {file = "propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c"}, + {file = "propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945"}, + {file = "propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252"}, + {file = "propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3"}, + {file = "propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206"}, + {file = "propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43"}, + {file = "propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02"}, + {file = "propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05"}, + {file = "propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b"}, + {file = "propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0"}, + {file = "propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28"}, + {file = 
"propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725"}, + {file = "propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770"}, + {file = "propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330"}, + {file = "propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394"}, + {file = "propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198"}, + {file = "propcache-0.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a7fad897f14d92086d6b03fdd2eb844777b0c4d7ec5e3bac0fbae2ab0602bbe5"}, + {file = "propcache-0.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1f43837d4ca000243fd7fd6301947d7cb93360d03cd08369969450cc6b2ce3b4"}, + {file = "propcache-0.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:261df2e9474a5949c46e962065d88eb9b96ce0f2bd30e9d3136bcde84befd8f2"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e514326b79e51f0a177daab1052bc164d9d9e54133797a3a58d24c9c87a3fe6d"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4a996adb6904f85894570301939afeee65f072b4fd265ed7e569e8d9058e4ec"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76cace5d6b2a54e55b137669b30f31aa15977eeed390c7cbfb1dafa8dfe9a701"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31248e44b81d59d6addbb182c4720f90b44e1efdc19f58112a3c3a1615fb47ef"}, + {file = "propcache-0.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abb7fa19dbf88d3857363e0493b999b8011eea856b846305d8c0512dfdf8fbb1"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d81ac3ae39d38588ad0549e321e6f773a4e7cc68e7751524a22885d5bbadf886"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:cc2782eb0f7a16462285b6f8394bbbd0e1ee5f928034e941ffc444012224171b"}, + {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_i686.whl", 
hash = "sha256:db429c19a6c7e8a1c320e6a13c99799450f411b02251fb1b75e6217cf4a14fcb"},
+    {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:21d8759141a9e00a681d35a1f160892a36fb6caa715ba0b832f7747da48fb6ea"},
+    {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2ca6d378f09adb13837614ad2754fa8afaee330254f404299611bce41a8438cb"},
+    {file = "propcache-0.3.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:34a624af06c048946709f4278b4176470073deda88d91342665d95f7c6270fbe"},
+    {file = "propcache-0.3.2-cp39-cp39-win32.whl", hash = "sha256:4ba3fef1c30f306b1c274ce0b8baaa2c3cdd91f645c48f06394068f37d3837a1"},
+    {file = "propcache-0.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:7a2368eed65fc69a7a7a40b27f22e85e7627b74216f0846b04ba5c116e191ec9"},
+    {file = "propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f"},
+    {file = "propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168"},
+]
+
 [[package]]
 name = "protobuf"
 version = "5.29.4"
@@ -2853,7 +3206,126 @@ files = [
 [package.dependencies]
 h11 = ">=0.9.0,<1"
+[[package]]
+name = "yarl"
+version = "1.20.1"
+description = "Yet another URL library"
+optional = false
+python-versions = ">=3.9"
+groups = ["main"]
+files = [
+    {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4"},
+    {file = "yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a"},
+    {file = "yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed"},
+    {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e"},
+    {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73"},
+    {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e"},
+    {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8"},
+    {file = "yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23"},
+    {file = "yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70"},
+    {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb"},
+    {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2"},
+    {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30"},
+    {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309"},
+    {file = "yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24"},
{file
= "yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13"}, + {file = "yarl-1.20.1-cp310-cp310-win32.whl", hash = "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8"}, + {file = "yarl-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16"}, + {file = "yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e"}, + {file = "yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b"}, + {file = "yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8"}, + {file = "yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1"}, + {file = "yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e"}, + {file = "yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773"}, + {file = "yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e"}, + {file = "yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9"}, + {file = "yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a"}, + {file = "yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd"}, + {file = "yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a"}, + {file = "yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004"}, + {file = "yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5"}, + {file = "yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698"}, + {file = "yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a"}, + {file = "yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3"}, + {file = "yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5"}, + {file = "yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b"}, + {file = "yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1"}, + {file = "yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7"}, + {file = "yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c"}, + {file = "yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d"}, + {file = "yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf"}, + {file = "yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3"}, + {file = "yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458"}, + {file = "yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e"}, + {file = "yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d"}, + {file = "yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f"}, + {file = "yarl-1.20.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e42ba79e2efb6845ebab49c7bf20306c4edf74a0b20fc6b2ccdd1a219d12fad3"}, + {file = "yarl-1.20.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:41493b9b7c312ac448b7f0a42a089dffe1d6e6e981a2d76205801a023ed26a2b"}, + {file = "yarl-1.20.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f5a5928ff5eb13408c62a968ac90d43f8322fd56d87008b8f9dabf3c0f6ee983"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30c41ad5d717b3961b2dd785593b67d386b73feca30522048d37298fee981805"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:59febc3969b0781682b469d4aca1a5cab7505a4f7b85acf6db01fa500fa3f6ba"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d2b6fb3622b7e5bf7a6e5b679a69326b4279e805ed1699d749739a61d242449e"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:749d73611db8d26a6281086f859ea7ec08f9c4c56cec864e52028c8b328db723"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9427925776096e664c39e131447aa20ec738bdd77c049c48ea5200db2237e000"}, + {file = "yarl-1.20.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff70f32aa316393eaf8222d518ce9118148eddb8a53073c2403863b41033eed5"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c7ddf7a09f38667aea38801da8b8d6bfe81df767d9dfc8c88eb45827b195cd1c"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:57edc88517d7fc62b174fcfb2e939fbc486a68315d648d7e74d07fac42cec240"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:dab096ce479d5894d62c26ff4f699ec9072269d514b4edd630a393223f45a0ee"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:14a85f3bd2d7bb255be7183e5d7d6e70add151a98edf56a770d6140f5d5f4010"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c89b5c792685dd9cd3fa9761c1b9f46fc240c2a3265483acc1565769996a3f8"}, + {file = "yarl-1.20.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:69e9b141de5511021942a6866990aea6d111c9042235de90e08f94cf972ca03d"}, + {file = "yarl-1.20.1-cp39-cp39-win32.whl", hash = "sha256:b5f307337819cdfdbb40193cad84978a029f847b0a357fbe49f712063cfc4f06"}, + {file = "yarl-1.20.1-cp39-cp39-win_amd64.whl", hash = "sha256:eae7bfe2069f9c1c5b05fc7fe5d612e5bbc089a39309904ee8b829e322dcad00"}, + {file = "yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77"}, + {file = "yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.1" + [metadata] lock-version = "2.1" python-versions = "^3.13" -content-hash = "f026cd687c8ddf81d556e50abb84521956adac7e28d4b78602c7b04731a98b78" +content-hash = "8485ea470b23747241ba91df9771f6302689b14ebbf1c02059d690f28eb99510" diff --git a/projects/secrets_storage/pyproject.toml 
b/projects/secrets_storage/pyproject.toml index 626b18bd4..8bbcadf75 100644 --- a/projects/secrets_storage/pyproject.toml +++ b/projects/secrets_storage/pyproject.toml @@ -25,7 +25,6 @@ packages = [ { include = "renku_data_services/errors", from = "../../components" }, { include = "renku_data_services/git", from = "../../components" }, { include = "renku_data_services/k8s", from = "../../components" }, - { include = "renku_data_services/k8s_watcher", from = "../../components" }, { include = "renku_data_services/message_queue", from = "../../components" }, { include = "renku_data_services/migrations", from = "../../components" }, { include = "renku_data_services/namespace", from = "../../components" }, @@ -56,6 +55,7 @@ pyjwt = { extras = ["crypto"], version = "^2.10.1" } tenacity = "^9.0.0" httpx = "<0.29" kubernetes = "^31.0.0" +kubernetes-asyncio = "^32.0.0" python-ulid = "^3.0.0" python-gitlab = "^5.6.0" psycopg = { version = "^3.2.3", extras = ["binary"] } @@ -69,7 +69,7 @@ cryptography = "^44.0.1" authzed = "^1.20.0" # see https://github.com/sanic-org/sanic/issues/2828 for setuptools dependency, remove when not needed anymore setuptools = { version = "^75.8.2" } -aiofile = "^3.9.0" +aiofiles = "^24.1.0" # Not a direct dependency, it is needed by authzed. Was causing things to crash at startup beacuse of # google.protobuf.runtime_version.VersionError: # Detected incompatible Protobuf Gencode/Runtime versions when loading authzed/api/v1/core.proto: gencode 5.28.2 runtime 5.27.3. diff --git a/test/bases/renku_data_services/data_api/__snapshots__/test_projects.ambr b/test/bases/renku_data_services/data_api/__snapshots__/test_projects.ambr index 33c594a3e..b4b4616a6 100644 --- a/test/bases/renku_data_services/data_api/__snapshots__/test_projects.ambr +++ b/test/bases/renku_data_services/data_api/__snapshots__/test_projects.ambr @@ -82,6 +82,7 @@ 'mount_directory': '/home/renku/work', 'name': 'Launcher', 'port': 8888, + 'strip_path_prefix': False, 'uid': 1000, 'working_directory': '/home/renku/work', }) diff --git a/test/bases/renku_data_services/data_api/__snapshots__/test_sessions.ambr b/test/bases/renku_data_services/data_api/__snapshots__/test_sessions.ambr index a508d42ef..8b7fe8acd 100644 --- a/test/bases/renku_data_services/data_api/__snapshots__/test_sessions.ambr +++ b/test/bases/renku_data_services/data_api/__snapshots__/test_sessions.ambr @@ -17,6 +17,7 @@ 'mount_directory': '/home/jovyan/work', 'name': 'Python/Jupyter', 'port': 8888, + 'strip_path_prefix': False, 'uid': 1000, 'working_directory': '/home/jovyan/work', }), @@ -36,6 +37,7 @@ 'mount_directory': '/home/jovyan/work', 'name': 'Rstudio', 'port': 8888, + 'strip_path_prefix': False, 'uid': 1000, 'working_directory': '/home/jovyan/work', }), @@ -47,6 +49,7 @@ 'is_archived': False, 'name': 'Environment 1', 'port': 8080, + 'strip_path_prefix': False, 'uid': 1000, }), dict({ @@ -57,6 +60,7 @@ 'is_archived': False, 'name': 'Environment 2', 'port': 8080, + 'strip_path_prefix': False, 'uid': 1000, }), dict({ @@ -67,6 +71,7 @@ 'is_archived': False, 'name': 'Environment 3', 'port': 8080, + 'strip_path_prefix': False, 'uid': 1000, }), dict({ @@ -77,6 +82,7 @@ 'is_archived': True, 'name': 'Environment 4', 'port': 8080, + 'strip_path_prefix': False, 'uid': 1000, }), ]) @@ -94,6 +100,7 @@ 'is_archived': False, 'name': 'Test', 'port': 8080, + 'strip_path_prefix': False, 'uid': 1000, }), 'name': 'Launcher 1', @@ -109,6 +116,7 @@ 'is_archived': False, 'name': 'Test', 'port': 8080, + 'strip_path_prefix': False, 'uid': 1000, }), 'name': 
'Launcher 2', @@ -124,6 +132,7 @@ 'is_archived': False, 'name': 'Test', 'port': 8080, + 'strip_path_prefix': False, 'uid': 1000, }), 'name': 'Launcher 3', diff --git a/test/bases/renku_data_services/data_api/__snapshots__/test_storage.ambr b/test/bases/renku_data_services/data_api/__snapshots__/test_storage.ambr index e2c9ff941..5b4f0a80a 100644 --- a/test/bases/renku_data_services/data_api/__snapshots__/test_storage.ambr +++ b/test/bases/renku_data_services/data_api/__snapshots__/test_storage.ambr @@ -9029,38 +9029,6 @@ 'description': 'premiumize.me', 'name': 'premiumizeme', 'options': list([ - dict({ - 'advanced': False, - 'default': '', - 'default_str': '', - 'exclusive': False, - 'help': ''' - OAuth Client Id. - - Leave blank normally. - ''', - 'ispassword': False, - 'name': 'client_id', - 'required': False, - 'sensitive': True, - 'type': 'string', - }), - dict({ - 'advanced': False, - 'default': '', - 'default_str': '', - 'exclusive': False, - 'help': ''' - OAuth Client Secret. - - Leave blank normally. - ''', - 'ispassword': False, - 'name': 'client_secret', - 'required': False, - 'sensitive': True, - 'type': 'string', - }), dict({ 'advanced': True, 'default': '', @@ -14699,24 +14667,6 @@ 'sensitive': True, 'type': 'string', }), - dict({ - 'advanced': False, - 'default': '', - 'default_str': '', - 'exclusive': False, - 'help': ''' - Path to PEM-encoded private key file. - - Leave blank or set key-use-agent to use ssh-agent. - - Leading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`. - ''', - 'ispassword': False, - 'name': 'key_file', - 'required': False, - 'sensitive': False, - 'type': 'string', - }), dict({ 'advanced': False, 'default': '', @@ -14750,48 +14700,6 @@ 'sensitive': False, 'type': 'string', }), - dict({ - 'advanced': False, - 'default': '', - 'default_str': '', - 'exclusive': False, - 'help': ''' - Optional path to public key file. - - Set this if you have a signed certificate you want to use for authentication. - - Leading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`. - ''', - 'ispassword': False, - 'name': 'pubkey_file', - 'required': False, - 'sensitive': False, - 'type': 'string', - }), - dict({ - 'advanced': True, - 'default': '', - 'default_str': '', - 'examples': list([ - dict({ - 'help': "Use OpenSSH's known_hosts file.", - 'value': '~/.ssh/known_hosts', - }), - ]), - 'exclusive': False, - 'help': ''' - Optional path to known_hosts file. - - Set this value to enable server host key validation. - - Leading `~` will be expanded in the file name as will environment variables such as `${RCLONE_CONFIG_DIR}`. - ''', - 'ispassword': False, - 'name': 'known_hosts_file', - 'required': False, - 'sensitive': False, - 'type': 'string', - }), dict({ 'advanced': False, 'default': False, @@ -15344,46 +15252,6 @@ 'sensitive': False, 'type': 'SpaceSepList', }), - dict({ - 'advanced': False, - 'default': list([ - ]), - 'default_str': '', - 'exclusive': False, - 'help': ''' - Path and arguments to external ssh binary. - - Normally rclone will use its internal ssh library to connect to the - SFTP server. However it does not implement all possible ssh options so - it may be desirable to use an external ssh binary. - - Rclone ignores all the internal config if you use this option and - expects you to configure the ssh binary with the user/host/port and - any other options you need. 
- - **Important** The ssh command must log in without asking for a - password so needs to be configured with keys or certificates. - - Rclone will run the command supplied either with the additional - arguments "-s sftp" to access the SFTP subsystem or with commands such - as "md5sum /path/to/file" appended to read checksums. - - Any arguments with spaces in should be surrounded by "double quotes". - - An example setting might be: - - ssh -o ServerAliveInterval=20 user@example.com - - Note that when using an external ssh binary rclone makes a new ssh - connection for every hash it calculates. - - ''', - 'ispassword': False, - 'name': 'ssh', - 'required': False, - 'sensitive': False, - 'type': 'SpaceSepList', - }), dict({ 'advanced': True, 'default': '', diff --git a/test/bases/renku_data_services/data_api/test_connected_services.py b/test/bases/renku_data_services/data_api/test_connected_services.py index 7c2ba1c61..1847f38c7 100644 --- a/test/bases/renku_data_services/data_api/test_connected_services.py +++ b/test/bases/renku_data_services/data_api/test_connected_services.py @@ -214,9 +214,12 @@ async def test_patch_oauth2_provider(sanic_client: SanicASGITestClient, admin_he payload = { "app_slug": "my-new-example", + "kind": "generic_oidc", "display_name": "New display name", "scope": "read write", "url": "https://my-new-example.org", + "image_registry_url": "https://a-registry/", + "oidc_issuer_url": "https://my-issuer", } _, res = await sanic_client.patch(f"/api/data/oauth2/providers/{provider_id}", headers=admin_headers, json=payload) @@ -224,9 +227,12 @@ async def test_patch_oauth2_provider(sanic_client: SanicASGITestClient, admin_he assert res.status_code == 200, res.text assert res.json is not None assert res.json.get("app_slug") == "my-new-example" + assert res.json.get("kind") == "generic_oidc" assert res.json.get("display_name") == "New display name" assert res.json.get("scope") == "read write" assert res.json.get("url") == "https://my-new-example.org" + assert res.json.get("image_registry_url") == "https://a-registry/" + assert res.json.get("oidc_issuer_url") == "https://my-issuer" @pytest.mark.asyncio @@ -400,7 +406,7 @@ async def test_get_installations_gitlab( async def test_get_installations_github( oauth2_test_client: SanicASGITestClient, user_headers, create_oauth2_connection ): - connection = await create_oauth2_connection("provider_1", kind="github") + connection = await create_oauth2_connection("provider_1", kind="github", url="https://github.com") connection_id = connection["id"] _, res = await oauth2_test_client.get( @@ -421,3 +427,20 @@ async def test_get_installations_github( assert res.headers.get("per-page") == "20" assert res.headers.get("total") == "1" assert res.headers.get("total-pages") == "1" + + +@pytest.mark.asyncio +async def test_get_no_installations_ghcrio( + oauth2_test_client: SanicASGITestClient, user_headers, create_oauth2_connection +): + connection = await create_oauth2_connection("provider_1", kind="github", image_registry_url="https://ghcr.io") + connection_id = connection["id"] + + _, res = await oauth2_test_client.get( + f"/api/data/oauth2/connections/{connection_id}/installations", headers=user_headers + ) + + assert res.status_code == 200, res.text + assert res.json is not None + installations_list = res.json + assert len(installations_list) == 0 diff --git a/test/bases/renku_data_services/data_api/test_data_connectors.py b/test/bases/renku_data_services/data_api/test_data_connectors.py index 7f64a4306..38c7e6fd1 100644 --- 
a/test/bases/renku_data_services/data_api/test_data_connectors.py +++ b/test/bases/renku_data_services/data_api/test_data_connectors.py @@ -9,6 +9,7 @@ from renku_data_services.authz.models import Visibility from renku_data_services.base_models.core import NamespacePath, ProjectPath from renku_data_services.data_connectors import core +from renku_data_services.data_connectors.doi.models import DOIMetadata from renku_data_services.namespace.models import NamespaceKind from renku_data_services.storage.rclone import RCloneDOIMetadata from renku_data_services.users.models import UserInfo @@ -111,6 +112,12 @@ async def test_post_global_data_connector( provider="zenodo", ) _mock_get_doi_metadata(metadata=metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) + zenodo_metadata = DOIMetadata( + name="SwissDataScienceCenter/renku-python: Version 0.7.2", + description="""0.7.2 (2019-11-15)\nBug Fixes\n
    \n
  • ensure all Person instances have valid ids (85585d0), addresses #812
  • \n
""", # noqa E501 + keywords=[], + ) + _mock_get_dataset_metadata(metadata=zenodo_metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) payload = { "storage": { @@ -165,6 +172,12 @@ async def test_post_global_data_connector_dataverse( provider="dataverse", ) _mock_get_doi_metadata(metadata=metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) + dataverse_metadata = DOIMetadata( + name="Dataset metadata of known Dataverse installations, August 2024", + description="""

This dataset contains the metadata of the datasets published in 101 Dataverse installations, information about the metadata blocks of 106 installations, and the lists of pre-defined licenses or dataset terms that depositors can apply to datasets in the 88 installations that were running versions of the Dataverse software that include the "multiple-license" feature.\n\n

The data is useful for improving understandings about how certain Dataverse features and metadata fields are used and for learning about the quality of dataset and file-level metadata within and across Dataverse installations.\n\n

How the metadata was downloaded\n

The dataset metadata and metadata block JSON files were downloaded from each installation between August 25 and August 30, 2024 using a "get_dataverse_installations_metadata" function in a collection of Python functions at https://github.com/jggautier/dataverse-scripts/blob/main/dataverse_repository_curation_assistant/dataverse_repository_curation_assistant_functions.py.\n\n

In order to get the metadata from installations that require an installation account API token to use certain Dataverse software APIs, I created a CSV file with two columns: one column named "hostname" listing each installation URL for which I was able to create an account and another column named "apikey" listing my accounts\' API tokens. The Python script expects the CSV file and the listed API tokens to get metadata and other information from installations that require API tokens in order to use certain API endpoints.\n\n

How the files are organized\n\n

\n├── csv_files_with_metadata_from_most_known_dataverse_installations\n│\xa0\xa0 ├── author_2024.08.25-2024.08.30.csv\n│\xa0\xa0 ├── contributor_2024.08.25-2024.08.30.csv\n│\xa0\xa0 ├── data_source_2024.08.25-2024.08.30.csv\n│\xa0\xa0 ├── ...\n│\xa0\xa0 └── topic_classification_2024.08.25-2024.08.30.csv\n├── dataverse_json_metadata_from_each_known_dataverse_installation\n│\xa0\xa0 ├── Abacus_2024.08.26_15.52.42.zip\n│\xa0\xa0\xa0\xa0\xa0\xa0 ├── dataset_pids_Abacus_2024.08.26_15.52.42.csv\n│\xa0\xa0\xa0\xa0\xa0\xa0 ├── Dataverse_JSON_metadata_2024.08.26_15.52.42\n│\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0  ├── hdl_11272.1_AB2_0AQZNT_v1.0(latest_version).json\n│\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0  ├── ...\n│\xa0\xa0\xa0\xa0\xa0\xa0 ├── metadatablocks_v5.9\n│\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0  ├── astrophysics_v5.9.json\n│\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0  ├── biomedical_v5.9.json\n│\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0  ├── citation_v5.9.json\n│\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0  ├── ...\n│\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0\xa0  ├── socialscience_v5.6.json\n│\xa0\xa0 ├── ACSS_Dataverse_2024.08.26_00.02.51.zip\n│\xa0\xa0 ├── ...\n│\xa0\xa0 └── Yale_Dataverse_2024.08.25_03.52.57.zip\n└── dataverse_installations_summary_2024.08.30.csv\n└── dataset_pids_from_most_known_dataverse_installations_2024.08.csv\n└── license_options_for_each_dataverse_installation_2024.08.28_14.42.54.csv\n└── metadatablocks_from_most_known_dataverse_installations_2024.08.30.csv\n\n
\n\n

This dataset contains two directories and four CSV files not in a directory.\n

One directory, "csv_files_with_metadata_from_most_known_dataverse_installations", contains 20 CSV files that list the values of many of the metadata fields in the "Citation" metadata block and "Geospatial" metadata block of datasets in the 101 Dataverse installations. For example, author_2024.08.25-2024.08.30.csv contains the "Author" metadata for the latest versions of all published, non-deaccessioned datasets in 101 installations, with a column for each of the four child fields: author name, affiliation, identifier type, and identifier.\n

The other directory, "dataverse_json_metadata_from_each_known_dataverse_installation", contains 106 zip files, one zip file for each of the 106 Dataverse installations whose sites were functioning when I attempted to collect their metadata. Each zip file contains a directory with JSON files that have information about the installation\'s metadata fields, such as the field names and how they\'re organized. For installations that had published datasets, and I was able to use Dataverse APIs to download the dataset metadata, the zip file also contains:\n

    \n
  • A CSV file listing information about the datasets published in the installation, including a column to indicate if the Python script was able to download the Dataverse JSON metadata for each dataset.\n
  • A directory of JSON files that contain the metadata of the installation\'s published, non-deaccessioned dataset versions in the Dataverse JSON metadata schema.\n
\n

The dataverse_installations_summary_2024.08.30.csv file contains information about each installation, including its name, URL, Dataverse software version, and counts of dataset metadata included and not included in this dataset.\n

The dataset_pids_from_most_known_dataverse_installations_2024.08.csv file contains the dataset PIDs of published datasets in 101 Dataverse installations, with a column to indicate if the Python script was able to download the dataset\'s metadata. It\'s a union of all "dataset_pids_....csv" files in each of the 101 zip files in the dataverse_json_metadata_from_each_known_dataverse_installation directory.\n

The license_options_for_each_dataverse_installation_2024.08.28_14.42.54.csv file contains information about the licenses and data use agreements that some installations let depositors choose when creating datasets. When I collected this data, 88 of the available 106 installations were running versions of the Dataverse software that allow depositors to choose a "predefined license or data use agreement" from a dropdown menu in the dataset deposit form. For more information about this Dataverse feature, see https://guides.dataverse.org/en/5.14/user/dataset-management.html#choosing-a-license.\n

The metadatablocks_from_most_known_dataverse_installations_2024.08.30.csv file contains the metadata block names, field names, child field names (if the field is a compound field), display names, descriptions/tooltip text, and watermarks of fields in the 106 Dataverse installations\' metadata blocks. This file is useful for learning about the metadata fields and field structures used in each installation.\n\n

Known errors\n

The metadata of a few datasets from several known and functioning installations could not be downloaded.\n

In some cases, this is because of download timeouts caused by the datasets\' relatively large metadata exports, which contain information about the datasets\' large number of versions and files.\n

In other cases, datasets were publicly findable but in unpublished or deaccessioned states that prevented me from downloading their metadata export.\n\n

About metadata blocks\n

Read about the Dataverse software\'s metadata blocks system at http://guides.dataverse.org/en/6.3/admin/metadatacustomization.html""", # noqa E501 + keywords=["dataset metadata", "dataverse", "metadata blocks"], + ) + _mock_get_dataset_metadata(metadata=dataverse_metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) doi = "10.7910/DVN/2SA6SN" payload = { @@ -240,6 +253,12 @@ async def test_post_global_data_connector_no_duplicates( provider="zenodo", ) _mock_get_doi_metadata(metadata=metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) + zenodo_metadata = DOIMetadata( + name="SwissDataScienceCenter/renku-python: Version 0.7.2", + description="""0.7.2 (2019-11-15)\nBug Fixes\n

    \n
  • ensure all Person instances have valid ids (85585d0), addresses #812
  • \n
""", # noqa E501 + keywords=[], + ) + _mock_get_dataset_metadata(metadata=zenodo_metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) doi = "10.5281/zenodo.2600782" payload = { @@ -933,6 +952,12 @@ async def test_patch_global_data_connector( provider="zenodo", ) _mock_get_doi_metadata(metadata=metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) + zenodo_metadata = DOIMetadata( + name="SwissDataScienceCenter/renku-python: Version 0.7.2", + description="""0.7.2 (2019-11-15)\nBug Fixes\n
    \n
  • ensure all Person instances have valid ids (85585d0), addresses #812
  • \n
""", # noqa E501 + keywords=[], + ) + _mock_get_dataset_metadata(metadata=zenodo_metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) doi = "10.5281/zenodo.2600782" payload = { @@ -1009,6 +1034,12 @@ async def test_delete_global_data_connector( provider="zenodo", ) _mock_get_doi_metadata(metadata=metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) + zenodo_metadata = DOIMetadata( + name="SwissDataScienceCenter/renku-python: Version 0.7.2", + description="""0.7.2 (2019-11-15)\nBug Fixes\n
    \n
  • ensure all Person instances have valid ids (85585d0), addresses #812
  • \n
""", # noqa E501 + keywords=[], + ) + _mock_get_dataset_metadata(metadata=zenodo_metadata, sanic_client=sanic_client, monkeypatch=monkeypatch) doi = "10.5281/zenodo.2600782" payload = { @@ -2414,3 +2445,34 @@ async def _mock_get_doi_metadata(*args, **kwargs) -> RCloneDOIMetadata: return metadata monkeypatch.setattr(validator, "get_doi_metadata", _mock_get_doi_metadata) + + +def _mock_get_dataset_metadata(metadata: DOIMetadata, sanic_client: SanicASGITestClient, monkeypatch: "MonkeyPatch"): + """Mock the _get_dataset_metadata_invenio method.""" + + # The Zenodo API may be unresponsive, so we mock its response + from renku_data_services.data_connectors.doi import metadata as metadata_mod + + _orig_get_dataset_metadata_invenio = metadata_mod._get_dataset_metadata_invenio + _orig_get_dataset_metadata_dataverse = metadata_mod._get_dataset_metadata_dataverse + + def _mock_get_dataset_metadata(original_fn): + async def _mock(*args, **kwargs) -> DOIMetadata | None: + fetched_metadata = await original_fn(*args, **kwargs) + if fetched_metadata is not None: + assert fetched_metadata == metadata + return fetched_metadata + + warnings.warn("Could not retrieve DOI metadata, returning saved one", stacklevel=2) + return metadata + + return _mock + + monkeypatch.setattr( + metadata_mod, "_get_dataset_metadata_invenio", _mock_get_dataset_metadata(_orig_get_dataset_metadata_invenio) + ) + monkeypatch.setattr( + metadata_mod, + "_get_dataset_metadata_dataverse", + _mock_get_dataset_metadata(_orig_get_dataset_metadata_dataverse), + ) diff --git a/test/bases/renku_data_services/data_api/test_migrations.py b/test/bases/renku_data_services/data_api/test_migrations.py index 157757935..e75dfc370 100644 --- a/test/bases/renku_data_services/data_api/test_migrations.py +++ b/test/bases/renku_data_services/data_api/test_migrations.py @@ -9,6 +9,7 @@ from alembic.script import ScriptDirectory from sanic_testing.testing import SanicASGITestClient from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.exc import IntegrityError from sqlalchemy.ext.asyncio import AsyncSession from sqlalchemy.sql import bindparam from ulid import ULID @@ -609,3 +610,152 @@ async def test_migration_to_dcb9648c3c15(app_manager_instance: DependencyManager assert k8s_objs[1].tuple()[0] == "amalthea.dev" assert k8s_objs[1].tuple()[1] == "v1alpha1" assert k8s_objs[1].tuple()[2] == "jupyterserver" + + +@pytest.mark.asyncio +async def test_migration_to_c8061499b966(app_manager_instance: DependencyManager, admin_user: UserInfo) -> None: + run_migrations_for_app("common", "e117405fed51") + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + await session.execute( + sa.text( + "INSERT into " + "common.k8s_objects(name, namespace, manifest, deleted, kind, version, cluster, user_id) " + "VALUES ('name_pod', 'ns', '{}', FALSE, 'pod', 'v1', 'renkulab', 'user_id')" + ) + ) + await session.execute( + sa.text( + "INSERT into " + "common.k8s_objects(name, namespace, manifest, deleted, kind, version, cluster, user_id) " + "VALUES ('name_js', 'ns', '{}', FALSE, 'jupyterserver', 'amalthea.dev/v1alpha1', 'renkulab', 'user_id')" + ) + ) + run_migrations_for_app("common", "c8061499b966") + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + k8s_objs = (await session.execute(sa.text("SELECT name, cluster FROM common.k8s_objects"))).all() + assert len(k8s_objs) == 2 + # Check that the cluster name was changed + assert k8s_objs[0].tuple()[1] == "0RENK1RENK2RENK3RENK4RENK5" 
+ assert k8s_objs[1].tuple()[1] == "0RENK1RENK2RENK3RENK4RENK5" + id = ULID() + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + await session.execute( + sa.text( + "INSERT into " + "common.k8s_objects(name, namespace, manifest, deleted, kind, version, cluster, user_id) " + f"VALUES ('name_pod', 'ns', '{{}}', FALSE, 'pod', 'v1', '{id}', 'user_id')" + ) + ) + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + k8s_objs = (await session.execute(sa.text("SELECT name, cluster FROM common.k8s_objects"))).all() + # Check that we can insert another object with the same name, gvk, namespace, but a different cluster + assert len(k8s_objs) == 3 + + +@pytest.mark.asyncio +async def test_migration_to_66e2f1271cf6(app_manager_instance: DependencyManager, admin_user: UserInfo) -> None: + """Test the migration to deduplicate slugs and add constraints that prevent further duplicates.""" + + async def insert_project(session: AsyncSession, name: str) -> ULID: + proj_id = ULID() + now = datetime.now() + await session.execute( + sa.text( + "INSERT into " + "projects.projects(id, name, visibility, created_by_id, creation_date) " + f"VALUES ('{str(proj_id)}', '{name}', 'public', '{admin_user.id}', '{now.isoformat()}')" + ) + ) + return proj_id + + async def insert_slug( + session: AsyncSession, + slug: str, + namespace_id: ULID, + project_id: ULID | None = None, + data_connector_id: ULID | None = None, + ) -> None: + project_id_query = "NULL" if project_id is None else f"'{str(project_id)}'" + dc_id_query = "NULL" if data_connector_id is None else f"'{str(data_connector_id)}'" + await session.execute( + sa.text( + "INSERT into " + "common.entity_slugs(slug, namespace_id, project_id, data_connector_id) " + f"VALUES ('{slug}', '{str(namespace_id)}', {project_id_query}, {dc_id_query} )" + ) + ) + + async def insert_user_namespace(session: AsyncSession, user: UserInfo) -> None: + await session.execute( + sa.text(f"INSERT into users.users(keycloak_id) VALUES ('{user.namespace.underlying_resource_id}')") + ) + await session.execute( + sa.text( + "INSERT into " + "common.namespaces(id, slug, user_id) " + f"VALUES ('{user.namespace.id}', '{user.namespace.path.serialize()}', " + f"'{user.namespace.underlying_resource_id}' )" + ) + ) + + async def insert_data_connector(session: AsyncSession, name: str) -> ULID: + id = ULID() + now = datetime.now() + await session.execute( + sa.text( + "INSERT into " + "storage.data_connectors(id, name, visibility, storage_type, configuration, " + "source_path, target_path, created_by_id, readonly, creation_date) " + f"VALUES ('{str(id)}', '{name}', 'public', 's3', '{{}}', '/', '/', " + f"'{admin_user.namespace.underlying_resource_id}', FALSE, '{now.isoformat()}')" + ) + ) + return id + + run_migrations_for_app("common", "35ea9d8f54e8") + + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + await insert_user_namespace(session, admin_user) + # Two projects have duplicate slugs + p1_id = await insert_project(session, "p1") + p2_id = await insert_project(session, "p2") + await insert_slug(session, "p1", admin_user.namespace.id, p1_id) + await insert_slug(session, "p1", admin_user.namespace.id, p2_id) + # Two data connectors in the user namespace with duplicate slugs + dc1_id = await insert_data_connector(session, "dc1") + dc2_id = await insert_data_connector(session, "dc2") + await insert_slug(session, "d1", admin_user.namespace.id, None, dc1_id) + await 
insert_slug(session, "d1", admin_user.namespace.id, None, dc2_id) + # Two data connectors in a project namespace with duplicate slugs + p_for_dc_id = await insert_project(session, "p_for_dc") + await insert_slug(session, "p_for_dc", admin_user.namespace.id, p_for_dc_id, None) + p_dc1_id = await insert_data_connector(session, "p_dc1") + p_dc2_id = await insert_data_connector(session, "p_dc2") + await insert_slug(session, "dc_in_p", admin_user.namespace.id, p_for_dc_id, p_dc1_id) + await insert_slug(session, "dc_in_p", admin_user.namespace.id, p_for_dc_id, p_dc2_id) + + # There are duplicated slugs + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + res = await session.execute(sa.text("select distinct slug FROM common.entity_slugs")) + all_rows = res.all() + assert len(all_rows) == 4 # 3 duplicates + 1 slug for the project which holds data connectors + + run_migrations_for_app("common", "66e2f1271cf6") + + # One project's slug should be renamed and the two slugs are now distinct + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + res = await session.execute(sa.text("select distinct slug FROM common.entity_slugs")) + all_rows = res.all() + assert len(all_rows) == 7 # 3 x 2 dedepulicated slugs + 1 for the project which holds data connectors + + # Adding more duplicated slugs should error out + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + p3_id = await insert_project(session, "p3") + with pytest.raises(IntegrityError): + await insert_slug(session, "p1", admin_user.namespace.id, p3_id) + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + with pytest.raises(IntegrityError): + await insert_slug(session, "d1", admin_user.namespace.id, None, dc2_id) + async with app_manager_instance.config.db.async_session_maker() as session, session.begin(): + with pytest.raises(IntegrityError): + await insert_slug(session, "dc_in_p", admin_user.namespace.id, p_for_dc_id, p_dc2_id) diff --git a/test/bases/renku_data_services/data_api/test_namespaces.py b/test/bases/renku_data_services/data_api/test_namespaces.py index 99bbc1f9b..5e3ccd0ca 100644 --- a/test/bases/renku_data_services/data_api/test_namespaces.py +++ b/test/bases/renku_data_services/data_api/test_namespaces.py @@ -1,8 +1,31 @@ +import contextlib + import pytest from sqlalchemy import select - +from sqlalchemy.exc import IntegrityError + +from renku_data_services.authz.models import Visibility +from renku_data_services.base_models.core import ( + AuthenticatedAPIUser, + DataConnectorInProjectPath, + DataConnectorPath, + DataConnectorSlug, + NamespacePath, + ProjectPath, + ProjectSlug, +) from renku_data_services.data_api.dependencies import DependencyManager +from renku_data_services.data_connectors.models import ( + CloudStorageCore, + DataConnector, + DataConnectorPatch, + GlobalDataConnector, + UnsavedDataConnector, +) +from renku_data_services.errors.errors import ConflictError, MissingResourceError, ValidationError +from renku_data_services.namespace.models import UnsavedGroup from renku_data_services.namespace.orm import EntitySlugORM +from renku_data_services.project.models import Project, ProjectPatch, UnsavedProject from renku_data_services.users.models import UserInfo @@ -532,3 +555,197 @@ async def test_cleanup_with_group_deletion( # The data connector in the user namespace is still there _, response = await 
sanic_client.get(f"/api/data/data_connectors/{dc3_id}", headers=user_headers) assert response.status_code == 200 + + +def __project_patch_namespace(new_ns: str) -> ProjectPatch: + return ProjectPatch( + namespace=new_ns, + name=None, + slug=None, + visibility=None, + repositories=None, + description=None, + keywords=None, + documentation=None, + template_id=None, + is_template=None, + secrets_mount_directory=None, + ) + + +def __dc_patch_namespace(new_ns: ProjectPath | NamespacePath) -> DataConnectorPatch: + return DataConnectorPatch( + name=None, + namespace=new_ns, + slug=None, + visibility=None, + description=None, + keywords=None, + storage=None, + ) + + +def __dc_patch_slug(new_slug: str) -> DataConnectorPatch: + return DataConnectorPatch( + name=None, + namespace=None, + slug=new_slug, + visibility=None, + description=None, + keywords=None, + storage=None, + ) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "path1, path2, path2_patch, exc_type", + [ + ( + ProjectPath.from_strings("grp1", "prj1"), + ProjectPath.from_strings("grp2", "prj1"), + __project_patch_namespace("grp1"), + IntegrityError, + ), + ( + DataConnectorPath.from_strings("grp1", "dc1"), + DataConnectorPath.from_strings("grp2", "dc1"), + __dc_patch_namespace(NamespacePath.from_strings("grp1")), + ValidationError, + ), + ( + DataConnectorInProjectPath.from_strings("grp1", "prj1", "dc1"), + DataConnectorInProjectPath.from_strings("grp2", "prj1", "dc1"), + __dc_patch_namespace(ProjectPath.from_strings("grp1", "prj1")), + ValidationError, + ), + ( + DataConnectorInProjectPath.from_strings("grp1", "prj1", "dc1"), + DataConnectorInProjectPath.from_strings("grp1", "prj2", "dc1"), + __dc_patch_namespace(ProjectPath.from_strings("grp1", "prj1")), + ValidationError, + ), + ( + DataConnectorInProjectPath.from_strings("grp1", "prj1", "dc1"), + DataConnectorInProjectPath.from_strings("grp1", "prj1", "dc2"), + __dc_patch_slug("dc1"), + ValidationError, + ), + ( + DataConnectorPath.from_strings("grp1", "dc1"), + DataConnectorPath.from_strings("grp1", "dc2"), + __dc_patch_slug("dc1"), + ValidationError, + ), + ], +) +async def test_avoiding_slug_conflicts_with_updates( + sanic_client, + app_manager_instance: DependencyManager, + admin_user: UserInfo, + admin_headers: dict[str, str], + path1: ProjectPath | DataConnectorPath | DataConnectorInProjectPath, + path2: ProjectPath | DataConnectorPath | DataConnectorInProjectPath, + path2_patch: ProjectPatch | DataConnectorPatch, + exc_type: type[Exception] | None, +) -> None: + access_token = admin_headers.get("Authorization", "")[8:] + user = AuthenticatedAPIUser( + id=admin_user.id, access_token=access_token, email=admin_user.email or "user@google.com", is_admin=True + ) + storage_config = CloudStorageCore( + storage_type="", + configuration={"type": "s3", "endpoint": "http://s3.aws.com"}, + source_path="giab", + target_path="giab", + readonly=False, + ) + # Create groups + await app_manager_instance.group_repo.insert_group( + user, UnsavedGroup(name=path1.first.value, slug=path1.first.value) + ) + try: + await app_manager_instance.group_repo.get_group(user, path2.first) + except MissingResourceError: + await app_manager_instance.group_repo.insert_group( + user, UnsavedGroup(name=path2.first.value, slug=path2.first.value) + ) + prj2: Project | None = None + dc2: DataConnector | GlobalDataConnector | None = None + # Create first set of project and dc + if isinstance(path1.second, ProjectSlug): + await app_manager_instance.project_repo.insert_project( + user, + UnsavedProject( + 
name=path1.second.value, + slug=path1.second.value, + created_by=user.id, + visibility=Visibility.PUBLIC, + namespace=path1.parent().serialize() if isinstance(path1, ProjectPath) else path1.first.value, + ), + ) + dc_slug_1: DataConnectorSlug | None = None + if isinstance(path1, DataConnectorPath): + dc_slug_1 = path1.second + elif isinstance(path1, DataConnectorInProjectPath): + dc_slug_1 = path1.third + if dc_slug_1: + await app_manager_instance.data_connector_repo.insert_namespaced_data_connector( + user, + UnsavedDataConnector( + name=dc_slug_1.value, + slug=dc_slug_1.value, + visibility=Visibility.PUBLIC, + created_by=user.id, + storage=storage_config, + namespace=path1.parent(), + ), + ) + # Create second set of project and dc + if isinstance(path2.second, ProjectSlug): + try: + prj2 = await app_manager_instance.project_repo.insert_project( + user, + UnsavedProject( + name=path2.second.value, + slug=path2.second.value, + created_by=user.id, + visibility=Visibility.PUBLIC, + namespace=path2.first.value, + ), + ) + except ConflictError: + prj2 = await app_manager_instance.project_repo.get_project_by_namespace_slug( + user, path2.first.value, path2.second + ) + dc_slug_2: DataConnectorSlug | None = None + if isinstance(path2, DataConnectorPath): + dc_slug_2 = path2.second + elif isinstance(path2, DataConnectorInProjectPath): + dc_slug_2 = path2.third + if dc_slug_2: + try: + dc2 = await app_manager_instance.data_connector_repo.insert_namespaced_data_connector( + user, + UnsavedDataConnector( + name=dc_slug_2.value, + slug=dc_slug_2.value, + visibility=Visibility.PUBLIC, + created_by=user.id, + storage=storage_config, + namespace=path2.parent(), + ), + ) + except ConflictError: + assert not isinstance(path2, ProjectPath) + dc2 = await app_manager_instance.data_connector_repo.get_data_connector_by_slug(user, path2) + # Test patches + with pytest.raises(exc_type) if exc_type is not None else contextlib.nullcontext(): + if isinstance(path2_patch, ProjectPatch): + assert prj2 is not None + await app_manager_instance.project_repo.update_project(user, prj2.id, path2_patch) + elif isinstance(path2_patch, DataConnectorPatch): + assert dc2 is not None + await app_manager_instance.data_connector_repo.update_data_connector(user, dc2.id, path2_patch, dc2.etag) + else: + raise AssertionError("No update was performed") diff --git a/test/bases/renku_data_services/data_api/test_notebooks.py b/test/bases/renku_data_services/data_api/test_notebooks.py index d8261eee3..06c5d929a 100644 --- a/test/bases/renku_data_services/data_api/test_notebooks.py +++ b/test/bases/renku_data_services/data_api/test_notebooks.py @@ -14,9 +14,10 @@ from kr8s import NotFoundError from sanic_testing.testing import SanicASGITestClient +from renku_data_services.k8s.clients import K8sClusterClient from renku_data_services.k8s.constants import DEFAULT_K8S_CLUSTER -from renku_data_services.k8s.models import Cluster -from renku_data_services.k8s_watcher import K8sWatcher, k8s_object_handler +from renku_data_services.k8s.models import ClusterConnection +from renku_data_services.k8s.watcher import K8sWatcher, k8s_object_handler from renku_data_services.notebooks.api.classes.k8s_client import JupyterServerV1Alpha1Kr8s from renku_data_services.notebooks.constants import JUPYTER_SESSION_GVK @@ -173,6 +174,22 @@ def fake_gitlab(mocker, fake_gitlab_projects, fake_gitlab_project_info): return gitlab +async def wait_for(sanic_client: SanicASGITestClient, user_headers, server_name: str, max_timeout: int = 20): + res = None + waited = 0 + for 
t in list(range(0, max_timeout)): + waited = t + 1 + _, res = await sanic_client.get("/api/data/notebooks/servers", headers=user_headers) + if res.status_code == 200 and res.json["servers"].get(server_name) is not None: + return + await asyncio.sleep(1) # wait a bit for k8s events to be processed in the background + + raise Exception( + f"Timeout reached while waiting for {server_name} to be ready." + f" res {res.json if res is not None else None}, waited {waited} seconds" + ) + + @pytest.mark.asyncio async def test_version(sanic_client: SanicASGITestClient, user_headers): _, res = await sanic_client.get("/api/data/notebooks/version", headers=user_headers) @@ -241,11 +258,12 @@ async def test_check_docker_image(sanic_client: SanicASGITestClient, user_header assert res.status_code == expected_status_code, res.text +@pytest.mark.skip() class TestNotebooks(ClusterRequired): @pytest.fixture(scope="class", autouse=True) def amalthea(self, cluster, app_manager) -> Generator[None, None]: if cluster is not None: - setup_amalthea("amalthea-js", "amalthea", "0.12.2", cluster) + setup_amalthea("amalthea-js", "amalthea", "0.21.0", cluster) app_manager.config.nb_config._kr8s_api.push(asyncio.run(kr8s.asyncio.api())) yield @@ -253,13 +271,15 @@ def amalthea(self, cluster, app_manager) -> Generator[None, None]: @pytest_asyncio.fixture(scope="class", autouse=True) async def k8s_watcher(self, amalthea, app_manager) -> AsyncGenerator[None, None]: - clusters = [ - Cluster( - id=DEFAULT_K8S_CLUSTER, - namespace=app_manager.config.nb_config.k8s.renku_namespace, - api=app_manager.config.nb_config._kr8s_api.current, + clusters = { + DEFAULT_K8S_CLUSTER: K8sClusterClient( + ClusterConnection( + id=DEFAULT_K8S_CLUSTER, + namespace=app_manager.config.nb_config.k8s.renku_namespace, + api=app_manager.config.nb_config._kr8s_api.current, + ) ) - ] + } # sleep to give amalthea a chance to create the CRDs, otherwise the watcher can error out await asyncio.sleep(1) @@ -267,7 +287,7 @@ async def k8s_watcher(self, amalthea, app_manager) -> AsyncGenerator[None, None] handler=k8s_object_handler( app_manager.config.nb_config.k8s_db_cache, app_manager.metrics, app_manager.rp_repo ), - clusters={c.id: c for c in clusters}, + clusters=clusters, kinds=[JUPYTER_SESSION_GVK], db_cache=app_manager.config.nb_config.k8s_db_cache, ) @@ -306,6 +326,7 @@ async def test_log_retrieval( server_name = "unknown_server" if server_exists: server_name = jupyter_server.name + await wait_for(sanic_client, authenticated_user_headers, server_name) await asyncio.sleep(2) # wait a bit for k8s events to be processed in the background _, res = await sanic_client.get(f"/api/data/notebooks/logs/{server_name}", headers=authenticated_user_headers) @@ -351,6 +372,7 @@ async def test_patch_server( server_name = "unknown_server" if server_exists: server_name = jupyter_server.name + await wait_for(sanic_client, authenticated_user_headers, server_name) await asyncio.sleep(2) # wait a bit for k8s events to be processed in the background _, res = await sanic_client.patch( diff --git a/test/bases/renku_data_services/data_api/test_platform_config.py b/test/bases/renku_data_services/data_api/test_platform_config.py index eaca8525f..06a87e218 100644 --- a/test/bases/renku_data_services/data_api/test_platform_config.py +++ b/test/bases/renku_data_services/data_api/test_platform_config.py @@ -1,11 +1,16 @@ """Tests for platform config blueprints.""" +import urllib.parse + import pytest from sanic_testing.testing import SanicASGITestClient from 
renku_data_services.data_api.dependencies import DependencyManager from test.bases.renku_data_services.data_api.utils import merge_headers +DUMMY_ULID = "01FZ8RSZ9KAKYQZ1ZZZZZZZZZZ" +DUMMY_ULID_2 = "11FZ8RSZ9KAKYQZ1ZZZZZZZZZZ" + @pytest.mark.asyncio async def test_get_platform_config(app_manager: DependencyManager, sanic_client: SanicASGITestClient) -> None: @@ -48,3 +53,286 @@ async def test_patch_platform_config_unauthorized( _, res = await sanic_client.patch("/api/data/platform/config", headers=headers, json=payload) assert res.status_code == 403, res.text + + +@pytest.mark.asyncio +async def test_delete_redirect(sanic_client: SanicASGITestClient, admin_headers: dict[str, str]) -> None: + payload = {"source_url": "/projects/ns/project-slug", "target_url": f"/p/{DUMMY_ULID}"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 201, f"status code {res.status_code} != 201" + assert res.json is not None + url_redirect_plan = res.json + assert url_redirect_plan.get("source_url") == "/projects/ns/project-slug" + assert url_redirect_plan.get("target_url") == f"/p/{DUMMY_ULID}" + assert url_redirect_plan.get("etag") != "" + + encoded_url = urllib.parse.quote_plus("/projects/ns/project-slug") + delete_headers = merge_headers(admin_headers, {"If-Match": url_redirect_plan["etag"]}) + _, res = await sanic_client.delete(f"/api/data/platform/redirects/{encoded_url}", headers=delete_headers) + assert res.status_code == 204, f"status code {res.status_code} != 204" + + parameters = {"page": 1, "per_page": 5} + _, res = await sanic_client.get("/api/data/platform/redirects", headers=admin_headers, params=parameters) + + assert res.status_code == 200, f"status code {res.status_code} != 200" + assert res.json is not None + assert res.headers["page"] == "1" + assert res.headers["per-page"] == "5" + assert res.headers["total"] == "0" + assert res.headers["total-pages"] == "0" + + +@pytest.mark.asyncio +async def test_get_redirects(sanic_client: SanicASGITestClient, admin_headers: dict[str, str]) -> None: + parameters = {"page": 1, "per_page": 5} + headers = admin_headers + _, res = await sanic_client.get("/api/data/platform/redirects", headers=headers, params=parameters) + + assert res.status_code == 200, f"status code {res.status_code} != 200" + assert res.json is not None + assert res.headers["page"] == "1" + assert res.headers["per-page"] == "5" + assert res.headers["total"] == "0" + assert res.headers["total-pages"] == "0" + + +@pytest.mark.asyncio +async def test_get_redirect(sanic_client: SanicASGITestClient, admin_headers: dict[str, str]) -> None: + url = "/projects/ns/project-slug" + encoded_url = urllib.parse.quote_plus(url) + _, res = await sanic_client.get(f"/api/data/platform/redirects/{encoded_url}") + assert res.status_code == 404, res.text + assert res.json is not None + error_object = res.json + assert ( + error_object.get("error").get("message") + == f"A redirect for '{url}' does not exist or you do not have access to it." 
+    )
+    # getting registered redirects is tested in test_post_redirect
+
+
+@pytest.mark.asyncio
+async def test_patch_redirect(sanic_client: SanicASGITestClient, admin_headers: dict[str, str]) -> None:
+    payload = {"source_url": "/projects/ns/project-slug", "target_url": f"/p/{DUMMY_ULID}"}
+
+    _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload)
+    assert res.status_code == 201, res.text
+    assert res.json is not None
+    url_redirect_plan = res.json
+    assert url_redirect_plan.get("source_url") == "/projects/ns/project-slug"
+    assert url_redirect_plan.get("target_url") == f"/p/{DUMMY_ULID}"
+    assert url_redirect_plan.get("etag") != ""
+
+    encoded_url = urllib.parse.quote_plus("/projects/ns/project-slug")
+    _, res = await sanic_client.get(f"/api/data/platform/redirects/{encoded_url}")
+    assert res.status_code == 200, res.text
+    assert res.json is not None
+    assert res.json.get("source_url") == "/projects/ns/project-slug"
+    assert res.json.get("target_url") == f"/p/{DUMMY_ULID}"
+    assert res.json.get("etag") == url_redirect_plan.get("etag")
+
+    patch_headers = merge_headers(admin_headers, {"If-Match": url_redirect_plan["etag"]})
+    payload = {"target_url": f"/p/{DUMMY_ULID_2}"}
+    _, res = await sanic_client.patch(
+        f"/api/data/platform/redirects/{encoded_url}", headers=patch_headers, json=payload
+    )
+    assert res.status_code == 200, res.text
+    assert res.json is not None
+    updated_plan = res.json
+    assert updated_plan.get("source_url") == "/projects/ns/project-slug"
+    assert updated_plan.get("target_url") == f"/p/{DUMMY_ULID_2}"
+    assert updated_plan.get("etag") != ""
+
+    _, res = await sanic_client.get(f"/api/data/platform/redirects/{encoded_url}")
+    assert res.status_code == 200, res.text
+    assert res.json is not None
+    assert res.json.get("source_url") == "/projects/ns/project-slug"
+    assert res.json.get("target_url") == f"/p/{DUMMY_ULID_2}"
+
+
+@pytest.mark.asyncio
+async def test_patch_redirect_non_existent(
+    sanic_client: SanicASGITestClient,
+    admin_headers: dict[str, str],
+) -> None:
+    payload = {"target_url": f"/p/{DUMMY_ULID}"}
+    encoded_url = urllib.parse.quote_plus("/projects/ns/project-slug")
+    patch_headers = merge_headers(admin_headers, {"If-Match": "some-etag"})
+    _, res = await sanic_client.patch(
+        f"/api/data/platform/redirects/{encoded_url}", headers=patch_headers, json=payload
+    )
+    # should not allow patching a redirect that does not exist
+    assert res.status_code == 404, res.status_code
+    assert res.json is not None
+    assert (
+        res.json.get("error").get("message") == "A redirect for source URL '/projects/ns/project-slug' does not exist."
+    )
+
+
+@pytest.mark.asyncio
+async def test_patch_redirect_unauthorized(
+    sanic_client: SanicASGITestClient, admin_headers: dict[str, str], user_headers: dict[str, str]
+) -> None:
+    payload = {"target_url": f"/p/{DUMMY_ULID}"}
+    encoded_url = urllib.parse.quote_plus("/projects/ns/project-slug")
+    patch_headers = merge_headers(admin_headers, {"If-Match": "some-etag"})
+    _, res = await sanic_client.patch(
+        f"/api/data/platform/redirects/{encoded_url}", headers=patch_headers, json=payload
+    )
+    # should not allow patching a redirect that does not exist
+    assert res.status_code == 404, res.status_code
+    assert res.json is not None
+    assert (
+        res.json.get("error").get("message") == "A redirect for source URL '/projects/ns/project-slug' does not exist." 
+ ) + + payload = {"source_url": "/projects/ns/project-slug", "target_url": f"/p/{DUMMY_ULID}"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 201, f"status code {res.status_code} != 201" + url_redirect_plan = res.json + _, res = await sanic_client.get(f"/api/data/platform/redirects/{encoded_url}") + assert res.status_code == 200, res.text + assert res.json is not None + assert res.json.get("source_url") == "/projects/ns/project-slug" + assert res.json.get("target_url") == f"/p/{DUMMY_ULID}" + assert res.json.get("etag") == url_redirect_plan.get("etag") + + patch_headers = merge_headers(user_headers, {"If-Match": url_redirect_plan["etag"]}) + payload = {"target_url": "/p/ns2/project-slug2"} + _, res = await sanic_client.patch( + f"/api/data/platform/redirects/{encoded_url}", headers=patch_headers, json=payload + ) + assert res.status_code == 403, res.status_code + assert res.json is not None + assert res.json.get("error").get("message") == "You do not have the required permissions for this operation." + + +@pytest.mark.asyncio +async def test_post_redirect(sanic_client: SanicASGITestClient, admin_headers: dict[str, str]) -> None: + payload = {"source_url": "/projects/ns/project-slug", "target_url": f"/p/{DUMMY_ULID}"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 201, f"status code {res.status_code} != 201" + assert res.json is not None + url_redirect_plan_1 = res.json + assert url_redirect_plan_1.get("source_url") == "/projects/ns/project-slug" + assert url_redirect_plan_1.get("target_url") == f"/p/{DUMMY_ULID}" + assert url_redirect_plan_1.get("etag") != "" + + payload = {"source_url": "https://gitlab.renkulab.io/foo", "target_url": "https://github.com/foo"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 201, f"status code {res.status_code} != 201" + assert res.json is not None + url_redirect_plan_2 = res.json + assert url_redirect_plan_2.get("source_url") == "https://gitlab.renkulab.io/foo" + assert url_redirect_plan_2.get("target_url") == "https://github.com/foo" + assert url_redirect_plan_2.get("etag") != "" + + parameters = {"page": 1, "per_page": 5} + _, res = await sanic_client.get("/api/data/platform/redirects", headers=admin_headers, params=parameters) + + assert res.status_code == 200, f"status code {res.status_code} != 200" + assert res.json is not None + assert res.headers["page"] == "1" + assert res.headers["per-page"] == "5" + assert res.headers["total"] == "2" + assert res.headers["total-pages"] == "1" + + redirects = res.json + assert redirects[0].get("etag") == url_redirect_plan_1.get("etag") + + encoded_url = urllib.parse.quote_plus("/projects/ns/project-slug") + _, res = await sanic_client.get(f"/api/data/platform/redirects/{encoded_url}") + assert res.status_code == 200, f"status code {res.status_code} != 200" + assert res.json is not None + assert res.json.get("source_url") == "/projects/ns/project-slug" + assert res.json.get("target_url") == f"/p/{DUMMY_ULID}" + + +@pytest.mark.asyncio +async def test_post_redirect_input_validation(sanic_client: SanicASGITestClient, admin_headers: dict[str, str]) -> None: + payload = {"source_url": "/foo/ns/project-slug", "target_url": "/p/ns/project-slug"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert 
res.status_code == 422, f"status code {res.status_code} != 422" + assert res.json is not None + assert res.json.get("error").get("message") == "The source URL must start with /projects/." + + payload = {"source_url": "/projects/ns/project-slug", "target_url": "/p/ns/project-slug"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 422, f"status code {res.status_code} != 422" + assert res.json is not None + assert res.json.get("error").get("message") == "The target URL path must match the pattern /p/ULID." + + payload = {"source_url": "http://gitlab.renkulab.io/foo", "target_url": "http://github.com"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 422, f"status code {res.status_code} != 422" + assert res.json is not None + assert res.json.get("error").get("message") == "The source URL must use HTTPS." + + payload = {"source_url": "https://foo.bar/foo", "target_url": "http://github.com"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 422, f"status code {res.status_code} != 422" + assert res.json is not None + assert res.json.get("error").get("message") == "The source URL host must be gitlab.renkulab.io." + + payload = {"source_url": "https://gitlab.renkulab.io/foo//", "target_url": "http://github.com"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 422, f"status code {res.status_code} != 422" + assert res.json is not None + assert res.json.get("error").get("message") == "The source URL path is not canonical." + + payload = {"source_url": "https://gitlab.renkulab.io/foo/../../", "target_url": "http://github.com"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 422, f"status code {res.status_code} != 422" + assert res.json is not None + assert res.json.get("error").get("message") == "The source URL path is not canonical." + + payload = {"source_url": "https://gitlab.renkulab.io/foo", "target_url": "http://github.com/bar"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 422, f"status code {res.status_code} != 422" + assert res.json is not None + assert res.json.get("error").get("message") == "The target URL must use HTTPS." + + payload = {"source_url": "https://gitlab.renkulab.io/foo", "target_url": "https://github.com/bar?query=1"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 422, f"status code {res.status_code} != 422" + assert res.json is not None + assert res.json.get("error").get("message") == "The target URL must not include parameters, a query, or a fragment." + + payload = {"source_url": "https://gitlab.renkulab.io/foo", "target_url": "https://github.com/bar#fragment"} + _, res = await sanic_client.post("/api/data/platform/redirects", headers=admin_headers, json=payload) + assert res.status_code == 422, f"status code {res.status_code} != 422" + assert res.json is not None + assert res.json.get("error").get("message") == "The target URL must not include parameters, a query, or a fragment." 
+
+
+@pytest.mark.asyncio
+async def test_post_redirect_duplicate(sanic_client: SanicASGITestClient, admin_headers: dict[str, str]) -> None:
+    headers = admin_headers
+    payload = {"source_url": "/projects/ns/project-slug", "target_url": f"/p/{DUMMY_ULID}"}
+
+    _, res = await sanic_client.post("/api/data/platform/redirects", headers=headers, json=payload)
+    assert res.status_code == 201, res.text
+    assert res.json is not None
+    url_redirect_plan = res.json
+    assert url_redirect_plan.get("source_url") == "/projects/ns/project-slug"
+    assert url_redirect_plan.get("target_url") == f"/p/{DUMMY_ULID}"
+    assert url_redirect_plan.get("etag") != ""
+
+    payload = {"source_url": "/projects/ns/project-slug", "target_url": f"/p/{DUMMY_ULID_2}"}
+    _, res = await sanic_client.post("/api/data/platform/redirects", headers=headers, json=payload)
+    assert res.status_code == 409, (res.status_code, res.text)
+    assert res.json is not None
+    error_response = res.json
+    assert error_response.get("error").get("code") == 1409
+
+
+@pytest.mark.asyncio
+async def test_post_redirect_unauthorized(sanic_client: SanicASGITestClient, user_headers: dict[str, str]) -> None:
+    # A regular (non-admin) user must not be able to create redirects.
+
+    headers = user_headers
+    payload = {"source_url": "/projects/ns/project-slug", "target_url": f"/p/{DUMMY_ULID}"}
+
+    _, res = await sanic_client.post("/api/data/platform/redirects", headers=headers, json=payload)
+
+    assert res.status_code == 403, res.status_code
diff --git a/test/bases/renku_data_services/data_api/test_resource_pools.py b/test/bases/renku_data_services/data_api/test_resource_pools.py
index 76058a41e..b2646a798 100644
--- a/test/bases/renku_data_services/data_api/test_resource_pools.py
+++ b/test/bases/renku_data_services/data_api/test_resource_pools.py
@@ -70,7 +70,7 @@ async def test_resource_pool_creation(
         payload["cluster_id"] = None
 
     _, res = await create_rp(payload, sanic_client)
-    assert res.status_code == expected_status_code
+    assert res.status_code == expected_status_code, res.text
 
 
 @pytest.mark.parametrize(
@@ -109,6 +109,54 @@ async def test_resource_pool_creation_with_cluster_ids(
     assert res.json["cluster"]["id"] == payload["cluster_id"]
 
 
+@pytest.mark.parametrize(
+    "payload,expected_status_code",
+    resource_pool_payload,
+)
+@pytest.mark.asyncio
+async def test_resource_pool_creation_with_remote(
+    sanic_client: SanicASGITestClient,
+    admin_headers: dict[str, str],
+    payload: dict[str, Any],
+    expected_status_code: int,
+) -> None:
+    # Create an OAuth2 provider for the remote configuration to reference
+    provider_payload = {
+        "id": "some-provider",
+        "kind": "gitlab",
+        "client_id": "some-client-id",
+        "display_name": "my oauth2 application",
+        "scope": "api",
+        "url": "https://example.org",
+    }
+    _, res = await sanic_client.post("/api/data/oauth2/providers", headers=admin_headers, json=provider_payload)
+    assert res.status_code == 201, res.text
+
+    if "cluster_id" in payload:
+        payload["cluster_id"] = None
+    payload["default"] = False
+    payload["public"] = False
+    payload["remote"] = {
+        "kind": "firecrest",
+        "provider_id": provider_payload["id"],
+        "api_url": "https://example.org",
+        "system_name": "my-system",
+    }
+
+    _, res = await create_rp(payload, sanic_client)
+    assert res.status_code == expected_status_code, res.text
+
+    if 200 <= res.status_code < 400:
+        assert res.json is not None
+        rp = res.json
+        assert rp.get("remote") == {
+            "kind": "firecrest",
+            "provider_id": provider_payload["id"],
+            "api_url": "https://example.org",
+            "system_name": "my-system",
+        }
+
+
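+# NOTE: the remote configuration set above can be cleared again by PATCHing an empty
+# object for "remote"; test_resource_pool_patch_remote further below sends exactly
+# this request and then asserts that the "remote" key is absent from the response:
+#
+#     patch = {"default": False, "public": False, "remote": {}}
+#     _, res = await sanic_client.patch(
+#         f"/api/data/resource_pools/{rp_id}", headers=admin_headers, json=patch
+#     )
+#
+
+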
@pytest.mark.asyncio async def test_resource_pool_quotas( sanic_client: SanicASGITestClient, valid_resource_pool_payload: dict[str, Any] @@ -1182,3 +1230,62 @@ async def test_resource_pools_delete( else: _, res = await sanic_client.delete(url) assert res.status_code == expected_status_code, res.text + + +@pytest.mark.asyncio +async def test_resource_pool_patch_remote( + sanic_client: SanicASGITestClient, + admin_headers: dict[str, str], +) -> None: + # Create a provider + provider_payload = { + "id": "some-provider", + "kind": "gitlab", + "client_id": "some-client-id", + "display_name": "my oauth2 application", + "scope": "api", + "url": "https://example.org", + } + _, res = await sanic_client.post("/api/data/oauth2/providers", headers=admin_headers, json=provider_payload) + assert res.status_code == 201, res.text + + # First, create a non-remote resource pool + payload = deepcopy(resource_pool_payload) + if "cluster_id" in payload: + payload["cluster_id"] = None + + _, res = await create_rp(payload, sanic_client) + assert res.status_code == 201, res.text + rp_id = res.json["id"] + + # Patch with the remote configuration + patch = { + "default": False, + "public": False, + "remote": { + "kind": "firecrest", + "provider_id": provider_payload["id"], + "api_url": "https://example.org", + "system_name": "my-system", + }, + } + + _, res = await sanic_client.patch(f"/api/data/resource_pools/{rp_id}", headers=admin_headers, json=patch) + assert res.status_code == 200, res.text + assert res.json is not None + rp = res.json + assert rp.get("remote") == { + "kind": "firecrest", + "provider_id": provider_payload["id"], + "api_url": "https://example.org", + "system_name": "my-system", + } + + # Patch to reset the resource pool + patch = {"default": False, "public": False, "remote": {}} + + _, res = await sanic_client.patch(f"/api/data/resource_pools/{rp_id}", headers=admin_headers, json=patch) + assert res.status_code == 200, res.text + assert res.json is not None + rp = res.json + assert "remote" not in rp diff --git a/test/bases/renku_data_services/data_api/test_schemathesis.py b/test/bases/renku_data_services/data_api/test_schemathesis.py index f53faaa79..c4e4a522b 100644 --- a/test/bases/renku_data_services/data_api/test_schemathesis.py +++ b/test/bases/renku_data_services/data_api/test_schemathesis.py @@ -51,7 +51,7 @@ async def apispec(sanic_client: SanicASGITestClient) -> BaseOpenAPISchema: @schemathesis.hook def filter_headers(context: HookContext, headers: dict[str, str] | None) -> bool: op = context.operation - if headers is not None and op.method.upper() == "PATCH": + if headers is not None and (op.method.upper() == "PATCH" or op.method.upper() == "DELETE"): try: [h.encode("ascii") for h in headers.values()] except UnicodeEncodeError: diff --git a/test/bases/renku_data_services/data_api/test_secret.py b/test/bases/renku_data_services/data_api/test_secret.py index 2e6f48e3f..6fde573fb 100644 --- a/test/bases/renku_data_services/data_api/test_secret.py +++ b/test/bases/renku_data_services/data_api/test_secret.py @@ -11,7 +11,7 @@ from ulid import ULID from renku_data_services.base_models.core import InternalServiceAdmin, ServiceAdminId -from renku_data_services.secrets.core import rotate_encryption_keys, rotate_single_encryption_key +from renku_data_services.k8s.models import K8sSecret from renku_data_services.secrets.models import Secret, SecretKind from renku_data_services.secrets_storage_api.dependencies import DependencyManager from renku_data_services.users import apispec @@ -309,17 
+309,19 @@ async def test_secret_encryption_decryption( _, response = await secrets_sanic_client.post("/api/secrets/kubernetes", headers=user_headers, json=payload) assert response.status_code == 201 - assert "test-secret" in secrets_storage_app_manager.core_client.secrets - k8s_secret = secrets_storage_app_manager.core_client.secrets["test-secret"].data - assert k8s_secret.keys() == {"secret-1", "secret-2"} + assert "test-secret" in secrets_storage_app_manager.secret_client.secrets + k8s_secret: K8sSecret = secrets_storage_app_manager.secret_client.secrets["test-secret"] + secrets = k8s_secret.manifest.get("data", {}) + + assert secrets.keys() == {"secret-1", "secret-2"} _, response = await sanic_client.get("/api/data/user/secret_key", headers=user_headers) assert response.status_code == 200 assert "secret_key" in response.json secret_key = response.json["secret_key"] - assert decrypt_string(secret_key.encode(), "user", b64decode(k8s_secret["secret-1"])) == "value-1" - assert decrypt_string(secret_key.encode(), "user", b64decode(k8s_secret["secret-2"])) == "value-2" + assert decrypt_string(secret_key.encode(), "user", b64decode(secrets["secret-1"])) == "value-1" + assert decrypt_string(secret_key.encode(), "user", b64decode(secrets["secret-2"])) == "value-2" @pytest.mark.asyncio @@ -359,19 +361,20 @@ async def test_secret_encryption_decryption_with_key_mapping( _, response = await secrets_sanic_client.post("/api/secrets/kubernetes", headers=user_headers, json=payload) assert response.status_code == 201 - assert "test-secret" in secrets_storage_app_manager.core_client.secrets - k8s_secret = secrets_storage_app_manager.core_client.secrets["test-secret"].data - assert k8s_secret.keys() == {"access_key_id", "secret_access_key", "secret-3-one", "secret-3-two"} + assert "test-secret" in secrets_storage_app_manager.secret_client.secrets + k8s_secret: K8sSecret = secrets_storage_app_manager.secret_client.secrets["test-secret"] + secrets = k8s_secret.manifest.get("data", {}) + assert secrets.keys() == {"access_key_id", "secret_access_key", "secret-3-one", "secret-3-two"} _, response = await sanic_client.get("/api/data/user/secret_key", headers=user_headers) assert response.status_code == 200 assert "secret_key" in response.json secret_key = response.json["secret_key"] - assert decrypt_string(secret_key.encode(), "user", b64decode(k8s_secret["access_key_id"])) == "value-1" - assert decrypt_string(secret_key.encode(), "user", b64decode(k8s_secret["secret_access_key"])) == "value-2" - assert decrypt_string(secret_key.encode(), "user", b64decode(k8s_secret["secret-3-one"])) == "value-3" - assert decrypt_string(secret_key.encode(), "user", b64decode(k8s_secret["secret-3-two"])) == "value-3" + assert decrypt_string(secret_key.encode(), "user", b64decode(secrets["access_key_id"])) == "value-1" + assert decrypt_string(secret_key.encode(), "user", b64decode(secrets["secret_access_key"])) == "value-2" + assert decrypt_string(secret_key.encode(), "user", b64decode(secrets["secret-3-one"])) == "value-3" + assert decrypt_string(secret_key.encode(), "user", b64decode(secrets["secret-3-two"])) == "value-3" # NOTE: Test missing secret_id in key mapping payload["key_mapping"] = {secret1_id: "access_key_id"} @@ -415,7 +418,7 @@ async def test_single_secret_rotation(): data_connector_ids=[], ) - rotated_secret = await rotate_single_encryption_key(secret, user_id, new_key, old_key) + rotated_secret = await secret.rotate_single_encryption_key(user_id, new_key, old_key) assert rotated_secret is not None with 
pytest.raises(ValueError): @@ -428,7 +431,7 @@ async def test_single_secret_rotation(): # ensure that rotating again does nothing - result = await rotate_single_encryption_key(rotated_secret, user_id, new_key, old_key) + result = await rotated_secret.rotate_single_encryption_key(user_id, new_key, old_key) assert result is None @@ -443,11 +446,10 @@ async def test_secret_rotation( new_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) admin = InternalServiceAdmin(id=ServiceAdminId.secrets_rotation) - await rotate_encryption_keys( + await secrets_storage_app_manager.user_secrets_repo.rotate_encryption_keys( admin, new_key, secrets_storage_app_manager.config.secrets.private_key, - secrets_storage_app_manager.user_secrets_repo, batch_size=5, ) diff --git a/test/bases/renku_data_services/data_api/test_sessions.py b/test/bases/renku_data_services/data_api/test_sessions.py index 7ef585363..760c0df09 100644 --- a/test/bases/renku_data_services/data_api/test_sessions.py +++ b/test/bases/renku_data_services/data_api/test_sessions.py @@ -1493,3 +1493,29 @@ async def test_patch_build(sanic_client: SanicASGITestClient, user_headers, crea assert response.json.get("created_at") is not None assert response.json.get("status") == "cancelled" assert response.json.get("result") is None + + +@pytest.mark.asyncio +async def test_patch_strip_prefix( + sanic_client: SanicASGITestClient, admin_headers, create_project, create_session_launcher +) -> None: + project = await create_project("Project 1") + launcher = await create_session_launcher("Launcher 1", project_id=project["id"]) + launcher_id = launcher["id"] + assert "environment" in launcher + env = launcher["environment"] + assert not env["strip_path_prefix"] + + payload = { + "environment": { + "strip_path_prefix": True, + }, + } + + _, res = await sanic_client.patch(f"/api/data/session_launchers/{launcher_id}", headers=admin_headers, json=payload) + + assert res.status_code == 200, res.text + assert res.json is not None + assert "environment" in res.json + env = res.json["environment"] + assert env.get("strip_path_prefix") diff --git a/test/bases/renku_data_services/data_api/test_storage.py b/test/bases/renku_data_services/data_api/test_storage.py index cc6c121a8..5277bd2da 100644 --- a/test/bases/renku_data_services/data_api/test_storage.py +++ b/test/bases/renku_data_services/data_api/test_storage.py @@ -12,7 +12,7 @@ from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.migrations.core import run_migrations_for_app from renku_data_services.storage.rclone import RCloneValidator -from renku_data_services.storage.rclone_patches import BANNED_STORAGE, OAUTH_PROVIDERS +from renku_data_services.storage.rclone_patches import BANNED_SFTP_OPTIONS, BANNED_STORAGE, OAUTH_PROVIDERS from renku_data_services.utils.core import get_openbis_session_token from test.utils import SanicReusableASGITestClient @@ -250,6 +250,21 @@ async def storage_test_client( 422, "", ), + ( + { + "project_id": "123456", + "name": "mystorage", + "configuration": { + "type": "sftp", + "host": "myhost", + "ssh": "ssh", # passing in banned option + }, + "source_path": "bucket/myfolder", + "target_path": "my/target", + }, + 422, + "", + ), ], ) @pytest.mark.asyncio @@ -509,6 +524,39 @@ async def test_storage_patch_unauthorized(storage_test_client, valid_storage_pay assert res.status_code == 403, res.text +@pytest.mark.asyncio +async def test_storage_patch_banned_option(storage_test_client, valid_storage_payload) -> None: + 
storage_test_client, _ = storage_test_client
+    # NOTE: The keycloak dummy client used to authorize the storage patch requests only has info
+    # on a user named Admin Doe; using a different user will fail with a 401 error.
+    access_token = json.dumps({"is_admin": False, "id": "some-id", "full_name": "Admin Doe"})
+    payload = dict(valid_storage_payload)
+    payload["configuration"] = {
+        "type": "sftp",
+        "host": "myhost",
+    }
+    _, res = await storage_test_client.post(
+        "/api/data/storage",
+        headers={"Authorization": f"bearer {access_token}"},
+        data=json.dumps(payload),
+    )
+    assert res.status_code == 201
+    assert res.json["storage"]["storage_type"] == "sftp"
+    storage_id = res.json["storage"]["storage_id"]
+
+    _, res = await storage_test_client.patch(
+        f"/api/data/storage/{storage_id}",
+        headers={"Authorization": f"bearer {access_token}"},
+        data=json.dumps(
+            {
+                "configuration": {"key_file": "my_key"},
+            }
+        ),
+    )
+    assert res.status_code == 422
+    assert "key_file option is not allowed" in res.text
+
+
 @pytest.mark.asyncio
 async def test_storage_obscure(storage_test_client) -> None:
     storage_test_client, _ = storage_test_client
@@ -666,9 +714,20 @@ async def test_storage_schema_patches(storage_test_client, snapshot) -> None:
     oauth_providers = [s for s in schema if s["prefix"] in OAUTH_PROVIDERS]
     assert all(o["name"] != "client_id" and o["name"] != "client_secret" for p in oauth_providers for o in p["options"])
 
+    # check that every OAUTH_PROVIDERS entry exists in the schema
+    not_exists = {p for p in OAUTH_PROVIDERS if p not in {s["prefix"] for s in schema}}
+    assert not_exists == set()
+
     # check custom webdav storage is added
     assert any(s["prefix"] == "polybox" for s in schema)
     assert any(s["prefix"] == "switchDrive" for s in schema)
+
+    # check that unsafe SFTP options are removed
+    sftp = next((e for e in schema if e["prefix"] == "sftp"), None)
+    assert sftp
+    assert all(o["name"] not in BANNED_SFTP_OPTIONS for o in sftp["options"])
+
+
+    # snapshot the schema
     assert schema == snapshot
diff --git a/test/components/renku_data_services/connected_services/test_db.py b/test/components/renku_data_services/connected_services/test_db.py
new file mode 100644
index 000000000..df4897c9b
--- /dev/null
+++ b/test/components/renku_data_services/connected_services/test_db.py
@@ -0,0 +1,240 @@
+"""Tests for the connected services database routines."""
+
+from dataclasses import dataclass
+from typing import cast
+
+import pytest
+
+from renku_data_services.base_models.core import APIUser
+from renku_data_services.connected_services.db import ConnectedServicesRepository, Image
+from renku_data_services.connected_services.models import (
+    ConnectionStatus,
+    OAuth2Client,
+    ProviderKind,
+    UnsavedOAuth2Client,
+)
+from renku_data_services.connected_services.orm import OAuth2ConnectionORM
+from renku_data_services.data_api.dependencies import DependencyManager
+from renku_data_services.migrations.core import run_migrations_for_app
+from renku_data_services.users.db import UserRepo
+from renku_data_services.users.models import UserInfo
+
+github_image = Image.from_path("ghcr.io/sdsc/test")
+gitlab_image = Image.from_path("registry.gitlab.com/sdsc/test")
+
+
+@dataclass
+class SetupData:
+    admin: APIUser
+    admin_info: UserInfo
+    user1: APIUser
+    user1_info: UserInfo
+    user2: APIUser
+    user2_info: UserInfo
+    deps: DependencyManager
+
+    @property
+    def connected_repo(self) -> ConnectedServicesRepository:
+        return self.deps.connected_services_repo
+
+    async def insert_client(self, id: str, kind: ProviderKind, registry_url: str) -> OAuth2Client:
+        c = 
UnsavedOAuth2Client( + id=id, + app_slug=f"{id}-slug", + url=f"https://{id}.com", + kind=kind, + client_id="cid", + client_secret="secret", + display_name=f"{kind} {id}", + scope="read:registry", + use_pkce=False, + image_registry_url=registry_url if registry_url != "" else None, + ) + return await self.connected_repo.insert_oauth2_client(self.admin, c) + + async def connect( + self, user: APIUser, provider_id: str, status: ConnectionStatus = ConnectionStatus.connected + ) -> OAuth2ConnectionORM: + repo = self.connected_repo + provider = await repo.get_oauth2_client(provider_id, self.admin) + + async with self.deps.config.db.async_session_maker() as session, session.begin(): + if user.id is None: + raise Exception(f"user {user} has no id") + conn = OAuth2ConnectionORM( + user_id=user.id, + client_id=provider.id, + token={"access_token": "bla"}, + status=status, + state=None, + code_verifier=None, + next_url=None, + ) + session.add(conn) + return conn + + +async def setup_users(app_manager_instance: DependencyManager) -> SetupData: + user_repo: UserRepo = app_manager_instance.kc_user_repo + admin = APIUser(id="admin1", first_name="Ad", last_name="Min", is_admin=True, access_token="token_a") + user1 = APIUser(id="id-123", first_name="Mads", last_name="Pedersen", access_token="token_u1") + user2 = APIUser(id="id-234", first_name="Wout", last_name="van Art", access_token="token_u2") + admin_info = cast(UserInfo, await user_repo.get_or_create_user(admin, str(admin.id))) + user1_info = cast(UserInfo, await user_repo.get_or_create_user(user1, str(user1.id))) + user2_info = cast(UserInfo, await user_repo.get_or_create_user(user2, str(user2.id))) + return SetupData(admin, admin_info, user1, user1_info, user2, user2_info, app_manager_instance) + + +@pytest.mark.asyncio +async def test_get_provider_for_image_no_provider(app_manager_instance: DependencyManager) -> None: + run_migrations_for_app("common") + setup = await setup_users(app_manager_instance) + db = setup.connected_repo + p = await db.get_provider_for_image(setup.user1, github_image) + assert p is None + + +@pytest.mark.asyncio +async def test_get_provider_for_image_no_connection(app_manager_instance) -> None: + run_migrations_for_app("common") + + setup = await setup_users(app_manager_instance) + await setup.insert_client("github", ProviderKind.github, "https://ghcr.io") + + p = await setup.connected_repo.get_provider_for_image(setup.user1, github_image) + assert p is not None + assert p.connected_user is None + assert p.registry_url == "https://ghcr.io" + assert p.provider.id == "github" + + +@pytest.mark.asyncio +async def test_get_provider_for_image_no_connection_for_user(app_manager_instance) -> None: + run_migrations_for_app("common") + + setup = await setup_users(app_manager_instance) + client = await setup.insert_client("github", ProviderKind.github, "https://ghcr.io") + await setup.connect(setup.user1, client.id) + + p = await setup.connected_repo.get_provider_for_image(setup.user2, github_image) + assert p is not None + assert p.connected_user is None + assert p.registry_url == "https://ghcr.io" + assert p.provider.id == client.id + + +@pytest.mark.asyncio +async def test_get_provider_for_image_provider_with_connection(app_manager_instance) -> None: + run_migrations_for_app("common") + + setup = await setup_users(app_manager_instance) + client = await setup.insert_client("github", ProviderKind.github, "https://ghcr.io") + await setup.connect(setup.user1, client.id) + + p = await 
setup.connected_repo.get_provider_for_image(setup.user1, github_image) + assert p is not None + assert p.connected_user is not None + assert p.registry_url == "https://ghcr.io" + assert p.provider.id == "github" + assert p.connected_user.connection.status == ConnectionStatus.connected + assert p.is_connected() + + +@pytest.mark.asyncio +async def test_get_provider_for_image_provider_with_pending_connection(app_manager_instance) -> None: + run_migrations_for_app("common") + + setup = await setup_users(app_manager_instance) + client = await setup.insert_client("github", ProviderKind.github, "https://ghcr.io") + await setup.connect(setup.user1, client.id, ConnectionStatus.pending) + + p = await setup.connected_repo.get_provider_for_image(setup.user1, github_image) + assert p is not None + assert p.connected_user is not None + assert p.registry_url == "https://ghcr.io" + assert p.provider.id == "github" + assert p.connected_user.connection.status == ConnectionStatus.pending + assert not p.is_connected() + + +@pytest.mark.asyncio +async def test_get_provider_for_image_multiple_user(app_manager_instance) -> None: + run_migrations_for_app("common") + + setup = await setup_users(app_manager_instance) + client = await setup.insert_client("github", ProviderKind.github, "https://ghcr.io") + conn1 = await setup.connect(setup.user1, client.id) + await setup.connect(setup.user2, client.id) + + p = await setup.connected_repo.get_provider_for_image(setup.user1, github_image) + assert p is not None + assert p.connected_user is not None + assert p.registry_url == "https://ghcr.io" + assert p.provider.id == "github" + assert p.connected_user.connection.id == conn1.id + assert p.is_connected() + + +@pytest.mark.asyncio +async def test_get_provider_for_image_multiple_options(app_manager_instance) -> None: + run_migrations_for_app("common") + + setup = await setup_users(app_manager_instance) + client1 = await setup.insert_client("github1", ProviderKind.github, "https://ghcr.io") + client2 = await setup.insert_client("github2", ProviderKind.github, "https://ghcr.io") + await setup.connect(setup.user1, client1.id) + conn2 = await setup.connect(setup.user1, client2.id) + + p = await setup.connected_repo.get_provider_for_image(setup.user1, github_image) + assert p is not None + assert p.connected_user is not None + assert p.registry_url == "https://ghcr.io" + assert p.provider.id == "github2" + assert p.connected_user.connection.id == conn2.id + assert p.is_connected() + + +@pytest.mark.asyncio +async def test_get_provider_for_image_no_registry_url(app_manager_instance) -> None: + run_migrations_for_app("common") + + setup = await setup_users(app_manager_instance) + await setup.insert_client("github", ProviderKind.github, "https://ghcr.io") + + p = await setup.connected_repo.get_provider_for_image(setup.user1, gitlab_image) + assert p is None + + +@pytest.mark.asyncio +async def test_get_provider_for_image_unsupported_provider(app_manager_instance) -> None: + run_migrations_for_app("common") + + setup = await setup_users(app_manager_instance) + await setup.insert_client("google-drive", ProviderKind.drive, "") + + p = await setup.connected_repo.get_provider_for_image(setup.user1, gitlab_image) + assert p is None + + +@pytest.mark.asyncio +async def test_delete_owned_connection(app_manager_instance: DependencyManager) -> None: + run_migrations_for_app("common") + + setup = await setup_users(app_manager_instance) + client = await setup.insert_client("github", ProviderKind.github, "https://ghcr.io") + conn = await 
setup.connect(setup.user2, client.id) + + result = await setup.connected_repo.delete_oauth2_connection(setup.user2, str(conn.id)) + assert result + + +@pytest.mark.asyncio +async def test_not_delete_non_owned_connection(app_manager_instance: DependencyManager) -> None: + run_migrations_for_app("common") + + setup = await setup_users(app_manager_instance) + client = await setup.insert_client("github", ProviderKind.github, "https://ghcr.io") + conn = await setup.connect(setup.user2, client.id) + + result = await setup.connected_repo.delete_oauth2_connection(setup.user1, str(conn.id)) + assert not result diff --git a/test/components/renku_data_services/connected_services/test_encryption.py b/test/components/renku_data_services/connected_services/test_encryption.py index 3c345467f..7c0e5b9b7 100644 --- a/test/components/renku_data_services/connected_services/test_encryption.py +++ b/test/components/renku_data_services/connected_services/test_encryption.py @@ -4,7 +4,7 @@ from sqlalchemy import select from renku_data_services.base_models import APIUser -from renku_data_services.connected_services import apispec +from renku_data_services.connected_services import models from renku_data_services.connected_services import orm as schemas from renku_data_services.data_api.dependencies import DependencyManager from renku_data_services.migrations.core import run_migrations_for_app @@ -39,9 +39,10 @@ async def test_token_encryption( async def test_client_secret_encryption(app_manager_instance: DependencyManager, admin_user: APIUser) -> None: run_migrations_for_app("common") connected_services_repo = app_manager_instance.connected_services_repo - new_client = apispec.ProviderPost( + new_client = models.UnsavedOAuth2Client( id="provider", - kind=apispec.ProviderKind.gitlab, + app_slug="", + kind=models.ProviderKind.gitlab, client_id="CLIENT_ID", client_secret="CLIENT_SECRET", # nosec display_name="My Provider", diff --git a/test/components/renku_data_services/k8s/test_k8s_adapter.py b/test/components/renku_data_services/k8s/test_k8s_adapter.py index 89f6188b2..3fd308ba9 100644 --- a/test/components/renku_data_services/k8s/test_k8s_adapter.py +++ b/test/components/renku_data_services/k8s/test_k8s_adapter.py @@ -17,7 +17,7 @@ from renku_data_services.crc import models from renku_data_services.k8s.clients import DummyCoreClient, DummySchedulingClient -from renku_data_services.k8s.quota import QuotaRepository +from renku_data_services.k8s.db import QuotaRepository from renku_data_services.notebooks.api.classes.auth import RenkuTokens from renku_data_services.notebooks.api.classes.k8s_client import NotebookK8sClient from renku_data_services.notebooks.util.kubernetes_ import find_env_var @@ -26,20 +26,20 @@ def test_dummy_core_client() -> None: core_client = DummyCoreClient({}, {}) - quotas = core_client.list_namespaced_resource_quota("default") - assert len(quotas.items) == 0 + quotas = core_client.list_resource_quota("default", "") + assert len(quotas) == 0 assert len(core_client.quotas) == 0 quota_name = "test" quota = client.V1ResourceQuota( metadata={"name": quota_name}, spec=client.V1ResourceQuotaSpec(hard={"requests.cpu": 1}) ) - core_client.create_namespaced_resource_quota("default", quota) - quotas = core_client.list_namespaced_resource_quota("default") - assert len(quotas.items) == 1 + core_client.create_resource_quota("default", quota) + quotas = core_client.list_resource_quota("default", "") + assert len(quotas) == 1 assert len(core_client.quotas) == 1 - 
core_client.delete_namespaced_resource_quota(quota_name, "default") - quotas = core_client.list_namespaced_resource_quota("default") - assert len(quotas.items) == 0 + core_client.delete_resource_quota(quota_name, "default") + quotas = core_client.list_resource_quota("default", "") + assert len(quotas) == 0 assert len(core_client.quotas) == 0 @@ -50,7 +50,7 @@ def test_dummy_scheduling_client() -> None: pc = client.V1PriorityClass(global_default=False, value=100, metadata=client.V1ObjectMeta(name=pc_name)) scheduling_client.create_priority_class(pc) assert len(scheduling_client.pcs) == 1 - scheduling_client.delete_priority_class(pc_name) + scheduling_client.delete_priority_class(pc_name, body=client.V1DeleteOptions()) assert len(scheduling_client.pcs) == 0 diff --git a/test/components/renku_data_services/notebooks/test_notebooks_session_patching.py b/test/components/renku_data_services/notebooks/test_notebooks_session_patching.py new file mode 100644 index 000000000..47f89aeb0 --- /dev/null +++ b/test/components/renku_data_services/notebooks/test_notebooks_session_patching.py @@ -0,0 +1,26 @@ +from dataclasses import dataclass + +from renku_data_services.notebooks.core_sessions import _make_patch_spec_list + + +def test_make_patch_spec_list() -> None: + @dataclass(eq=True) + class MyResource: + name: str + contents: str + + existing = [ + MyResource(name="first", contents="first content"), + MyResource(name="second", contents="second content"), + ] + updated = [ + MyResource(name="second", contents="second content patched"), + MyResource(name="third", contents="new third content"), + ] + patch_list = _make_patch_spec_list(existing=existing, updated=updated) + + assert patch_list == [ + MyResource(name="first", contents="first content"), + MyResource(name="second", contents="second content patched"), + MyResource(name="third", contents="new third content"), + ] diff --git a/test/components/renku_data_services/notebooks/test_notebooks_utils.py b/test/components/renku_data_services/notebooks/test_notebooks_utils.py new file mode 100644 index 000000000..e47bb62b9 --- /dev/null +++ b/test/components/renku_data_services/notebooks/test_notebooks_utils.py @@ -0,0 +1,181 @@ +import pytest + +from renku_data_services.notebooks.crs import ( + NodeAffinity, +) +from renku_data_services.notebooks.utils import intersect_node_affinities + +intersect_node_affinities_test_cases: list[tuple[NodeAffinity, NodeAffinity, NodeAffinity]] = [ + ( + NodeAffinity.model_validate( + { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + {"matchExpressions": [{"key": "renku.io/node-purpose", "operator": "In", "values": ["user"]}]} + ] + } + } + ), + NodeAffinity.model_validate({}), + NodeAffinity.model_validate( + { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + { + "matchExpressions": [ + {"key": "renku.io/node-purpose", "operator": "In", "values": ["user"]}, + ] + } + ] + } + } + ), + ), + ( + NodeAffinity.model_validate({}), + NodeAffinity.model_validate( + { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + {"matchExpressions": [{"key": "renku.io/node-purpose", "operator": "In", "values": ["user"]}]} + ] + } + } + ), + NodeAffinity.model_validate( + { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + { + "matchExpressions": [ + {"key": "renku.io/node-purpose", "operator": "In", "values": ["user"]}, + ] + } + ] + } + } + ), + ), + ( + NodeAffinity.model_validate( + { + 
"requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + {"matchExpressions": [{"key": "renku.io/node-purpose", "operator": "In", "values": ["user"]}]} + ] + } + } + ), + NodeAffinity.model_validate( + { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [{"matchExpressions": [{"key": "renku.io/high-memory", "operator": "Exists"}]}] + } + } + ), + NodeAffinity.model_validate( + { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + { + "matchExpressions": [ + {"key": "renku.io/node-purpose", "operator": "In", "values": ["user"]}, + {"key": "renku.io/high-memory", "operator": "Exists"}, + ] + } + ] + } + } + ), + ), + ( + NodeAffinity.model_validate( + { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + {"matchExpressions": [{"key": "renku.io/node-purpose", "operator": "In", "values": ["user"]}]} + ] + }, + "preferredDuringSchedulingIgnoredDuringExecution": [ + { + "weight": 2, + "preference": { + "matchExpressions": [ + { + "key": "location", + "operator": "In", + "values": ["zone-A"], + } + ], + }, + } + ], + } + ), + NodeAffinity.model_validate( + { + "preferredDuringSchedulingIgnoredDuringExecution": [ + { + "weight": 1, + "preference": { + "matchExpressions": [ + { + "key": "disktype", + "operator": "In", + "values": ["ssd"], + } + ], + }, + } + ] + } + ), + NodeAffinity.model_validate( + { + "requiredDuringSchedulingIgnoredDuringExecution": { + "nodeSelectorTerms": [ + { + "matchExpressions": [ + {"key": "renku.io/node-purpose", "operator": "In", "values": ["user"]}, + ] + } + ] + }, + "preferredDuringSchedulingIgnoredDuringExecution": [ + { + "weight": 2, + "preference": { + "matchExpressions": [ + { + "key": "location", + "operator": "In", + "values": ["zone-A"], + } + ], + }, + }, + { + "weight": 1, + "preference": { + "matchExpressions": [ + { + "key": "disktype", + "operator": "In", + "values": ["ssd"], + } + ], + }, + }, + ], + } + ), + ), +] + + +@pytest.mark.parametrize("left,right,expected", intersect_node_affinities_test_cases) +def test_intersect_node_affinities(left: NodeAffinity, right: NodeAffinity, expected: NodeAffinity) -> None: + result = intersect_node_affinities(left, right) + + assert result.model_dump(mode="json") == expected.model_dump(mode="json") + assert result == expected diff --git a/test/conftest.py b/test/conftest.py index 99007d3be..0e8a855e2 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -219,6 +219,7 @@ async def app_manager( monkeysession.setenv("MAX_PINNED_PROJECTS", "5") monkeysession.setenv("NB_SERVER_OPTIONS__DEFAULTS_PATH", "server_defaults.json") monkeysession.setenv("NB_SERVER_OPTIONS__UI_CHOICES_PATH", "server_options.json") + monkeysession.setenv("V1_SESSIONS_ENABLED", "true") dm = TestDependencyManager.from_env(dummy_users) diff --git a/test/utils.py b/test/utils.py index 6e4958aa7..ba4461cdd 100644 --- a/test/utils.py +++ b/test/utils.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import os import typing from collections.abc import Callable @@ -18,17 +20,18 @@ from renku_data_services.connected_services.db import ConnectedServicesRepository from renku_data_services.crc import models as rp_models from renku_data_services.crc.db import ClusterRepository, ResourcePoolRepository, UserRepository -from renku_data_services.data_api.config import Config as AppConfig +from renku_data_services.data_api.config import Config from renku_data_services.data_api.dependencies import DependencyManager from 
renku_data_services.data_connectors.db import DataConnectorRepository, DataConnectorSecretRepository from renku_data_services.db_config.config import DBConfig from renku_data_services.git.gitlab import DummyGitlabAPI from renku_data_services.k8s.clients import DummyCoreClient, DummySchedulingClient -from renku_data_services.k8s.quota import QuotaRepository +from renku_data_services.k8s.db import QuotaRepository from renku_data_services.message_queue.db import ReprovisioningRepository from renku_data_services.metrics.db import MetricsRepository from renku_data_services.namespace.db import GroupRepository -from renku_data_services.platform.db import PlatformRepository +from renku_data_services.notebooks.api.classes.data_service import GitProviderHelper +from renku_data_services.platform.db import PlatformRepository, UrlRedirectRepository from renku_data_services.project.db import ( ProjectMemberRepository, ProjectMigrationRepository, @@ -50,7 +53,7 @@ class StackSessionMaker: - def __init__(self, parent: "DBConfigStack") -> None: + def __init__(self, parent: DBConfigStack) -> None: self.parent = parent def __call__(self, *args: Any, **kwds: Any) -> AsyncSession: @@ -164,10 +167,10 @@ class TestDependencyManager(DependencyManager): @classmethod def from_env( cls, dummy_users: list[user_preferences_models.UnsavedUserInfo], prefix: str = "" - ) -> "DependencyManager": + ) -> DependencyManager: """Create a config from environment variables.""" db = DBConfigStack.from_env() - config = AppConfig.from_env(db) + config = Config.from_env(db) user_store: base_models.UserStore authenticator: base_models.Authenticator gitlab_authenticator: base_models.Authenticator @@ -258,16 +261,17 @@ def from_env( session_maker=config.db.async_session_maker, encryption_key=config.secrets.encryption_key, async_oauth2_client_class=cls.async_oauth2_client_class, - internal_gitlab_url=config.gitlab_url, ) git_repositories_repo = GitRepositoriesRepository( session_maker=config.db.async_session_maker, connected_services_repo=connected_services_repo, internal_gitlab_url=config.gitlab_url, + enable_internal_gitlab=config.enable_internal_gitlab, ) platform_repo = PlatformRepository( session_maker=config.db.async_session_maker, ) + url_redirect_repo = UrlRedirectRepository(session_maker=config.db.async_session_maker, authz=authz) data_connector_repo = DataConnectorRepository( session_maker=config.db.async_session_maker, authz=authz, @@ -294,6 +298,7 @@ def from_env( cluster_repo = ClusterRepository(session_maker=config.db.async_session_maker) metrics_repo = MetricsRepository(session_maker=config.db.async_session_maker) metrics_mock = MagicMock(spec=MetricsService) + git_provider_helper = GitProviderHelper(connected_services_repo, "", "", "", config.enable_internal_gitlab) return cls( config=config, authenticator=authenticator, @@ -328,6 +333,8 @@ def from_env( shipwright_client=None, authz=authz, low_level_user_secrets_repo=low_level_user_secrets_repo, + url_redirect_repo=url_redirect_repo, + git_provider_helper=git_provider_helper, ) def __post_init__(self) -> None: