diff --git a/server/.env.example b/server/.env.example index 1e6031424..c89d43f0c 100644 --- a/server/.env.example +++ b/server/.env.example @@ -1,7 +1,31 @@ debug=false url_prefix=/api secret_key=postgres -database_url=postgresql://postgres:123456@localhost:5432/postgres # Chat Share Secret Key CHAT_SHARE_SECRET_KEY=put-your-secret-key-here CHAT_SHARE_SALT=put-your-encode-salt-here + +# Configuration if not running in docker +# database_url=postgresql://postgres:123456@localhost:5432/eigent +# redis_url=redis://localhost:6379/0 +# celery_broker_url=redis://localhost:6379/0 +# celery_result_url=redis://localhost:6379/0 +# SESSION_REDIS_URL=redis://localhost:6379/1 + +# Trigger Schedule Poller Configuration +# ENABLE_TRIGGER_SCHEDULE_POLLER_TASK: Enable/disable scheduled trigger polling +ENABLE_TRIGGER_SCHEDULE_POLLER_TASK=true +# TRIGGER_SCHEDULE_POLLER_INTERVAL: Polling interval in minutes +TRIGGER_SCHEDULE_POLLER_INTERVAL=1 +# TRIGGER_SCHEDULE_POLLER_BATCH_SIZE: Number of triggers to fetch per poll +TRIGGER_SCHEDULE_POLLER_BATCH_SIZE=100 +# TRIGGER_SCHEDULE_MAX_DISPATCH_PER_TICK: Max triggers to dispatch per tick (0 = unlimited) +TRIGGER_SCHEDULE_MAX_DISPATCH_PER_TICK=0 +# ENABLE_EXECUTION_TIMEOUT_CHECKER: Enable/disable execution timeout checking +ENABLE_EXECUTION_TIMEOUT_CHECKER=true +# EXECUTION_TIMEOUT_CHECKER_INTERVAL: check_execution_timeouts interval in minutes +EXECUTION_TIMEOUT_CHECKER_INTERVAL=1 +# EXECUTION_PENDING_TIMEOUT_SECONDS: Timeout for pending executions (default 60 seconds) +EXECUTION_PENDING_TIMEOUT_SECONDS=60 +# EXECUTION_RUNNING_TIMEOUT_SECONDS: Timeout for running executions (default 600 seconds / 10 minutes) +EXECUTION_RUNNING_TIMEOUT_SECONDS=600 \ No newline at end of file diff --git a/server/Dockerfile b/server/Dockerfile index 6705bdcb9..6beb26fc2 100644 --- a/server/Dockerfile +++ b/server/Dockerfile @@ -4,6 +4,19 @@ FROM ghcr.io/astral-sh/uv:python3.12-bookworm-slim # Install the project into `/app` WORKDIR /app +# Install Git and 
build dependencies (required for git-based dependencies and Rust packages like tiktoken) +RUN apt-get update -o Acquire::Retries=3 && apt-get install -y --no-install-recommends \ + git \ + curl \ + build-essential \ + gcc \ + python3-dev \ + && curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y \ + && rm -rf /var/lib/apt/lists/* + +# Add Rust to PATH +ENV PATH="/root/.cargo/bin:$PATH" + # Disable bytecode transfer during compilation to avoid EMFILE during build on low nofile limits ENV UV_COMPILE_BYTECODE=0 @@ -15,11 +28,6 @@ ENV UV_PYTHON_INSTALL_MIRROR=https://registry.npmmirror.com/-/binary/python-buil ARG database_url ENV database_url=$database_url -RUN apt-get update -o Acquire::Retries=3 && apt-get install -y --no-install-recommends \ - gcc \ - python3-dev \ - && rm -rf /var/lib/apt/lists/* - # Copy dependency files first COPY server/pyproject.toml server/uv.lock ./ @@ -49,6 +57,10 @@ ENV PATH="/app/.venv/bin:$PATH" COPY server/start.sh /app/start.sh RUN sed -i 's/\r$//' /app/start.sh && chmod +x /app/start.sh +# Make Celery scripts executable +RUN sed -i 's/\r$//' /app/celery/worker/start && chmod +x /app/celery/worker/start +RUN sed -i 's/\r$//' /app/celery/beat/start && chmod +x /app/celery/beat/start + # Reset the entrypoint, don't invoke `uv` ENTRYPOINT [] diff --git a/server/alembic/env.py b/server/alembic/env.py index 58f1283e4..4c7ae10ba 100644 --- a/server/alembic/env.py +++ b/server/alembic/env.py @@ -44,6 +44,7 @@ auto_import("app.model.config") auto_import("app.model.chat") auto_import("app.model.provider") +auto_import("app.model.trigger") # target_metadata = mymodel.Base.metadata target_metadata = SQLModel.metadata @@ -97,7 +98,7 @@ def run_migrations_offline() -> None: script output. 
""" - url = config.get_main_option("sqlalchemy.url") + url = env_not_empty("database_url") context.configure( url=url, target_metadata=target_metadata, diff --git a/server/alembic/versions/2026_02_06_0440-9464b9d89de7_feat_trigger.py b/server/alembic/versions/2026_02_06_0440-9464b9d89de7_feat_trigger.py new file mode 100644 index 000000000..ed1d0d877 --- /dev/null +++ b/server/alembic/versions/2026_02_06_0440-9464b9d89de7_feat_trigger.py @@ -0,0 +1,113 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +"""feat-trigger + +Revision ID: 9464b9d89de7 +Revises: add_timestamp_to_chat_step +Create Date: 2026-02-06 04:40:17.623286 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +import sqlmodel.sql.sqltypes +from app.type.trigger_types import ExecutionStatus +from app.type.trigger_types import ExecutionType +from app.type.trigger_types import ListenerType +from app.type.trigger_types import RequestType +from app.type.trigger_types import TriggerStatus +from app.type.trigger_types import TriggerType +from sqlalchemy_utils.types import ChoiceType + +# revision identifiers, used by Alembic. 
+revision: str = '9464b9d89de7' +down_revision: Union[str, None] = 'add_timestamp_to_chat_step' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.create_table('trigger', + sa.Column('deleted_at', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), + sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('user_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('project_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('name', sqlmodel.sql.sqltypes.AutoString(length=100), nullable=False), + sa.Column('description', sqlmodel.sql.sqltypes.AutoString(length=1000), nullable=False), + sa.Column('trigger_type', ChoiceType(choices=TriggerType, impl=sa.String()), nullable=True), + sa.Column('status', ChoiceType(choices=TriggerStatus, impl=sa.String()), nullable=True), + sa.Column('webhook_url', sa.String(length=1024), nullable=True), + sa.Column('webhook_method', ChoiceType(choices=RequestType, impl=sa.String()), nullable=True), + sa.Column('custom_cron_expression', sa.String(length=100), nullable=True), + sa.Column('listener_type', ChoiceType(choices=ListenerType, impl=sa.String()), nullable=True), + sa.Column('agent_model', sa.String(length=100), nullable=True), + sa.Column('task_prompt', sqlmodel.sql.sqltypes.AutoString(length=1500), nullable=True), + sa.Column('config', sa.JSON(), nullable=True), + sa.Column('max_executions_per_hour', sa.Integer(), nullable=True), + sa.Column('max_executions_per_day', sa.Integer(), nullable=True), + sa.Column('is_single_execution', sa.Boolean(), nullable=False), + sa.Column('last_executed_at', sa.DateTime(), nullable=True), + 
sa.Column('next_run_at', sa.DateTime(), nullable=True), + sa.Column('last_execution_status', sa.String(length=50), nullable=True), + sa.Column('consecutive_failures', sa.Integer(), nullable=False), + sa.Column('auto_disabled_at', sa.DateTime(), nullable=True), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_trigger_next_run_at'), 'trigger', ['next_run_at'], unique=False) + op.create_index(op.f('ix_trigger_project_id'), 'trigger', ['project_id'], unique=False) + op.create_index(op.f('ix_trigger_user_id'), 'trigger', ['user_id'], unique=False) + op.create_table('trigger_execution', + sa.Column('deleted_at', sa.DateTime(), nullable=True), + sa.Column('created_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), + sa.Column('updated_at', sa.TIMESTAMP(), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True), + sa.Column('id', sa.Integer(), nullable=False), + sa.Column('trigger_id', sa.Integer(), nullable=False), + sa.Column('execution_id', sqlmodel.sql.sqltypes.AutoString(), nullable=False), + sa.Column('execution_type', ChoiceType(choices=ExecutionType, impl=sa.String()), nullable=True), + sa.Column('status', ChoiceType(choices=ExecutionStatus, impl=sa.String()), nullable=True), + sa.Column('started_at', sa.DateTime(), nullable=True), + sa.Column('completed_at', sa.DateTime(), nullable=True), + sa.Column('duration_seconds', sa.Float(), nullable=True), + sa.Column('input_data', sa.JSON(), nullable=True), + sa.Column('output_data', sa.JSON(), nullable=True), + sa.Column('error_message', sqlmodel.sql.sqltypes.AutoString(), nullable=True), + sa.Column('attempts', sa.Integer(), nullable=False), + sa.Column('max_retries', sa.Integer(), nullable=False), + sa.Column('tokens_used', sa.Integer(), nullable=True), + sa.Column('tools_executed', sa.JSON(), nullable=True), + sa.ForeignKeyConstraint(['trigger_id'], ['trigger.id'], ), + sa.PrimaryKeyConstraint('id') + ) + op.create_index(op.f('ix_trigger_execution_execution_id'), 
'trigger_execution', ['execution_id'], unique=True) + op.create_index(op.f('ix_trigger_execution_trigger_id'), 'trigger_execution', ['trigger_id'], unique=False) + # ### end Alembic commands ### + + +def downgrade() -> None: + """Downgrade schema.""" + # ### commands auto generated by Alembic - please adjust! ### + op.drop_index(op.f('ix_trigger_execution_trigger_id'), table_name='trigger_execution') + op.drop_index(op.f('ix_trigger_execution_execution_id'), table_name='trigger_execution') + op.drop_table('trigger_execution') + op.drop_index(op.f('ix_trigger_user_id'), table_name='trigger') + op.drop_index(op.f('ix_trigger_project_id'), table_name='trigger') + op.drop_index(op.f('ix_trigger_next_run_at'), table_name='trigger') + op.drop_table('trigger') + # ### end Alembic commands ### diff --git a/server/app/__init__.py b/server/app/__init__.py index 36c64b1d4..0cda6dd9c 100644 --- a/server/app/__init__.py +++ b/server/app/__init__.py @@ -10,10 +10,36 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. -# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= - -from fastapi import FastAPI -from fastapi_pagination import add_pagination - -api = FastAPI(swagger_ui_parameters={"persistAuthorization": True}) -add_pagination(api) +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +from contextlib import asynccontextmanager +from fastapi import FastAPI +from fastapi_pagination import add_pagination +from fastapi_limiter import FastAPILimiter +from app.component.environment import env_or_fail +from redis import asyncio as aioredis +import logging + +logger = logging.getLogger("server_app") + +@asynccontextmanager +async def lifespan(app: FastAPI): + """Application lifespan manager for startup/shutdown events.""" + # Startup: Initialize rate limiter with Redis + redis_url = env_or_fail("redis_url") + redis_connection = aioredis.from_url(redis_url, encoding="utf-8", decode_responses=True) + await FastAPILimiter.init(redis_connection) + logger.info("FastAPI Limiter initialized with Redis") + + yield + + # Shutdown: Close Redis connection + await FastAPILimiter.close() + logger.info("FastAPI Limiter closed") + +# Add lifespan for ratelimiter setup +api = FastAPI( + swagger_ui_parameters={"persistAuthorization": True}, + lifespan=lifespan +) +add_pagination(api) diff --git a/server/app/component/celery.py b/server/app/component/celery.py new file mode 100644 index 000000000..c5c28d4f5 --- /dev/null +++ b/server/app/component/celery.py @@ -0,0 +1,52 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +from celery import Celery +from app.component.environment import env_or_fail, env + +celery = Celery( + __name__, + broker=env_or_fail("celery_broker_url"), + backend=env_or_fail("celery_result_url") +) + +# Configure Celery to autodiscover tasks +celery.conf.imports = [ + "app.schedule.trigger_schedule_task", +] + +# Configure Celery Beat schedule +ENABLE_TRIGGER_SCHEDULE_POLLER = env("ENABLE_TRIGGER_SCHEDULE_POLLER_TASK", "true").lower() == "true" +TRIGGER_SCHEDULE_POLLER_INTERVAL = int(env("TRIGGER_SCHEDULE_POLLER_INTERVAL", "1")) # in minutes + +ENABLE_EXECUTION_TIMEOUT_CHECKER = env("ENABLE_EXECUTION_TIMEOUT_CHECKER", "true").lower() == "true" +EXECUTION_TIMEOUT_CHECKER_INTERVAL = int(env("EXECUTION_TIMEOUT_CHECKER_INTERVAL", "1")) # in minutes + +celery.conf.beat_schedule = {} + +if ENABLE_TRIGGER_SCHEDULE_POLLER: + celery.conf.beat_schedule["poll-trigger-schedules"] = { + "task": "app.schedule.trigger_schedule_task.poll_trigger_schedules", + "schedule": TRIGGER_SCHEDULE_POLLER_INTERVAL * 60.0, # Convert minutes to seconds + "options": {"queue": "poll_trigger_schedules"}, + } + +if ENABLE_EXECUTION_TIMEOUT_CHECKER: + celery.conf.beat_schedule["check-execution-timeouts"] = { + "task": "app.schedule.trigger_schedule_task.check_execution_timeouts", + "schedule": EXECUTION_TIMEOUT_CHECKER_INTERVAL * 60.0, # Convert minutes to seconds + "options": {"queue": "check_execution_timeouts"}, + } + +celery.conf.timezone = "UTC" \ No newline at end of file diff --git a/server/app/component/redis_utils.py b/server/app/component/redis_utils.py new file mode 100644 index 000000000..ccafd7870 --- /dev/null +++ b/server/app/component/redis_utils.py @@ -0,0 +1,500 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +"""Redis utilities for managing WebSocket sessions and real-time data.""" + +import redis +from redis import Redis +from typing import Optional, Dict, Any, Set, Callable +from datetime import datetime, timezone +import json +import logging +import os +import asyncio + +logger = logging.getLogger("server_redis_utils") + + +class RedisSessionManager: + """Manages WebSocket sessions in Redis for scalability and persistence.""" + + def __init__(self, redis_url: Optional[str] = None): + """Initialize Redis connection. + + Args: + redis_url: Redis connection URL. If None, reads from environment. 
+ """ + self.redis_url = redis_url or os.getenv("SESSION_REDIS_URL", "redis://localhost:6379/0") + self._client: Optional[Redis] = None + + # Key prefixes + self.SESSION_PREFIX = "ws:session:" + self.USER_SESSIONS_PREFIX = "ws:user:sessions:" + self.PENDING_PREFIX = "ws:pending:" + self.PUBSUB_CHANNEL = "ws:executions" + self.DELIVERY_CONFIRMATION_PREFIX = "ws:delivery:" + + # TTL for sessions (24 hours) + self.SESSION_TTL = 86400 + # TTL for delivery confirmations (5 minutes) + self.DELIVERY_TTL = 300 + + # Pub/Sub + self._pubsub = None + self._pubsub_client: Optional[Redis] = None + + @property + def client(self) -> Redis: + """Get or create Redis client.""" + if self._client is None: + try: + self._client = redis.from_url( + self.redis_url, + decode_responses=True, + socket_connect_timeout=5, + socket_timeout=5 + ) + # Test connection + self._client.ping() + logger.info("Redis connection established", extra={"url": self.redis_url}) + except Exception as e: + logger.error("Failed to connect to Redis", extra={"error": str(e)}, exc_info=True) + raise + return self._client + + def store_session( + self, + session_id: str, + user_id: str, + metadata: Optional[Dict[str, Any]] = None + ) -> bool: + """Store a WebSocket session in Redis. 
+ + Args: + session_id: Unique session identifier + user_id: User ID associated with the session + metadata: Additional metadata to store + + Returns: + True if successful, False otherwise + """ + try: + session_data = { + "user_id": user_id, + "session_id": session_id, + "connected_at": datetime.now(timezone.utc).isoformat(), + **(metadata or {}) + } + + session_key = f"{self.SESSION_PREFIX}{session_id}" + user_sessions_key = f"{self.USER_SESSIONS_PREFIX}{user_id}" + + # Store session data + self.client.setex( + session_key, + self.SESSION_TTL, + json.dumps(session_data) + ) + + # Add session to user's session set + self.client.sadd(user_sessions_key, session_id) + self.client.expire(user_sessions_key, self.SESSION_TTL) + + logger.debug("Session stored in Redis", extra={ + "session_id": session_id, + "user_id": user_id + }) + return True + + except Exception as e: + logger.error("Failed to store session in Redis", extra={ + "session_id": session_id, + "error": str(e) + }, exc_info=True) + return False + + def get_session(self, session_id: str) -> Optional[Dict[str, Any]]: + """Get session data from Redis. + + Args: + session_id: Session identifier + + Returns: + Session data dictionary or None if not found + """ + try: + session_key = f"{self.SESSION_PREFIX}{session_id}" + data = self.client.get(session_key) + + if data: + return json.loads(data) + return None + + except Exception as e: + logger.error("Failed to get session from Redis", extra={ + "session_id": session_id, + "error": str(e) + }) + return None + + def remove_session(self, session_id: str) -> bool: + """Remove a session from Redis. 
+ + Args: + session_id: Session identifier + + Returns: + True if successful, False otherwise + """ + try: + # Get session data to find user_id + session = self.get_session(session_id) + if not session: + return False + + user_id = session.get("user_id") + + # Remove session data + session_key = f"{self.SESSION_PREFIX}{session_id}" + self.client.delete(session_key) + + # Remove from user's session set + if user_id: + user_sessions_key = f"{self.USER_SESSIONS_PREFIX}{user_id}" + self.client.srem(user_sessions_key, session_id) + + # Remove pending executions + pending_key = f"{self.PENDING_PREFIX}{session_id}" + self.client.delete(pending_key) + + logger.debug("Session removed from Redis", extra={ + "session_id": session_id, + "user_id": user_id + }) + return True + + except Exception as e: + logger.error("Failed to remove session from Redis", extra={ + "session_id": session_id, + "error": str(e) + }, exc_info=True) + return False + + def get_user_sessions(self, user_id: str) -> Set[str]: + """Get all active session IDs for a user. + + Args: + user_id: User identifier + + Returns: + Set of session IDs + """ + try: + user_sessions_key = f"{self.USER_SESSIONS_PREFIX}{user_id}" + sessions = self.client.smembers(user_sessions_key) + return sessions if sessions else set() + + except Exception as e: + logger.error("Failed to get user sessions from Redis", extra={ + "user_id": user_id, + "error": str(e) + }) + return set() + + def add_pending_execution(self, session_id: str, execution_id: str) -> bool: + """Add a pending execution to a session. 
+ + Args: + session_id: Session identifier + execution_id: Execution identifier + + Returns: + True if successful, False otherwise + """ + try: + pending_key = f"{self.PENDING_PREFIX}{session_id}" + self.client.sadd(pending_key, execution_id) + self.client.expire(pending_key, self.SESSION_TTL) + return True + + except Exception as e: + logger.error("Failed to add pending execution", extra={ + "session_id": session_id, + "execution_id": execution_id, + "error": str(e) + }) + return False + + def remove_pending_execution(self, session_id: str, execution_id: str) -> bool: + """Remove a pending execution from a session. + + Args: + session_id: Session identifier + execution_id: Execution identifier + + Returns: + True if successful, False otherwise + """ + try: + pending_key = f"{self.PENDING_PREFIX}{session_id}" + self.client.srem(pending_key, execution_id) + return True + + except Exception as e: + logger.error("Failed to remove pending execution", extra={ + "session_id": session_id, + "execution_id": execution_id, + "error": str(e) + }) + return False + + def get_pending_executions(self, session_id: str) -> Set[str]: + """Get all pending executions for a session. + + Args: + session_id: Session identifier + + Returns: + Set of execution IDs + """ + try: + pending_key = f"{self.PENDING_PREFIX}{session_id}" + pending = self.client.smembers(pending_key) + return pending if pending else set() + + except Exception as e: + logger.error("Failed to get pending executions", extra={ + "session_id": session_id, + "error": str(e) + }) + return set() + + def update_session_ttl(self, session_id: str) -> bool: + """Refresh the TTL for a session. 
+ + Args: + session_id: Session identifier + + Returns: + True if successful, False otherwise + """ + try: + session_key = f"{self.SESSION_PREFIX}{session_id}" + self.client.expire(session_key, self.SESSION_TTL) + + pending_key = f"{self.PENDING_PREFIX}{session_id}" + self.client.expire(pending_key, self.SESSION_TTL) + + return True + + except Exception as e: + logger.error("Failed to update session TTL", extra={ + "session_id": session_id, + "error": str(e) + }) + return False + + def confirm_delivery(self, execution_id: str, session_id: str) -> bool: + """Confirm that a message was delivered to a WebSocket client. + + Args: + execution_id: The execution ID that was delivered + session_id: The session ID that received the message + + Returns: + True if confirmation was stored, False otherwise + """ + try: + confirmation_key = f"{self.DELIVERY_CONFIRMATION_PREFIX}{execution_id}" + confirmation_data = json.dumps({ + "execution_id": execution_id, + "session_id": session_id, + "delivered_at": datetime.now(timezone.utc).isoformat() + }) + self.client.setex(confirmation_key, self.DELIVERY_TTL, confirmation_data) + logger.debug("Delivery confirmed", extra={ + "execution_id": execution_id, + "session_id": session_id + }) + return True + except Exception as e: + logger.error("Failed to confirm delivery", extra={ + "execution_id": execution_id, + "session_id": session_id, + "error": str(e) + }) + return False + + async def wait_for_delivery( + self, + execution_id: str, + timeout: float = 10.0, + poll_interval: float = 0.1 + ) -> Optional[Dict[str, Any]]: + """Wait for delivery confirmation of an execution. 
+ + Args: + execution_id: The execution ID to wait for + timeout: Maximum time to wait in seconds + poll_interval: Time between checks in seconds + + Returns: + Confirmation data if delivered, None if timeout + """ + confirmation_key = f"{self.DELIVERY_CONFIRMATION_PREFIX}{execution_id}" + elapsed = 0.0 + + while elapsed < timeout: + try: + data = self.client.get(confirmation_key) + if data: + # Clean up the confirmation key + self.client.delete(confirmation_key) + return json.loads(data) + except Exception as e: + logger.error("Error checking delivery confirmation", extra={ + "execution_id": execution_id, + "error": str(e) + }) + + await asyncio.sleep(poll_interval) + elapsed += poll_interval + + logger.warning("Delivery confirmation timeout", extra={ + "execution_id": execution_id, + "timeout": timeout + }) + return None + + def has_active_sessions_for_user(self, user_id: str) -> bool: + """Check if a user has any active WebSocket sessions. + + Args: + user_id: User identifier + + Returns: + True if user has active sessions, False otherwise + """ + try: + sessions = self.get_user_sessions(user_id) + return len(sessions) > 0 + except Exception as e: + logger.error("Failed to check user sessions", extra={ + "user_id": user_id, + "error": str(e) + }) + return False + + def close(self): + """Close Redis connection.""" + if self._pubsub: + self._pubsub.close() + self._pubsub = None + if self._pubsub_client: + self._pubsub_client.close() + self._pubsub_client = None + if self._client: + self._client.close() + self._client = None + + def publish_execution_event(self, event_data: Dict[str, Any]) -> bool: + """Publish an execution event to all workers via Redis pub/sub. 
+ + Args: + event_data: Event data to broadcast + + Returns: + True if successful, False otherwise + """ + try: + message = json.dumps(event_data) + self.client.publish(self.PUBSUB_CHANNEL, message) + logger.debug("Published execution event to Redis", extra={ + "execution_id": event_data.get("execution_id"), + "type": event_data.get("type") + }) + return True + except Exception as e: + logger.error("Failed to publish execution event", extra={ + "error": str(e) + }, exc_info=True) + return False + + async def subscribe_to_execution_events(self, callback: Callable[[Dict[str, Any]], None]): + """Subscribe to execution events from Redis pub/sub. + + This should be run in a background task. It will call the callback + for each message received on the pub/sub channel. + + Args: + callback: Async function to call with each event + """ + try: + # Create separate Redis client for pub/sub (can't use the same one) + if self._pubsub_client is None: + self._pubsub_client = redis.from_url( + self.redis_url, + decode_responses=True, + socket_connect_timeout=5, + socket_timeout=5 + ) + + self._pubsub = self._pubsub_client.pubsub() + await asyncio.get_event_loop().run_in_executor( + None, + self._pubsub.subscribe, + self.PUBSUB_CHANNEL + ) + + logger.info("Subscribed to execution events", extra={ + "channel": self.PUBSUB_CHANNEL + }) + + # Listen for messages + while True: + message = await asyncio.get_event_loop().run_in_executor( + None, + self._pubsub.get_message, + True, # ignore_subscribe_messages + 1.0 # timeout + ) + + if message and message['type'] == 'message': + try: + event_data = json.loads(message['data']) + await callback(event_data) + except Exception as e: + logger.error("Error processing pub/sub message", extra={ + "error": str(e) + }, exc_info=True) + + # Small sleep to prevent tight loop + await asyncio.sleep(0.01) + + except Exception as e: + logger.error("Pub/sub subscription error", extra={ + "error": str(e) + }, exc_info=True) + + +# Global instance 
+_redis_manager: Optional[RedisSessionManager] = None + + +def get_redis_manager() -> RedisSessionManager: + """Get or create the global Redis session manager.""" + global _redis_manager + if _redis_manager is None: + _redis_manager = RedisSessionManager() + return _redis_manager diff --git a/server/app/component/schedule/trigger_schedule_task.py b/server/app/component/schedule/trigger_schedule_task.py new file mode 100644 index 000000000..4f592f67d --- /dev/null +++ b/server/app/component/schedule/trigger_schedule_task.py @@ -0,0 +1,175 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +from celery import shared_task +import logging +from datetime import datetime, timezone +from sqlmodel import select, or_ + +from app.component.database import session_make +from app.component.environment import env +from app.service.trigger.trigger_schedule_service import TriggerScheduleService +from app.service.trigger.trigger_service import TriggerService +from app.component.trigger_utils import MAX_DISPATCH_PER_TICK +from app.component.redis_utils import get_redis_manager +from app.model.trigger.trigger_execution import TriggerExecution +from app.model.trigger.trigger import Trigger +from app.type.trigger_types import ExecutionStatus +logger = logging.getLogger("server_trigger_schedule_task") +# Timeout configuration from environment variables +EXECUTION_PENDING_TIMEOUT_SECONDS = int(env("EXECUTION_PENDING_TIMEOUT_SECONDS", "60")) +EXECUTION_RUNNING_TIMEOUT_SECONDS = int(env("EXECUTION_RUNNING_TIMEOUT_SECONDS", "600")) # 10 minutes + + +@shared_task(queue="poll_trigger_schedules") +def poll_trigger_schedules() -> None: + """ + Celery task to poll and execute scheduled triggers. + This runs periodically to check for triggers that are due for execution. + + This is a lightweight wrapper around TriggerScheduleService that handles + session management and delegates all business logic to the service layer. + """ + logger.info("Starting poll_trigger_schedules task") + + session = session_make() + try: + # Create service instance with session + schedule_service = TriggerScheduleService(session) + + # Delegate all logic to the service + schedule_service.poll_and_execute_due_triggers( + max_dispatch_per_tick=MAX_DISPATCH_PER_TICK + ) + finally: + session.close() + + +@shared_task(queue="check_execution_timeouts") +def check_execution_timeouts() -> None: + """ + Celery task to check for timed-out pending and running executions.
+ + This runs periodically to find: + - Pending executions that haven't been acknowledged within EXECUTION_PENDING_TIMEOUT_SECONDS + - Running executions that have been stuck for more than EXECUTION_RUNNING_TIMEOUT_SECONDS + + These are marked as missed/failed respectively. + """ + logger.info("Starting check_execution_timeouts task", extra={ + "pending_timeout": EXECUTION_PENDING_TIMEOUT_SECONDS, + "running_timeout": EXECUTION_RUNNING_TIMEOUT_SECONDS + }) + + session = session_make() + redis_manager = get_redis_manager() + trigger_service = TriggerService(session) + + try: + now = datetime.now(timezone.utc) + + # Find all pending and running executions + executions = session.exec( + select(TriggerExecution).where( + or_( + TriggerExecution.status == ExecutionStatus.pending, + TriggerExecution.status == ExecutionStatus.running + ) + ) + ).all() + + timed_out_pending_count = 0 + timed_out_running_count = 0 + + for execution in executions: + is_pending = execution.status == ExecutionStatus.pending + is_running = execution.status == ExecutionStatus.running + + # Determine the reference time and timeout based on status + if is_pending: + reference_time = execution.created_at + timeout_seconds = EXECUTION_PENDING_TIMEOUT_SECONDS + else: # running + reference_time = execution.started_at or execution.created_at + timeout_seconds = EXECUTION_RUNNING_TIMEOUT_SECONDS + + if reference_time.tzinfo is None: + reference_time = reference_time.replace(tzinfo=timezone.utc) + time_elapsed = (now - reference_time).total_seconds() + + if time_elapsed > timeout_seconds: + # Determine the new status and error message + if is_pending: + new_status = ExecutionStatus.missed + error_message = f"Execution acknowledgment timeout ({timeout_seconds} seconds)" + timed_out_pending_count += 1 + else: + new_status = ExecutionStatus.failed + error_message = f"Execution running timeout ({timeout_seconds} seconds) - no completion received" + timed_out_running_count += 1 + + # Use 
TriggerService.update_execution_status for proper failure tracking + trigger_service.update_execution_status( + execution=execution, + status=new_status, + error_message=error_message + ) + + # Remove from Redis pending list (best effort, may not exist) + try: + # Get all sessions for this execution's user + trigger = session.get(Trigger, execution.trigger_id) + if trigger and trigger.user_id: + user_session_ids = redis_manager.get_user_sessions(trigger.user_id) + for session_id in user_session_ids: + redis_manager.remove_pending_execution(session_id, execution.execution_id) + elif not trigger: + logger.warning("Trigger not found for execution", extra={ + "execution_id": execution.execution_id, + "trigger_id": execution.trigger_id + }) + except Exception as e: + logger.warning("Failed to remove execution from Redis", extra={ + "execution_id": execution.execution_id, + "trigger_id": execution.trigger_id, + "error": str(e) + }) + + logger.info("Execution timed out", extra={ + "execution_id": execution.execution_id, + "trigger_id": execution.trigger_id, + "original_status": "pending" if is_pending else "running", + "new_status": new_status.value, + "time_elapsed": time_elapsed + }) + + total_timed_out = timed_out_pending_count + timed_out_running_count + if total_timed_out > 0: + logger.info("Marked executions as timed out", extra={ + "timed_out_pending_count": timed_out_pending_count, + "timed_out_running_count": timed_out_running_count, + "total_timed_out": total_timed_out + }) + else: + logger.debug("No timed-out executions found") + + except Exception as e: + logger.error("Error checking execution timeouts", extra={ + "error": str(e), + "error_type": type(e).__name__ + }, exc_info=True) + session.rollback() + + finally: + session.close() \ No newline at end of file diff --git a/server/app/component/service/trigger/__init__.py b/server/app/component/service/trigger/__init__.py new file mode 100644 index 000000000..f8bbaca25 --- /dev/null +++ 
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

"""
Trigger Service Package

Public interface of the trigger package. Re-exports:
- TriggerService: main service for trigger operations
- TriggerScheduleService: service for scheduled trigger operations
- App handlers for the supported trigger types (Slack, Webhook, Schedule)
"""

from app.service.trigger.trigger_service import TriggerService, get_trigger_service
from app.service.trigger.trigger_schedule_service import TriggerScheduleService
from app.service.trigger.app_handler_service import (
    AppHandlerResult,
    BaseAppHandler,
    DefaultWebhookHandler,
    ScheduleAppHandler,
    SlackAppHandler,
    get_app_handler,
    get_schedule_handler,
    get_supported_trigger_types,
    register_app_handler,
)

__all__ = [
    # Services
    "TriggerService",
    "get_trigger_service",
    "TriggerScheduleService",
    # Handlers
    "BaseAppHandler",
    "SlackAppHandler",
    "DefaultWebhookHandler",
    "ScheduleAppHandler",
    "AppHandlerResult",
    # Handler functions
    "get_app_handler",
    "get_schedule_handler",
    "register_app_handler",
    "get_supported_trigger_types",
]
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

"""
Trigger App Handler Service

Modular service for handling app-specific webhook authentication,
filtering, and payload normalization based on trigger_type.
"""

import re
import logging
from typing import Optional
from dataclasses import dataclass
from fastapi import Request
from sqlmodel import Session, select, and_

from app.model.trigger.trigger import Trigger
from app.model.config.config import Config
from app.model.trigger.app_configs import SlackTriggerConfig, WebhookTriggerConfig, ScheduleTriggerConfig
from app.type.trigger_types import TriggerType, ExecutionType, TriggerStatus
from app.type.config_group import ConfigGroup

# Fix: `logger` was used in SlackAppHandler and ScheduleAppHandler but never
# defined in this module, which would raise NameError at runtime.
logger = logging.getLogger(__name__)


@dataclass
class AppHandlerResult:
    """Result from app handler operations."""
    success: bool
    data: Optional[dict] = None
    reason: Optional[str] = None


class BaseAppHandler:
    """Base class for app-specific handlers."""

    trigger_type: TriggerType
    execution_type: ExecutionType = ExecutionType.webhook
    config_group: Optional[str] = None

    async def get_credentials(self, session: Session, user_id: str) -> dict:
        """Get user credentials from the config table, keyed by config_name."""
        if not self.config_group:
            return {}

        configs = session.exec(
            select(Config).where(
                and_(
                    Config.user_id == int(user_id),
                    Config.config_group == self.config_group
                )
            )
        ).all()
        return {config.config_name: config.config_value for config in configs}

    async def authenticate(
        self,
        request: Request,
        body: bytes,
        trigger: Trigger,
        session: Session
    ) -> AppHandlerResult:
        """
        Authenticate the incoming webhook request.

        Default implementation accepts everything; subclasses override with
        app-specific signature verification / challenge handling.
        """
        return AppHandlerResult(success=True)

    async def filter_event(
        self,
        payload: dict,
        trigger: Trigger
    ) -> AppHandlerResult:
        """
        Filter events based on trigger configuration.

        Default implementation lets every event through.
        """
        return AppHandlerResult(success=True, reason="ok")

    def normalize_payload(
        self,
        payload: dict,
        trigger: Trigger,
        request_meta: Optional[dict] = None
    ) -> dict:
        """Normalize the payload for execution input (identity by default)."""
        return payload


class SlackAppHandler(BaseAppHandler):
    """Handler for Slack triggers."""

    trigger_type = TriggerType.slack_trigger
    execution_type = ExecutionType.slack
    config_group = ConfigGroup.SLACK.value

    async def authenticate(
        self,
        request: Request,
        body: bytes,
        trigger: Trigger,
        session: Session
    ) -> AppHandlerResult:
        """Handle Slack signature verification and URL verification challenges."""
        from camel.auth.slack_auth import SlackAuth

        credentials = await self.get_credentials(session, trigger.user_id)

        slack_auth = SlackAuth(
            signing_secret=credentials.get("SLACK_SIGNING_SECRET"),
            bot_token=credentials.get("SLACK_BOT_TOKEN"),
            api_token=credentials.get("SLACK_API_TOKEN"),
        )

        # Check for URL verification challenge.
        challenge_response = slack_auth.get_verification_response(request, body)
        if challenge_response:
            # Return the challenge response (already in correct format: {"challenge": "..."}).
            logger.info(f"Slack URL verification - challenge_response: {challenge_response}")
            return AppHandlerResult(success=True, data=challenge_response)

        # Verify webhook signature.
        if not slack_auth.verify_webhook_request(request, body):
            logger.warning("Invalid Slack webhook signature", extra={
                "trigger_id": trigger.id
            })
            return AppHandlerResult(success=False, reason="invalid_signature")

        return AppHandlerResult(success=True)

    async def filter_event(
        self,
        payload: dict,
        trigger: Trigger
    ) -> AppHandlerResult:
        """Filter Slack events based on the trigger's SlackTriggerConfig."""
        # Prefer 'config' field.
        config_data = trigger.config or {}
        config = SlackTriggerConfig(**config_data)
        event = payload.get("event", {})
        event_type = event.get("type", "")

        # Check event type.
        if not config.should_trigger(event_type):
            return AppHandlerResult(success=False, reason="event_type_not_configured")

        # Check channel filter (if channel_id is set, only trigger for that channel).
        if config.channel_id:
            if event.get("channel") != config.channel_id:
                return AppHandlerResult(success=False, reason="channel_not_matched")

        # Check bot message filter.
        if config.ignore_bot_messages:
            if event.get("bot_id") or event.get("subtype") == "bot_message":
                return AppHandlerResult(success=False, reason="bot_message_ignored")

        # Check user filter.
        if config.ignore_users and event.get("user") in config.ignore_users:
            return AppHandlerResult(success=False, reason="user_filtered")

        # Check message filter regex (case-insensitive search).
        if config.message_filter and event.get("text"):
            if not re.search(config.message_filter, event.get("text", ""), re.IGNORECASE):
                return AppHandlerResult(success=False, reason="message_filter_not_matched")

        return AppHandlerResult(success=True, reason="ok")

    def normalize_payload(
        self,
        payload: dict,
        trigger: Trigger,
        request_meta: Optional[dict] = None
    ) -> dict:
        """Normalize a Slack event payload to a flat, stable schema."""
        logger.info("Normalizing payload", extra={"payload": payload})
        # Prefer 'config' field.
        config_data = trigger.config or {}
        config = SlackTriggerConfig(**config_data)
        event = payload.get("event", {})

        normalized = {
            "event_type": event.get("type"),
            "event_ts": event.get("event_ts"),
            "team_id": payload.get("team_id"),
            "user_id": event.get("user"),
            "channel_id": event.get("channel"),
            "text": event.get("text"),
            "message_ts": event.get("ts"),
            "thread_ts": event.get("thread_ts"),
            "reaction": event.get("reaction"),
            "files": event.get("files"),
            "event_id": payload.get("event_id") or payload.get("id")
        }

        # if config.include_raw_payload:
        #     normalized["raw_payload"] = payload

        return normalized


class DefaultWebhookHandler(BaseAppHandler):
    """Default handler for generic webhooks with config-based filtering."""

    trigger_type = TriggerType.webhook
    execution_type = ExecutionType.webhook

    async def filter_event(
        self,
        payload: dict,
        trigger: Trigger,
        headers: Optional[dict] = None,
        body_raw: Optional[str] = None
    ) -> AppHandlerResult:
        """Filter webhook events based on the trigger's WebhookTriggerConfig."""
        config_data = trigger.config or {}
        config = WebhookTriggerConfig(**config_data)

        # Get text content for message_filter (check body for text field or stringify).
        text = None
        if isinstance(payload, dict):
            text = payload.get("text") or payload.get("message") or payload.get("content")
        if text is None and body_raw:
            text = body_raw

        # Use the config's should_trigger method.
        should_trigger, reason = config.should_trigger(
            body=body_raw or "",
            headers=headers or {},
            text=text
        )

        if not should_trigger:
            return AppHandlerResult(success=False, reason=reason)

        return AppHandlerResult(success=True, reason="ok")

    def normalize_payload(
        self,
        payload: dict,
        trigger: Trigger,
        request_meta: Optional[dict] = None
    ) -> dict:
        """Normalize generic webhook payload with optional request metadata."""
        config_data = trigger.config or {}
        config = WebhookTriggerConfig(**config_data)

        result = {"body": payload}

        if request_meta:
            # Include headers if configured.
            if config.include_headers and "headers" in request_meta:
                result["headers"] = request_meta["headers"]

            # Include query params if configured.
            if config.include_query_params and "query_params" in request_meta:
                result["query_params"] = request_meta["query_params"]

            # Include request metadata if configured.
            if config.include_request_metadata:
                if "method" in request_meta:
                    result["method"] = request_meta["method"]
                if "url" in request_meta:
                    result["url"] = request_meta["url"]
                if "client_ip" in request_meta:
                    result["client_ip"] = request_meta["client_ip"]

        return result


class ScheduleAppHandler(BaseAppHandler):
    """
    Handler for scheduled triggers.

    Manages schedule-specific logic including:
    - Expiration checking (expirationDate for recurring schedules)
    - Date validation for one-time executions (date field)
    """

    trigger_type = TriggerType.schedule
    execution_type = ExecutionType.scheduled

    async def filter_event(
        self,
        payload: dict,
        trigger: Trigger
    ) -> AppHandlerResult:
        """
        Filter scheduled events based on trigger config.

        Checks:
        - If one-time (date set) and date has passed
        - If recurring with expirationDate and it has passed
        """
        config_data = trigger.config or {}

        try:
            config = ScheduleTriggerConfig(**config_data)
        except Exception as e:
            logger.warning(
                "Invalid schedule config",
                extra={"trigger_id": trigger.id, "error": str(e)}
            )
            # Allow execution if config is missing/invalid (backwards compatibility).
            return AppHandlerResult(success=True, reason="ok")

        # Check if schedule should execute.
        should_execute, reason = config.should_execute()

        if not should_execute:
            return AppHandlerResult(success=False, reason=reason)

        return AppHandlerResult(success=True, reason="ok")

    def normalize_payload(
        self,
        payload: dict,
        trigger: Trigger,
        request_meta: Optional[dict] = None
    ) -> dict:
        """Normalize scheduled trigger payload."""
        config_data = trigger.config or {}

        normalized = {
            "scheduled_at": payload.get("scheduled_at"),
            "trigger_id": trigger.id,
            "trigger_name": trigger.name,
            "is_single_execution": trigger.is_single_execution,
        }

        # Include config details if present.
        if config_data:
            if config_data.get("date"):
                normalized["date"] = config_data.get("date")
            if config_data.get("expirationDate"):
                normalized["expirationDate"] = config_data.get("expirationDate")

        return normalized

    def check_and_handle_expiration(
        self,
        trigger: Trigger,
        session: Session
    ) -> bool:
        """
        Check if a schedule has expired and handle accordingly.

        Args:
            trigger: The trigger to check
            session: Database session for updates

        Returns:
            True if trigger is expired and was deactivated, False otherwise
        """
        config_data = trigger.config or {}

        try:
            config = ScheduleTriggerConfig(**config_data)
        except Exception as e:
            logger.warning(
                "Invalid schedule config during expiration check",
                extra={"trigger_id": trigger.id, "error": str(e)}
            )
            return False

        if config.is_expired():
            # Deactivate the trigger.
            trigger.status = TriggerStatus.completed
            session.add(trigger)
            session.commit()

            logger.info(
                "Schedule trigger expired and deactivated",
                extra={
                    "trigger_id": trigger.id,
                    "trigger_name": trigger.name,
                    "expiration_date": config.expirationDate or config.date
                }
            )

            return True

        return False

    def validate_schedule_for_execution(
        self,
        trigger: Trigger
    ) -> tuple[bool, str]:
        """
        Validate that a scheduled trigger is valid for execution.

        Args:
            trigger: The trigger to validate

        Returns:
            Tuple of (is_valid, reason)
        """
        config_data = trigger.config or {}

        try:
            config = ScheduleTriggerConfig(**config_data)
        except Exception as e:
            return False, f"invalid_config: {str(e)}"

        # Check expiration.
        if config.is_expired():
            return False, "schedule_expired"

        return True, "ok"


# Registry of handlers by trigger_type.
_HANDLERS: dict[TriggerType, BaseAppHandler] = {
    TriggerType.slack_trigger: SlackAppHandler(),
    TriggerType.webhook: DefaultWebhookHandler(),
    TriggerType.schedule: ScheduleAppHandler(),
}


def get_app_handler(trigger_type: TriggerType) -> Optional[BaseAppHandler]:
    """Get the handler for a trigger type, or None if unsupported."""
    return _HANDLERS.get(trigger_type)


def register_app_handler(trigger_type: TriggerType, handler: BaseAppHandler):
    """Register (or replace) the handler for a trigger type."""
    _HANDLERS[trigger_type] = handler


def get_supported_trigger_types() -> list[TriggerType]:
    """Get list of trigger types with webhook support."""
    return list(_HANDLERS.keys())


def get_schedule_handler() -> Optional[ScheduleAppHandler]:
    """Get the schedule handler instance."""
    return _HANDLERS.get(TriggerType.schedule)
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

from datetime import datetime, timedelta, timezone
from typing import List, Tuple, Optional
import logging
from croniter import croniter
from uuid import uuid4
import asyncio  # NOTE(review): appears unused in this module — confirm before removing
from sqlmodel import select

from app.model.trigger.trigger import Trigger
from app.model.trigger.trigger_execution import TriggerExecution
from app.type.trigger_types import TriggerStatus, ExecutionType, ExecutionStatus, TriggerType
from app.component.trigger_utils import check_rate_limits, MAX_DISPATCH_PER_TICK
from app.model.trigger.app_configs import ScheduleTriggerConfig

# Fix: `logger` was used in every method of this module but never defined.
logger = logging.getLogger(__name__)


class TriggerScheduleService:
    """Service for managing scheduled trigger operations.

    This service mainly delegates schedule business logic
    from the main trigger_service.py.

    Handles tasks from the Celery beat scheduler.

    Mainly handles:
    - Polling for due schedules
    - Dispatching scheduled triggers
    - Calculating next run times based on cron expressions
    """

    def __init__(self, session):
        """
        Initialize the schedule service with a database session.

        Args:
            session: SQLModel session for database operations
        """
        self.session = session

    def fetch_due_schedules(self, limit: Optional[int] = 100) -> List[Trigger]:
        """
        Fetch triggers that are due for execution.

        Args:
            limit: Maximum number of triggers to fetch

        Returns:
            List of triggers that need to be executed (empty on query failure)
        """
        now = datetime.now(timezone.utc)

        try:
            statement = (
                select(Trigger)
                .where(Trigger.trigger_type == TriggerType.schedule)
                .where(Trigger.status == TriggerStatus.active)
                .where(Trigger.next_run_at <= now)
                .order_by(Trigger.next_run_at)
                .limit(limit)
            )

            results = self.session.exec(statement).all()

            logger.debug(
                "Fetched due schedules",
                extra={
                    "count": len(results),
                    "current_time": now.isoformat()
                }
            )

            return list(results)

        except Exception as e:
            logger.error(
                "Failed to fetch due schedules",
                extra={"error": str(e)},
                exc_info=True
            )
            return []

    def calculate_next_run_at(
        self,
        trigger: Trigger,
        base_time: Optional[datetime] = None
    ) -> datetime:
        """
        Calculate the next run time for a trigger based on its cron expression.

        Args:
            trigger: The trigger to calculate next run time for
            base_time: Base time to calculate from (defaults to now)

        Returns:
            The next scheduled run time

        Raises:
            ValueError: If trigger has no cron expression or invalid expression
        """
        if not trigger.custom_cron_expression:
            raise ValueError(f"Trigger {trigger.id} has no cron expression")

        if base_time is None:
            base_time = datetime.now(timezone.utc)

        try:
            cron = croniter(trigger.custom_cron_expression, base_time)
            next_run = cron.get_next(datetime)
            return next_run
        except Exception as e:
            logger.error(
                "Failed to calculate next run time",
                extra={
                    "trigger_id": trigger.id,
                    "cron_expression": trigger.custom_cron_expression,
                    "error": str(e)
                }
            )
            raise

    def dispatch_trigger(self, trigger: Trigger) -> bool:
        """
        Dispatch a trigger for execution.

        Creates a pending TriggerExecution record, updates trigger bookkeeping
        (last_executed_at, next_run_at, single-execution deactivation), commits,
        and notifies WebSocket subscribers via Redis pub/sub (best effort).

        Args:
            trigger: The trigger to dispatch

        Returns:
            True if dispatched successfully, False otherwise
        """
        try:
            # Check schedule expiration before dispatching.
            if not self._check_schedule_valid(trigger):
                logger.info(
                    "Schedule trigger expired, skipping dispatch",
                    extra={"trigger_id": trigger.id, "trigger_name": trigger.name}
                )
                return False

            # Create execution record.
            execution_id = str(uuid4())
            execution = TriggerExecution(
                trigger_id=trigger.id,
                execution_id=execution_id,
                execution_type=ExecutionType.scheduled,
                status=ExecutionStatus.pending,
                input_data={"scheduled_at": datetime.now(timezone.utc).isoformat()},
                started_at=datetime.now(timezone.utc)
            )

            self.session.add(execution)

            # Update trigger statistics.
            trigger.last_executed_at = datetime.now(timezone.utc)
            trigger.last_execution_status = "pending"

            # Calculate and set next run time.
            try:
                trigger.next_run_at = self.calculate_next_run_at(trigger, datetime.now(timezone.utc))
            except Exception as e:
                logger.error(
                    "Failed to calculate next run time, trigger will be skipped",
                    extra={"trigger_id": trigger.id, "error": str(e)}
                )
                # Set next_run_at far in the future to prevent immediate re-execution.
                trigger.next_run_at = datetime.now(timezone.utc) + timedelta(days=365)

            # If single execution, deactivate the trigger.
            if trigger.is_single_execution:
                trigger.status = TriggerStatus.inactive
                logger.info(
                    "Trigger deactivated after single execution",
                    extra={"trigger_id": trigger.id}
                )

            self.session.add(trigger)
            self.session.commit()

            # TODO: Queue the actual task execution.
            # This would integrate with a task queue (e.g., Celery) to execute the trigger's action.
            # For now event is sent to client for execution.

            logger.info(
                "Trigger dispatched successfully",
                extra={
                    "trigger_id": trigger.id,
                    "trigger_name": trigger.name,
                    "execution_id": execution_id,
                    "next_run_at": trigger.next_run_at.isoformat() if trigger.next_run_at else None
                }
            )

            # Notify WebSocket subscribers via Redis pub/sub (best effort —
            # a notification failure must not fail the dispatch).
            try:
                from app.component.redis_utils import get_redis_manager
                redis_manager = get_redis_manager()
                redis_manager.publish_execution_event({
                    "type": "execution_created",
                    "execution_id": execution_id,
                    "trigger_id": trigger.id,
                    "trigger_type": "schedule",
                    "status": "pending",
                    "input_data": execution.input_data,
                    "task_prompt": trigger.task_prompt,
                    "execution_type": "schedule",
                    "user_id": str(trigger.user_id),
                    "project_id": str(trigger.project_id)
                })

                logger.debug("WebSocket notification sent", extra={
                    "execution_id": execution_id,
                    "trigger_id": trigger.id
                })
            except Exception as e:
                logger.warning("Failed to send WebSocket notification", extra={
                    "trigger_id": trigger.id,
                    "execution_id": execution_id,
                    "error": str(e)
                })

            return True

        except Exception as e:
            logger.error(
                "Failed to dispatch trigger",
                extra={
                    "trigger_id": trigger.id,
                    "error": str(e)
                },
                exc_info=True
            )
            self.session.rollback()
            return False

    def process_schedules(self, due_schedules: List[Trigger]) -> Tuple[int, int]:
        """
        Process due schedules, checking rate limits and dispatching.

        Args:
            due_schedules: List of triggers that are due for execution

        Returns:
            Tuple of (dispatched_count, rate_limited_count)
        """
        dispatched_count = 0
        rate_limited_count = 0

        for trigger in due_schedules:
            # Check rate limits.
            if not check_rate_limits(self.session, trigger):
                rate_limited_count += 1

                # Still update next_run_at even if rate limited, so we don't keep checking.
                try:
                    trigger.next_run_at = self.calculate_next_run_at(trigger, datetime.now(timezone.utc))
                    self.session.add(trigger)
                    self.session.commit()
                except Exception as e:
                    logger.error(
                        "Failed to update next_run_at for rate limited trigger",
                        extra={"trigger_id": trigger.id, "error": str(e)}
                    )

                continue

            # Dispatch the trigger.
            if self.dispatch_trigger(trigger):
                dispatched_count += 1

        return dispatched_count, rate_limited_count

    def poll_and_execute_due_triggers(
        self,
        max_dispatch_per_tick: Optional[int] = None
    ) -> Tuple[int, int]:
        """
        Poll for due triggers and execute them in batches.

        Args:
            max_dispatch_per_tick: Maximum number of triggers to dispatch in this tick
                (defaults to MAX_DISPATCH_PER_TICK)

        Returns:
            Tuple of (total_dispatched, total_rate_limited)
        """
        max_dispatch = max_dispatch_per_tick or MAX_DISPATCH_PER_TICK
        total_dispatched = 0
        total_rate_limited = 0

        # Process in batches until we've handled all due schedules or hit the limit.
        while True:
            due_schedules = self.fetch_due_schedules()

            if not due_schedules:
                break

            dispatched_count, rate_limited_count = self.process_schedules(due_schedules)
            total_dispatched += dispatched_count
            total_rate_limited += rate_limited_count

            logger.debug(
                "Batch processed",
                extra={
                    "dispatched": dispatched_count,
                    "rate_limited": rate_limited_count
                }
            )

            # Check if we've hit the per-tick limit (if enabled).
            if max_dispatch > 0 and total_dispatched >= max_dispatch:
                logger.warning(
                    "Circuit breaker activated: reached dispatch limit, will continue next tick",
                    extra={"limit": max_dispatch}
                )
                break

        if total_dispatched > 0 or total_rate_limited > 0:
            logger.info(
                "Trigger schedule poll completed",
                extra={
                    "total_dispatched": total_dispatched,
                    "total_rate_limited": total_rate_limited
                }
            )

        return total_dispatched, total_rate_limited

    def _check_schedule_valid(self, trigger: Trigger) -> bool:
        """
        Check if a scheduled trigger is valid for execution.

        Validates:
        - For one-time (date set): Checks if the scheduled date has passed
        - For recurring (expirationDate set): Checks if expirationDate has passed

        If expired, the trigger will be marked as completed.

        Args:
            trigger: The trigger to check

        Returns:
            True if trigger is valid for execution, False if expired
        """
        config_data = trigger.config or {}

        # If no config or empty config, allow execution (no expiration).
        if not config_data:
            return True

        try:
            config = ScheduleTriggerConfig(**config_data)
        except Exception as e:
            logger.warning(
                "Invalid schedule config",
                extra={"trigger_id": trigger.id, "error": str(e)}
            )
            return False

        # Check if schedule has expired.
        if config.is_expired():
            # Mark trigger as completed.
            trigger.status = TriggerStatus.completed
            self.session.add(trigger)
            self.session.commit()

            logger.info(
                "Schedule trigger expired and marked as completed",
                extra={
                    "trigger_id": trigger.id,
                    "trigger_name": trigger.name,
                    "expiration_info": config.expirationDate or config.date
                }
            )
            return False

        return True

    def update_trigger_next_run(self, trigger: Trigger) -> None:
        """
        Update a trigger's next_run_at based on its cron expression.

        Skips expired schedules; rolls back on failure.

        Args:
            trigger: The trigger to update
        """
        try:
            # Check if schedule is expired before updating next run.
            if not self._check_schedule_valid(trigger):
                logger.info(
                    "Trigger expired, not updating next_run_at",
                    extra={"trigger_id": trigger.id}
                )
                return

            trigger.next_run_at = self.calculate_next_run_at(trigger)
            self.session.add(trigger)
            self.session.commit()

            logger.info(
                "Trigger next_run_at updated",
                extra={
                    "trigger_id": trigger.id,
                    "next_run_at": trigger.next_run_at.isoformat()
                }
            )
        except Exception as e:
            logger.error(
                "Failed to update trigger next_run_at",
                extra={
                    "trigger_id": trigger.id,
                    "error": str(e)
                }
            )
            self.session.rollback()
from datetime import datetime, timedelta, timezone
from typing import Optional, List, Dict, Any
from sqlmodel import select, and_, or_
from uuid import uuid4
import logging

from app.model.trigger.trigger import Trigger
from app.model.trigger.trigger_execution import TriggerExecution
from app.type.trigger_types import TriggerType, TriggerStatus, ExecutionType, ExecutionStatus
from app.component.database import session_make
from app.service.trigger.trigger_schedule_service import TriggerScheduleService
from app.component.trigger_utils import SCHEDULED_FETCH_BATCH_SIZE, check_rate_limits
from app.model.trigger.app_configs import ScheduleTriggerConfig, WebhookTriggerConfig
from app.model.trigger.app_configs.base_config import BaseTriggerConfig

# FIX: `logger` was referenced throughout this module but never defined,
# so the very first create_execution()/update_execution_status() call would
# raise NameError. Name follows the sibling modules' convention
# ("server_trigger_utils", "server_slack_controller", ...).
logger = logging.getLogger("server_trigger_service")


class TriggerService:
    """Service for managing trigger operations and scheduling."""

    def __init__(self, session=None):
        # When no session is injected (e.g. Celery task context), open a
        # fresh one via session_make().
        self.session = session or session_make()
        self.schedule_service = TriggerScheduleService(self.session)

    def create_execution(
        self,
        trigger: Trigger,
        execution_type: ExecutionType,
        input_data: Optional[Dict[str, Any]] = None
    ) -> TriggerExecution:
        """Create a new trigger execution in `pending` state.

        Also stamps the parent trigger's last_executed_at / last_execution_status.

        Args:
            trigger: The trigger being executed.
            execution_type: How the execution was initiated (schedule, slack, ...).
            input_data: Optional payload persisted with the execution.

        Returns:
            The persisted TriggerExecution row.
        """
        execution_id = str(uuid4())

        execution = TriggerExecution(
            trigger_id=trigger.id,
            execution_id=execution_id,
            execution_type=execution_type,
            status=ExecutionStatus.pending,
            input_data=input_data or {},
            started_at=datetime.now(timezone.utc)
        )

        self.session.add(execution)
        self.session.commit()
        self.session.refresh(execution)

        # Update trigger statistics (separate commit so the execution row is
        # durable even if this bookkeeping write fails).
        trigger.last_executed_at = datetime.now(timezone.utc)
        trigger.last_execution_status = "pending"
        self.session.add(trigger)
        self.session.commit()

        logger.info("Execution created", extra={
            "trigger_id": trigger.id,
            "execution_id": execution_id,
            "execution_type": execution_type.value
        })

        return execution

    def update_execution_status(
        self,
        execution: TriggerExecution,
        status: ExecutionStatus,
        output_data: Optional[Dict[str, Any]] = None,
        error_message: Optional[str] = None,
        tokens_used: Optional[int] = None,
        tools_executed: Optional[Dict[str, Any]] = None
    ) -> TriggerExecution:
        """Update execution status and metadata.

        Terminal statuses also set completed_at/duration and propagate the
        result to the parent trigger (consecutive-failure accounting and
        auto-disable).
        """
        execution.status = status

        # Set completed_at and duration for terminal statuses
        if status in [ExecutionStatus.completed, ExecutionStatus.failed, ExecutionStatus.cancelled, ExecutionStatus.missed]:
            execution.completed_at = datetime.now(timezone.utc)
            if execution.started_at:
                # Ensure started_at is timezone-aware for subtraction
                # (DB round-trips may strip tzinfo).
                started_at = execution.started_at
                if started_at.tzinfo is None:
                    started_at = started_at.replace(tzinfo=timezone.utc)
                execution.duration_seconds = (execution.completed_at - started_at).total_seconds()

        # NOTE: truthiness checks mean an empty dict / 0 tokens are not stored;
        # intentional here since those carry no information.
        if output_data:
            execution.output_data = output_data
        if error_message:
            execution.error_message = error_message
        if tokens_used:
            execution.tokens_used = tokens_used
        if tools_executed:
            execution.tools_executed = tools_executed

        self.session.add(execution)
        self.session.commit()

        # Update trigger status and handle auto-disable logic
        trigger = self.session.get(Trigger, execution.trigger_id)
        if trigger:
            if status == ExecutionStatus.failed:
                trigger.last_execution_status = "failed"
                trigger.consecutive_failures += 1
                # Check for auto-disable based on max_failure_count in config
                self._check_auto_disable(trigger)
            elif status == ExecutionStatus.completed:
                trigger.last_execution_status = "completed"
                # Reset consecutive failures on success
                trigger.consecutive_failures = 0
            elif status == ExecutionStatus.cancelled:
                trigger.last_execution_status = "cancelled"
            elif status == ExecutionStatus.missed:
                trigger.last_execution_status = "missed"

            self.session.add(trigger)
            self.session.commit()

        logger.info("Execution status updated", extra={
            "execution_id": execution.execution_id,
            "status": status.name,
            "duration": execution.duration_seconds
        })

        return execution

    def _check_auto_disable(self, trigger: Trigger) -> bool:
        """
        Check if trigger should be auto-disabled based on consecutive failures.

        Args:
            trigger: The trigger to check

        Returns:
            True if trigger was auto-disabled, False otherwise
        """
        if not trigger.config:
            return False

        try:
            # Get the appropriate config class based on trigger type
            config: BaseTriggerConfig
            if trigger.trigger_type == TriggerType.schedule:
                config = ScheduleTriggerConfig(**trigger.config)
            elif trigger.trigger_type == TriggerType.webhook:
                config = WebhookTriggerConfig(**trigger.config)
            else:
                # For other trigger types, use base config
                config = BaseTriggerConfig(**trigger.config)

            # Check if auto-disable should happen
            if config.should_auto_disable(trigger.consecutive_failures):
                # Caller commits: this mutates the trigger it was handed.
                trigger.status = TriggerStatus.inactive
                trigger.auto_disabled_at = datetime.now(timezone.utc)

                logger.warning(
                    "Trigger auto-disabled due to max failures",
                    extra={
                        "trigger_id": trigger.id,
                        "trigger_name": trigger.name,
                        "consecutive_failures": trigger.consecutive_failures,
                        "max_failure_count": config.max_failure_count
                    }
                )
                return True

        except Exception as e:
            # Best-effort: a malformed config must not break status updates.
            logger.error(
                "Failed to check auto-disable for trigger",
                extra={
                    "trigger_id": trigger.id,
                    "error": str(e)
                }
            )

        return False

    def get_pending_executions(self) -> List[TriggerExecution]:
        """Get all pending executions that need to be processed, oldest first."""
        executions = self.session.exec(
            select(TriggerExecution).where(
                TriggerExecution.status == ExecutionStatus.pending
            ).order_by(TriggerExecution.created_at)
        ).all()

        return list(executions)

    def get_failed_executions_for_retry(self) -> List[TriggerExecution]:
        """Get failed executions that still have retry budget, oldest first."""
        executions = self.session.exec(
            select(TriggerExecution).where(
                and_(
                    TriggerExecution.status == ExecutionStatus.failed,
                    TriggerExecution.attempts < TriggerExecution.max_retries
                )
            ).order_by(TriggerExecution.created_at)
        ).all()

        return list(executions)

    def get_due_scheduled_triggers(self, limit: Optional[int] = None) -> List[Trigger]:
        """
        Fetch scheduled triggers that are due for execution.

        Args:
            limit: Maximum number of triggers to fetch (defaults to SCHEDULED_FETCH_BATCH_SIZE)

        Returns:
            List of triggers that are due for execution
        """
        current_time = datetime.now(timezone.utc)
        limit = limit or SCHEDULED_FETCH_BATCH_SIZE

        # Query triggers that:
        # 1. Are scheduled type
        # 2. Are active
        # 3. Have a cron expression
        # 4. next_run_at is null (never run) or next_run_at <= now
        triggers = self.session.exec(
            select(Trigger)
            .where(
                and_(
                    Trigger.trigger_type == TriggerType.schedule,
                    Trigger.status == TriggerStatus.active,
                    Trigger.custom_cron_expression.is_not(None),
                    or_(
                        Trigger.next_run_at.is_(None),
                        Trigger.next_run_at <= current_time
                    )
                )
            )
            .limit(limit)
        ).all()

        return list(triggers)

    def execute_scheduled_triggers(self) -> int:
        """
        Execute all due scheduled triggers.
        Uses TriggerScheduleService for the actual execution logic.

        Returns:
            Number of triggers actually dispatched.
        """
        due_triggers = self.get_due_scheduled_triggers()

        if not due_triggers:
            return 0

        dispatched_count, rate_limited_count = self.schedule_service.process_schedules(due_triggers)

        logger.info(
            "Scheduled triggers execution completed",
            extra={
                "dispatched": dispatched_count,
                "rate_limited": rate_limited_count
            }
        )

        return dispatched_count

    def process_slack_trigger(
        self,
        trigger: Trigger,
        slack_data: Dict[str, Any]
    ) -> Optional[TriggerExecution]:
        """Process a Slack trigger event.

        Returns the created execution, or None when the trigger is inactive,
        rate-limited, or creation fails.

        Raises:
            ValueError: if `trigger` is not a Slack trigger.
        """
        if trigger.trigger_type != TriggerType.slack_trigger:
            raise ValueError("Trigger is not a Slack trigger")

        if trigger.status != TriggerStatus.active:
            logger.warning("Slack trigger is not active", extra={
                "trigger_id": trigger.id
            })
            return None

        if not check_rate_limits(self.session, trigger):
            logger.warning("Slack trigger execution skipped due to rate limits", extra={
                "trigger_id": trigger.id
            })
            return None

        try:
            execution = self.create_execution(
                trigger=trigger,
                execution_type=ExecutionType.slack,
                input_data=slack_data
            )

            # TODO: Queue the actual task execution

            logger.info("Slack trigger executed", extra={
                "trigger_id": trigger.id,
                "execution_id": execution.execution_id
            })

            return execution

        except Exception as e:
            logger.error("Slack trigger execution failed", extra={
                "trigger_id": trigger.id,
                "error": str(e)
            }, exc_info=True)
            return None

    def cleanup_old_executions(self, days_to_keep: int = 30) -> int:
        """Delete terminal (completed/failed/cancelled) executions older than the cutoff.

        Returns the number of rows deleted.
        """
        cutoff_date = datetime.now(timezone.utc) - timedelta(days=days_to_keep)

        old_executions = self.session.exec(
            select(TriggerExecution).where(
                and_(
                    TriggerExecution.created_at < cutoff_date,
                    TriggerExecution.status.in_([
                        ExecutionStatus.completed,
                        ExecutionStatus.failed,
                        ExecutionStatus.cancelled
                    ])
                )
            )
        ).all()

        count = len(old_executions)

        for execution in old_executions:
            self.session.delete(execution)

        self.session.commit()

        logger.info("Old executions cleaned up", extra={
            "count": count,
            "days_to_keep": days_to_keep
        })

        return count

    def get_trigger_statistics(self, trigger_id: int) -> Dict[str, Any]:
        """Get statistics for a specific trigger.

        Raises:
            ValueError: if the trigger does not exist.
        """
        trigger = self.session.get(Trigger, trigger_id)
        if not trigger:
            raise ValueError("Trigger not found")

        # Get all executions once and derive counts in Python; acceptable while
        # per-trigger execution volume is small (cleanup_old_executions bounds it).
        executions = self.session.exec(
            select(TriggerExecution).where(
                TriggerExecution.trigger_id == trigger_id
            )
        ).all()

        stats = {
            "trigger_id": trigger_id,
            "name": trigger.name,
            "trigger_type": trigger.trigger_type.value,
            "status": trigger.status.name,
            "total_executions": len(executions),
            "successful_executions": len([e for e in executions if e.status == ExecutionStatus.completed]),
            "failed_executions": len([e for e in executions if e.status == ExecutionStatus.failed]),
            "pending_executions": len([e for e in executions if e.status == ExecutionStatus.pending]),
            "cancelled_executions": len([e for e in executions if e.status == ExecutionStatus.cancelled]),
            "last_executed_at": trigger.last_executed_at.isoformat() if trigger.last_executed_at else None,
            "created_at": trigger.created_at.isoformat() if trigger.created_at else None
        }

        # Average execution time over completed runs with a non-zero duration.
        completed_executions = [e for e in executions if e.status == ExecutionStatus.completed and e.duration_seconds]
        if completed_executions:
            avg_duration = sum(e.duration_seconds for e in completed_executions) / len(completed_executions)
            stats["average_execution_time_seconds"] = round(avg_duration, 2)

        # Calculate total tokens used
        total_tokens = sum(e.tokens_used for e in executions if e.tokens_used)
        if total_tokens:
            stats["total_tokens_used"] = total_tokens

        return stats


def get_trigger_service(session=None) -> TriggerService:
    """Factory function to create a TriggerService instance with a fresh session."""
    return TriggerService(session)
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. =========

"""Rate limiting utilities for triggers."""
from datetime import datetime, timedelta, timezone
from typing import TYPE_CHECKING
import logging
from sqlmodel import select, and_
from sqlalchemy import func

from app.model.trigger.trigger_execution import TriggerExecution
from app.component.environment import env

logger = logging.getLogger("server_trigger_utils")

if TYPE_CHECKING:
    from sqlmodel import Session
    from app.model.trigger.trigger import Trigger


# Environment variable configuration with defaults
MAX_DISPATCH_PER_TICK = int(env("TRIGGER_SCHEDULE_MAX_DISPATCH_PER_TICK", "0"))  # Max triggers to dispatch per tick
SCHEDULED_FETCH_BATCH_SIZE = int(env("TRIGGER_SCHEDULE_POLLER_BATCH_SIZE", "100"))  # Fetch batch size


def _count_executions_since(session: "Session", trigger_id: int, since: datetime) -> int:
    """Count executions of a trigger created at or after `since`.

    FIX: the previous implementation fetched every matching row with
    `.all()` and called `len()` on it, pulling the full result set into
    memory just to count; a COUNT(*) aggregate does this in the database.
    """
    return session.exec(
        select(func.count(TriggerExecution.id)).where(
            and_(
                TriggerExecution.trigger_id == trigger_id,
                TriggerExecution.created_at >= since
            )
        )
    ).one()


def check_rate_limits(session: "Session", trigger: "Trigger") -> bool:
    """
    Check if trigger execution is within rate limits.

    Args:
        session: Database session
        trigger: The trigger to check rate limits for

    Returns:
        True if execution is allowed, False if rate limited
    """
    current_time = datetime.now(timezone.utc)

    # Check hourly limit (a falsy/None limit means "no hourly limit")
    if trigger.max_executions_per_hour:
        hourly_count = _count_executions_since(
            session, trigger.id, current_time - timedelta(hours=1)
        )
        if hourly_count >= trigger.max_executions_per_hour:
            logger.warning(
                "Trigger hourly rate limit exceeded",
                extra={
                    "trigger_id": trigger.id,
                    "limit": trigger.max_executions_per_hour,
                    "current_count": hourly_count
                }
            )
            return False

    # Check daily limit (a falsy/None limit means "no daily limit")
    if trigger.max_executions_per_day:
        daily_count = _count_executions_since(
            session, trigger.id, current_time - timedelta(days=1)
        )
        if daily_count >= trigger.max_executions_per_day:
            logger.warning(
                "Trigger daily rate limit exceeded",
                extra={
                    "trigger_id": trigger.id,
                    "limit": trigger.max_executions_per_day,
                    "current_count": daily_count
                }
            )
            return False

    return True
import Auth, auth_must from app.component.database import session @@ -38,6 +40,27 @@ router = APIRouter(prefix="/chat", tags=["Chat History"]) +def is_real_task(history: ChatHistory) -> bool: + """ + Check if a task is a real task vs a placeholder/trigger-created task. + Excludes placeholder tasks created during trigger creation. + """ + # Has actual token usage + if history.tokens and history.tokens > 0: + return True + + # Has real model configuration (not placeholder "none" values) + if (history.model_platform and history.model_platform != "none" and + history.model_type and history.model_type != "none" and + history.installed_mcp and history.installed_mcp != "none"): + return True + + # Check if question starts with trigger placeholder prefix + if history.question and history.question.startswith("Project created via trigger:"): + return False + + # Default to real task if no placeholder indicators + return True @router.post("/history", name="save chat history", response_model=ChatHistoryOut) def create_chat_history(data: ChatHistoryIn, session: Session = Depends(session), auth: Auth = Depends(auth_must)): @@ -89,9 +112,9 @@ def list_chat_history(session: Session = Depends(session), auth: Auth = Depends( @router.get("/histories/grouped", name="get grouped chat history") def list_grouped_chat_history( - include_tasks: bool | None = Query(True, description="Whether to include individual tasks in groups"), + include_tasks: Optional[bool] = Query(True, description="Whether to include individual tasks in groups"), session: Session = Depends(session), - auth: Auth = Depends(auth_must), + auth: Auth = Depends(auth_must) ) -> GroupedHistoryResponse: """List chat histories grouped by project_id for current user.""" user_id = auth.user.id @@ -103,75 +126,83 @@ def list_grouped_chat_history( .order_by( desc(case((ChatHistory.created_at.is_(None), 0), else_=1)), # Non-null created_at first desc(ChatHistory.created_at), # Then by created_at descending - desc(ChatHistory.id), 
@router.get("/histories/grouped", name="get grouped chat history")
def list_grouped_chat_history(
    include_tasks: Optional[bool] = Query(True, description="Whether to include individual tasks in groups"),
    session: Session = Depends(session),
    auth: Auth = Depends(auth_must)
) -> GroupedHistoryResponse:
    """List chat histories grouped by project_id for current user.

    Placeholder rows (see is_real_task) are excluded from task lists and
    statistics; each project also carries its trigger count.
    """
    user_id = auth.user.id

    stmt = (
        select(ChatHistory)
        .where(ChatHistory.user_id == user_id)
        .order_by(
            desc(case((ChatHistory.created_at.is_(None), 0), else_=1)),  # Non-null created_at first
            desc(ChatHistory.created_at),  # Then by created_at descending
            desc(ChatHistory.id)  # Finally by id descending for records with same/null created_at
        )
    )
    histories = session.exec(stmt).all()

    # Get trigger counts per project in one grouped query
    trigger_count_stmt = (
        select(Trigger.project_id, func.count(Trigger.id).label('count'))
        .where(Trigger.user_id == str(user_id))
        .group_by(Trigger.project_id)
    )
    trigger_counts = session.exec(trigger_count_stmt).all()
    trigger_count_map = {project_id: count for project_id, count in trigger_counts}

    # Group histories by project_id
    project_map = defaultdict(lambda: {
        'project_id': '',
        'project_name': None,
        'total_tokens': 0,
        'task_count': 0,
        'latest_task_date': '',
        'last_prompt': None,
        'tasks': [],
        'total_completed_tasks': 0,
        'total_ongoing_tasks': 0,
        'average_tokens_per_task': 0,
        'total_triggers': 0
    })

    for history in histories:
        # Use project_id if available, fallback to task_id
        project_id = history.project_id if history.project_id else history.task_id
        project_data = project_map[project_id]

        # Initialize project data from the first (most recent) row
        if not project_data['project_id']:
            project_data['project_id'] = project_id
            project_data['project_name'] = history.project_name or f"Project {project_id}"
            project_data['latest_task_date'] = history.created_at.isoformat() if history.created_at else ''
            project_data['last_prompt'] = history.question

        # Convert to ChatHistoryOut format
        history_out = ChatHistoryOut(**history.model_dump())

        # FIX: evaluate is_real_task once per row (it was called twice,
        # duplicating the attribute checks for every history record).
        real = is_real_task(history)

        # Add task to project if requested (only real tasks)
        if include_tasks and real:
            project_data['tasks'].append(history_out)

        # Update project statistics (only for real tasks)
        if real:
            project_data['task_count'] += 1
            project_data['total_tokens'] += history.tokens or 0

            if history.status == ChatStatus.done:
                project_data['total_completed_tasks'] += 1
            elif history.status == ChatStatus.ongoing:
                project_data['total_ongoing_tasks'] += 1

            # Update latest task date and last prompt
            if history.created_at:
                task_date = history.created_at.isoformat()
                if not project_data['latest_task_date'] or task_date > project_data['latest_task_date']:
                    project_data['latest_task_date'] = task_date
                    project_data['last_prompt'] = history.question

    # Convert to ProjectGroup objects and sort
    projects = []
    for project_data in project_map.values():
        # Sort tasks within each project by creation date (oldest first)
        if include_tasks:
            project_data['tasks'].sort(key=lambda x: (x.created_at is None, x.created_at or ''), reverse=False)

        # Set trigger count from trigger_count_map
        project_id = project_data['project_id']
        project_data['total_triggers'] = trigger_count_map.get(project_id, 0)

        project_group = ProjectGroup(**project_data)
        projects.append(project_group)

    # Most recently active projects first
    projects.sort(key=lambda x: x.latest_task_date, reverse=True)

    response = GroupedHistoryResponse(projects=projects)

    logger.debug("Grouped chat histories listed", extra={
        "user_id": user_id,
        "total_projects": response.total_projects,
        "total_tasks": response.total_tasks,
        "include_tasks": include_tasks
    })

    return response
@router.get("/histories/grouped/{project_id}", name="get single grouped project")
def get_grouped_project(
    project_id: str,
    include_tasks: Optional[bool] = Query(True, description="Whether to include individual tasks in the project"),
    session: Session = Depends(session),
    auth: Auth = Depends(auth_must)
) -> ProjectGroup:
    """Get a single project group by project_id for current user.

    Raises:
        HTTPException 404: when no chat history exists for the project.
    """
    user_id = auth.user.id

    # Get all histories for the specific project
    stmt = (
        select(ChatHistory)
        .where(ChatHistory.user_id == user_id)
        .where(ChatHistory.project_id == project_id)
        .order_by(
            desc(case((ChatHistory.created_at.is_(None), 0), else_=1)),
            desc(ChatHistory.created_at),
            desc(ChatHistory.id)
        )
    )
    histories = session.exec(stmt).all()

    if not histories:
        raise HTTPException(status_code=404, detail="Project not found")

    # Get trigger count for this project
    trigger_count_stmt = (
        select(func.count(Trigger.id))
        .where(Trigger.user_id == str(user_id))
        .where(Trigger.project_id == project_id)
    )
    trigger_count = session.exec(trigger_count_stmt).first() or 0

    # Build project data
    project_data = {
        'project_id': project_id,
        'project_name': None,
        'total_tokens': 0,
        'task_count': 0,
        'latest_task_date': '',
        'last_prompt': None,
        'tasks': [],
        'total_completed_tasks': 0,
        'total_ongoing_tasks': 0,
        'average_tokens_per_task': 0,
        'total_triggers': trigger_count
    }

    for history in histories:
        # Initialize project name from first (most recent) history
        if not project_data['project_name']:
            project_data['project_name'] = history.project_name or f"Project {project_id}"
            project_data['latest_task_date'] = history.created_at.isoformat() if history.created_at else ''
            project_data['last_prompt'] = history.question

        # Convert to ChatHistoryOut format
        history_out = ChatHistoryOut(**history.model_dump())

        # FIX: evaluate is_real_task once per row (was called twice per record)
        real = is_real_task(history)

        # Add task to project if requested (only real tasks)
        if include_tasks and real:
            project_data['tasks'].append(history_out)

        # Update project statistics (only for real tasks)
        if real:
            project_data['task_count'] += 1
            project_data['total_tokens'] += history.tokens or 0

            if history.status == ChatStatus.done:
                project_data['total_completed_tasks'] += 1
            elif history.status == ChatStatus.ongoing:
                project_data['total_ongoing_tasks'] += 1

            # Update latest task date and last prompt
            if history.created_at:
                task_date = history.created_at.isoformat()
                if not project_data['latest_task_date'] or task_date > project_data['latest_task_date']:
                    project_data['latest_task_date'] = task_date
                    project_data['last_prompt'] = history.question

    # Sort tasks within the project by creation date (oldest first)
    if include_tasks:
        project_data['tasks'].sort(key=lambda x: (x.created_at is None, x.created_at or ''), reverse=False)

    project_group = ProjectGroup(**project_data)

    logger.debug("Single grouped project retrieved", extra={
        "user_id": user_id,
        "project_id": project_id,
        "task_count": project_group.task_count,
        "include_tasks": include_tasks
    })

    return project_group
from fastapi import APIRouter, Depends, HTTPException
from sqlmodel import Session, select, and_
from typing import Optional, List
import logging
from pydantic import BaseModel

from app.model.config.config import Config
from app.type.config_group import ConfigGroup
from app.component.auth import Auth, auth_must
from app.component.database import session

logger = logging.getLogger("server_slack_controller")


class SlackChannelOut(BaseModel):
    """Output model for Slack channels."""
    id: str
    name: str
    is_private: bool = False
    is_member: bool = False
    num_members: Optional[int] = None


class SlackChannelsResponse(BaseModel):
    """Response model for Slack channels list."""
    channels: List[SlackChannelOut]
    has_credentials: bool


router = APIRouter(prefix="/trigger/slack", tags=["Slack Integration"])


@router.get("/channels", name="get slack channels")
def get_slack_channels(
    session: Session = Depends(session),
    auth: Auth = Depends(auth_must)
) -> SlackChannelsResponse:
    """
    Get list of Slack channels for the authenticated user.

    This endpoint fetches channels from the user's Slack workspace using their
    stored credentials. Requires SLACK_BOT_TOKEN to be configured in user configs.
    """
    user_id = auth.user.id

    # Get Slack credentials from config
    # (all rows in the SLACK config group for this user, keyed by name).
    configs = session.exec(
        select(Config).where(
            and_(
                Config.user_id == int(user_id),
                Config.config_group == ConfigGroup.SLACK.value
            )
        )
    ).all()

    credentials = {config.config_name: config.config_value for config in configs}
    bot_token = credentials.get("SLACK_BOT_TOKEN")

    # Missing token is not an error: return an empty list with a flag so the
    # client can prompt the user to connect Slack.
    if not bot_token:
        logger.warning("Slack credentials not found", extra={"user_id": user_id})
        return SlackChannelsResponse(channels=[], has_credentials=False)

    try:
        # Imported lazily so the server runs without slack_sdk installed;
        # NOTE: this also binds SlackApiError for the except clause below --
        # that clause is only reachable after the import succeeded.
        from slack_sdk import WebClient
        from slack_sdk.errors import SlackApiError

        client = WebClient(token=bot_token)

        # Fetch all channels (public and private the bot has access to)
        channels = []
        cursor = None

        while True:
            response = client.conversations_list(
                types="public_channel,private_channel",
                cursor=cursor,
                limit=200
            )

            for channel in response.get("channels", []):
                channels.append(SlackChannelOut(
                    id=channel.get("id"),
                    name=channel.get("name"),
                    is_private=channel.get("is_private", False),
                    is_member=channel.get("is_member", False),
                    num_members=channel.get("num_members")
                ))

            # Check for pagination: Slack returns an empty next_cursor on the
            # last page.
            cursor = response.get("response_metadata", {}).get("next_cursor")
            if not cursor:
                break

        logger.info("Slack channels fetched", extra={
            "user_id": user_id,
            "channel_count": len(channels)
        })

        return SlackChannelsResponse(channels=channels, has_credentials=True)

    except ImportError:
        logger.error("slack_sdk not installed")
        raise HTTPException(
            status_code=500,
            detail="Slack SDK not installed on server"
        )
    except SlackApiError as e:
        # Slack-side rejection (bad token, missing scope, ...) -> client error.
        logger.error("Slack API error", extra={
            "user_id": user_id,
            "error": str(e)
        })
        raise HTTPException(
            status_code=400,
            detail=f"Slack API error: {e.response.get('error', 'Unknown error')}"
        )
    except Exception as e:
        logger.error("Error fetching Slack channels", extra={
            "user_id": user_id,
            "error": str(e)
        }, exc_info=True)
        raise HTTPException(status_code=500, detail="Failed to fetch Slack channels")
========= + +from fastapi import APIRouter, Depends, HTTPException, Response, Query +from fastapi_pagination import Page +from fastapi_pagination.ext.sqlmodel import paginate +from sqlmodel import Session, select, desc, and_, delete +from typing import Optional +from uuid import uuid4 +import logging +from pydantic import ValidationError + +from app.model.trigger.trigger import Trigger, TriggerIn, TriggerOut, TriggerUpdate, TriggerConfigSchemaOut +from app.model.trigger.trigger_execution import TriggerExecution, TriggerExecutionOut +from app.model.trigger.app_configs import ( + get_config_schema, + validate_config, + has_config, + validate_activation, + ActivationError, +) +from app.model.trigger.app_configs.config_registry import requires_authentication +from app.model.chat.chat_history import ChatHistory +from app.type.trigger_types import TriggerType, TriggerStatus +from app.component.auth import Auth, auth_must +from app.component.database import session +from app.component.redis_utils import get_redis_manager +from app.service.trigger.trigger_schedule_service import TriggerScheduleService +from fastapi_babel import _ +from sqlalchemy import func + +logger = logging.getLogger("server_trigger_controller") + + +def get_execution_counts(session: Session, trigger_ids: list[int]) -> dict[int, int]: + """Get execution counts for multiple triggers in a single query.""" + if not trigger_ids: + return {} + + result = session.exec( + select(TriggerExecution.trigger_id, func.count(TriggerExecution.id)) + .where(TriggerExecution.trigger_id.in_(trigger_ids)) + .group_by(TriggerExecution.trigger_id) + ).all() + + return {trigger_id: count for trigger_id, count in result} + + +def trigger_to_out(trigger: Trigger, execution_count: int = 0) -> TriggerOut: + """Convert Trigger model to TriggerOut with execution count.""" + return TriggerOut( + id=trigger.id, + user_id=trigger.user_id, + project_id=trigger.project_id, + name=trigger.name, + description=trigger.description, + 
@router.post("/", name="create trigger", response_model=TriggerOut)
def create_trigger(
    data: TriggerIn,
    session: Session = Depends(session),
    auth: Auth = Depends(auth_must)
):
    """Create a new trigger.

    Enforces per-user (25) and per-project (5) trigger limits, seeds a
    placeholder chat history for brand-new projects, validates trigger-type
    specific config, and schedules the first run for cron triggers.

    Raises:
        HTTPException 400: limit exceeded or invalid config.
        HTTPException 500: unexpected failure (transaction rolled back).
    """
    user_id = auth.user.id

    try:
        # Check user trigger limit (max 25 triggers per user)
        user_trigger_count = session.exec(
            select(func.count(Trigger.id)).where(Trigger.user_id == str(user_id))
        ).one()

        if user_trigger_count >= 25:
            logger.warning("User trigger limit reached", extra={
                "user_id": user_id,
                "current_count": user_trigger_count,
                "limit": 25
            })
            raise HTTPException(
                status_code=400,
                detail="Maximum number of triggers (25) reached for this user"
            )

        # Check project trigger limit (max 5 triggers per project)
        if data.project_id:
            project_trigger_count = session.exec(
                select(func.count(Trigger.id)).where(
                    and_(
                        Trigger.user_id == str(user_id),
                        Trigger.project_id == data.project_id
                    )
                )
            ).one()

            if project_trigger_count >= 5:
                logger.warning("Project trigger limit reached", extra={
                    "user_id": user_id,
                    "project_id": data.project_id,
                    "current_count": project_trigger_count,
                    "limit": 5
                })
                raise HTTPException(
                    status_code=400,
                    detail="Maximum number of triggers (5) reached for this project"
                )

        # Check if project_id exists in chat_history, if not create one
        if data.project_id:
            existing_chat = session.exec(
                select(ChatHistory).where(ChatHistory.project_id == data.project_id)
            ).first()

            if not existing_chat:
                # Create a placeholder chat_history row for this project
                # (is_real_task() in the history controller filters it out).
                chat_history = ChatHistory(
                    user_id=user_id,
                    task_id=data.project_id,  # Using project_id as task_id
                    project_id=data.project_id,
                    question=f"Project created via trigger: {data.name}",
                    language="en",
                    model_platform=data.agent_model or "none",
                    model_type=data.agent_model or "none",
                    installed_mcp="none",  # Expects String
                    api_key="",
                    api_url="",
                    max_retries=3,
                    project_name=data.name,
                    summary=data.description or "",
                    tokens=0,
                    spend=0,
                    status=2  # completed status
                )
                session.add(chat_history)
                session.commit()
                session.refresh(chat_history)

                logger.info("Chat history created for new project", extra={
                    "user_id": user_id,
                    "project_id": data.project_id,
                    "chat_history_id": chat_history.id
                })

                # Send WebSocket notification about new project (best-effort:
                # a Redis failure must not abort trigger creation).
                try:
                    redis_manager = get_redis_manager()
                    redis_manager.publish_execution_event({
                        "type": "project_created",
                        "project_id": data.project_id,
                        "project_name": data.name,
                        "chat_history_id": chat_history.id,
                        "trigger_name": data.name,
                        "user_id": str(user_id),
                        "created_at": chat_history.created_at.isoformat() if chat_history.created_at else None
                    })
                    logger.debug("WebSocket notification sent for new project", extra={
                        "user_id": user_id,
                        "project_id": data.project_id
                    })
                except Exception as e:
                    logger.warning("Failed to send WebSocket notification for new project", extra={
                        "user_id": user_id,
                        "project_id": data.project_id,
                        "error": str(e)
                    })

        # Generate webhook URL for webhook-based triggers
        webhook_url = None
        if data.trigger_type in (TriggerType.webhook, TriggerType.slack_trigger):
            webhook_url = f"/webhook/trigger/{uuid4()}"

        # Validate trigger-type specific config
        if data.config and has_config(data.trigger_type):
            try:
                validate_config(data.trigger_type, data.config)
            except ValidationError as e:
                logger.warning("Invalid trigger config", extra={
                    "user_id": user_id,
                    "trigger_type": data.trigger_type.value,
                    "errors": e.errors()
                })
                raise HTTPException(
                    status_code=400,
                    detail=f"Invalid config for {data.trigger_type.value}: {e.errors()}"
                )

        # Create trigger instance
        trigger_data = data.model_dump()
        trigger_data["user_id"] = str(user_id)
        trigger_data["webhook_url"] = webhook_url

        # Check if authentication is required - set initial status accordingly
        if has_config(data.trigger_type) and data.config and requires_authentication(data.trigger_type, data.config):
            trigger_data["status"] = TriggerStatus.pending_verification
        else:
            trigger_data["status"] = TriggerStatus.active

        trigger = Trigger(**trigger_data)
        session.add(trigger)
        session.commit()
        session.refresh(trigger)

        # Calculate next_run_at for scheduled triggers
        if trigger.trigger_type == TriggerType.schedule and trigger.custom_cron_expression:
            schedule_service = TriggerScheduleService(session)
            trigger.next_run_at = schedule_service.calculate_next_run_at(trigger)
            session.add(trigger)
            session.commit()
            session.refresh(trigger)

        logger.info("Trigger created", extra={
            "user_id": user_id,
            "trigger_id": trigger.id,
            "trigger_type": data.trigger_type.value,
            "next_run_at": trigger.next_run_at.isoformat() if trigger.next_run_at else None
        })

        return trigger_to_out(trigger, 0)  # New trigger has 0 executions

    except HTTPException:
        # FIX: the blanket `except Exception` below previously swallowed the
        # deliberate 400s raised above (limit checks, invalid config) and
        # re-raised them as opaque 500s. Let HTTP errors propagate unchanged.
        session.rollback()
        raise
    except Exception as e:
        session.rollback()
        logger.error("Trigger creation failed", extra={
            "user_id": user_id,
            "error": str(e)
        }, exc_info=True)
        raise HTTPException(status_code=500, detail="Internal server error")
Optional[TriggerType] = Query(None, description="Filter by trigger type"), + status: Optional[TriggerStatus] = Query(None, description="Filter by status"), + project_id: Optional[str] = Query(None, description="Filter by project ID"), + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +) -> Page[TriggerOut]: + """List triggers for current user.""" + user_id = auth.user.id + + # Build query with filters + conditions = [Trigger.user_id == str(user_id)] + + if trigger_type: + conditions.append(Trigger.trigger_type == trigger_type) + + if status is not None: + conditions.append(Trigger.status == status) + + if project_id: + conditions.append(Trigger.project_id == project_id) + + stmt = ( + select(Trigger) + .where(and_(*conditions)) + .order_by(desc(Trigger.created_at)) + ) + + result = paginate(session, stmt) + total = result.total if hasattr(result, 'total') else 0 + + # Get execution counts for all triggers in the result + trigger_ids = [t.id for t in result.items] + counts = get_execution_counts(session, trigger_ids) + + # Convert triggers to TriggerOut with execution counts + result.items = [trigger_to_out(t, counts.get(t.id, 0)) for t in result.items] + + logger.debug("Triggers listed", extra={ + "user_id": user_id, + "total": total, + "filters": { + "trigger_type": trigger_type.value if trigger_type else None, + "status": status.value if status is not None else None, + "project_id": project_id + } + }) + + return result + +@router.get("/{trigger_id}", name="get trigger", response_model=TriggerOut) +def get_trigger( + trigger_id: int, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +): + """Get a specific trigger by ID.""" + user_id = auth.user.id + + trigger = session.exec( + select(Trigger).where( + and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id)) + ) + ).first() + + if not trigger: + logger.warning("Trigger not found", extra={ + "user_id": user_id, + "trigger_id": trigger_id + }) + raise 
HTTPException(status_code=404, detail="Trigger not found") + + # Get execution count + counts = get_execution_counts(session, [trigger_id]) + execution_count = counts.get(trigger_id, 0) + + logger.debug("Trigger retrieved", extra={ + "user_id": user_id, + "trigger_id": trigger_id + }) + + return trigger_to_out(trigger, execution_count) + + +@router.put("/{trigger_id}", name="update trigger", response_model=TriggerOut) +def update_trigger( + trigger_id: int, + data: TriggerUpdate, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +): + """Update a trigger.""" + user_id = auth.user.id + + trigger = session.exec( + select(Trigger).where( + and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id)) + ) + ).first() + + if not trigger: + logger.warning("Trigger not found for update", extra={ + "user_id": user_id, + "trigger_id": trigger_id + }) + raise HTTPException(status_code=404, detail="Trigger not found") + + try: + update_data = data.model_dump(exclude_unset=True) + + # Validate config if being updated + if "config" in update_data and update_data["config"] is not None: + if has_config(trigger.trigger_type): + try: + validate_config(trigger.trigger_type, update_data["config"]) + except ValidationError as e: + logger.warning("Invalid trigger config on update", extra={ + "user_id": user_id, + "trigger_id": trigger_id, + "trigger_type": trigger.trigger_type.value, + "errors": e.errors() + }) + raise HTTPException( + status_code=400, + detail=f"Invalid config for {trigger.trigger_type.value}: {e.errors()}" + ) + + for key, value in update_data.items(): + setattr(trigger, key, value) + + # Recalculate next_run_at if cron expression or status changed for scheduled triggers + if trigger.trigger_type == TriggerType.schedule: + if "custom_cron_expression" in update_data or "status" in update_data: + if trigger.status == TriggerStatus.active and trigger.custom_cron_expression: + schedule_service = TriggerScheduleService(session) + 
trigger.next_run_at = schedule_service.calculate_next_run_at(trigger) + + session.add(trigger) + session.commit() + session.refresh(trigger) + + # Get execution count + counts = get_execution_counts(session, [trigger_id]) + execution_count = counts.get(trigger_id, 0) + + logger.info("Trigger updated", extra={ + "user_id": user_id, + "trigger_id": trigger_id, + "fields_updated": list(update_data.keys()), + "next_run_at": trigger.next_run_at.isoformat() if trigger.next_run_at else None + }) + + return trigger_to_out(trigger, execution_count) + + except Exception as e: + session.rollback() + logger.error("Trigger update failed", extra={ + "user_id": user_id, + "trigger_id": trigger_id, + "error": str(e) + }, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.delete("/{trigger_id}", name="delete trigger") +def delete_trigger( + trigger_id: int, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +): + """Delete a trigger.""" + user_id = auth.user.id + + trigger = session.exec( + select(Trigger).where( + and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id)) + ) + ).first() + + if not trigger: + logger.warning("Trigger not found for deletion", extra={ + "user_id": user_id, + "trigger_id": trigger_id + }) + raise HTTPException(status_code=404, detail="Trigger not found") + + try: + # Delete execution logs first (bulk delete) + session.exec( + delete(TriggerExecution).where( + TriggerExecution.trigger_id == trigger_id + ) + ) + + # Then delete the trigger + session.delete(trigger) + + session.commit() + + logger.info("Trigger deleted", extra={ + "user_id": user_id, + "trigger_id": trigger_id + }) + + return Response(status_code=204) + + except Exception as e: + session.rollback() + logger.error("Trigger deletion failed", extra={ + "user_id": user_id, + "trigger_id": trigger_id, + "error": str(e) + }, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + 
+@router.post("/{trigger_id}/activate", name="activate trigger", response_model=TriggerOut) +def activate_trigger( + trigger_id: int, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +): + """Activate a trigger.""" + user_id = auth.user.id + + trigger = session.exec( + select(Trigger).where( + and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id)) + ) + ).first() + + if not trigger: + logger.warning("Trigger not found for activation", extra={ + "user_id": user_id, + "trigger_id": trigger_id + }) + raise HTTPException(status_code=404, detail="Trigger not found") + + try: + # Check activation requirements for trigger types with configs + if has_config(trigger.trigger_type): + try: + validate_activation( + trigger_type=trigger.trigger_type, + config_data=trigger.config, + user_id=int(user_id), + session=session + ) + except ActivationError as e: + logger.warning("Trigger activation requirements not met", extra={ + "user_id": user_id, + "trigger_id": trigger_id, + "trigger_type": trigger.trigger_type.value, + "missing_requirements": e.missing_requirements + }) + raise HTTPException( + status_code=400, + detail={ + "message": e.message, + "missing_requirements": e.missing_requirements, + "trigger_type": trigger.trigger_type.value + } + ) + + # Check if authentication is required - set to pending_verification if so + if has_config(trigger.trigger_type) and requires_authentication(trigger.trigger_type, trigger.config): + trigger.status = TriggerStatus.pending_verification + logger.info("Trigger set to pending verification (authentication required)", extra={ + "user_id": user_id, + "trigger_id": trigger_id, + "trigger_type": trigger.trigger_type.value + }) + # Save the status change before raising the exception + session.add(trigger) + session.commit() + session.refresh(trigger) + raise HTTPException( + status_code=401, + detail={ + "message": "Authentication required for this trigger type", + "missing_requirements": ["authentication"], + 
"trigger_type": trigger.trigger_type.value + } + ) + else: + trigger.status = TriggerStatus.active + session.add(trigger) + session.commit() + session.refresh(trigger) + + # Get execution count + counts = get_execution_counts(session, [trigger_id]) + execution_count = counts.get(trigger_id, 0) + + logger.info("Trigger status updated", extra={ + "user_id": user_id, + "trigger_id": trigger_id, + "status": trigger.status.value + }) + + return trigger_to_out(trigger, execution_count) + + except HTTPException: + raise + except Exception as e: + session.rollback() + logger.error("Trigger activation failed", extra={ + "user_id": user_id, + "trigger_id": trigger_id, + "error": str(e) + }, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.post("/{trigger_id}/deactivate", name="deactivate trigger", response_model=TriggerOut) +def deactivate_trigger( + trigger_id: int, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +): + """Deactivate a trigger.""" + user_id = auth.user.id + + trigger = session.exec( + select(Trigger).where( + and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id)) + ) + ).first() + + if not trigger: + logger.warning("Trigger not found for deactivation", extra={ + "user_id": user_id, + "trigger_id": trigger_id + }) + raise HTTPException(status_code=404, detail="Trigger not found") + + try: + trigger.status = TriggerStatus.inactive + session.add(trigger) + session.commit() + session.refresh(trigger) + + # Get execution count + counts = get_execution_counts(session, [trigger_id]) + execution_count = counts.get(trigger_id, 0) + + logger.info("Trigger deactivated", extra={ + "user_id": user_id, + "trigger_id": trigger_id + }) + + return trigger_to_out(trigger, execution_count) + + except Exception as e: + session.rollback() + logger.error("Trigger deactivation failed", extra={ + "user_id": user_id, + "trigger_id": trigger_id, + "error": str(e) + }, exc_info=True) + raise 
HTTPException(status_code=500, detail="Internal server error") + + +@router.get("/{trigger_id}/executions", name="list trigger executions") +def list_trigger_executions( + trigger_id: int, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +) -> Page[TriggerExecutionOut]: + """List executions for a specific trigger.""" + user_id = auth.user.id + + # First verify the trigger belongs to the user + trigger = session.exec( + select(Trigger).where( + and_(Trigger.id == trigger_id, Trigger.user_id == str(user_id)) + ) + ).first() + + if not trigger: + logger.warning("Trigger not found for executions list", extra={ + "user_id": user_id, + "trigger_id": trigger_id + }) + raise HTTPException(status_code=404, detail="Trigger not found") + + # Get executions for this trigger + stmt = ( + select(TriggerExecution) + .where(TriggerExecution.trigger_id == trigger_id) + .order_by(desc(TriggerExecution.created_at)) + ) + + result = paginate(session, stmt) + total = result.total if hasattr(result, 'total') else 0 + + logger.debug("Trigger executions listed", extra={ + "user_id": user_id, + "trigger_id": trigger_id, + "total": total + }) + + return result + + +# ============================================================================ +# Trigger Config Endpoints +# ============================================================================ + +@router.get("/{trigger_type}/config", name="get trigger type config schema") +def get_trigger_type_config( + trigger_type: TriggerType, + auth: Auth = Depends(auth_must) +) -> TriggerConfigSchemaOut: + """ + Get the configuration schema for a specific trigger type. + + This endpoint returns the JSON schema for the trigger type's config field, + which can be used by the frontend to dynamically render configuration forms. 
+ """ + schema = get_config_schema(trigger_type) + + return TriggerConfigSchemaOut( + trigger_type=trigger_type.value, + has_config=has_config(trigger_type), + schema_=schema + ) \ No newline at end of file diff --git a/server/app/controller/trigger/trigger_execution_controller.py b/server/app/controller/trigger/trigger_execution_controller.py new file mode 100644 index 000000000..1c8385493 --- /dev/null +++ b/server/app/controller/trigger/trigger_execution_controller.py @@ -0,0 +1,783 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +from fastapi import APIRouter, Depends, HTTPException, Response, WebSocket, WebSocketDisconnect +from fastapi_pagination import Page +from fastapi_pagination.ext.sqlmodel import paginate +from sqlmodel import Session, select, desc, and_ +from typing import Optional, Dict, Any +from datetime import datetime, timezone +from uuid import uuid4 +import logging +import asyncio + +from app.model.trigger.trigger_execution import ( + TriggerExecution, + TriggerExecutionIn, + TriggerExecutionOut, + TriggerExecutionUpdate +) +from app.model.trigger.trigger import Trigger +from app.model.user.user import User +from app.type.trigger_types import ExecutionStatus, ExecutionType +from app.component.auth import Auth, auth_must +from app.component.database import session +from app.component.redis_utils import get_redis_manager +from app.service.trigger.trigger_service import TriggerService + +logger = logging.getLogger("server_trigger_execution_controller") + +# Store active WebSocket connections per session (WebSocket objects only, metadata in Redis) +# Format: {session_id: WebSocket} +# This is per-worker, and Redis pub/sub is used to broadcast across workers +active_websockets: Dict[str, WebSocket] = {} + +# Background task for Redis pub/sub +_pubsub_task = None + +router = APIRouter(prefix="/execution", tags=["Trigger Executions"]) + + +@router.post("/", name="create trigger execution", response_model=TriggerExecutionOut) +async def create_trigger_execution( + data: TriggerExecutionIn, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +): + """Create a new trigger execution.""" + user_id = auth.user.id + + # Verify the trigger exists and belongs to the user + trigger = session.exec( + select(Trigger).where( + and_(Trigger.id == data.trigger_id, Trigger.user_id == str(user_id)) + ) + ).first() + + if not trigger: + logger.warning("Trigger not found for execution creation", extra={ + "user_id": user_id, + "trigger_id": data.trigger_id + }) + 
raise HTTPException(status_code=404, detail="Trigger not found") + + try: + execution_data = data.model_dump() + execution = TriggerExecution(**execution_data) + + session.add(execution) + session.commit() + session.refresh(execution) + + # Update trigger last executed timestamp + trigger.last_executed_at = datetime.now(timezone.utc) + session.add(trigger) + session.commit() + + logger.info("Trigger execution created", extra={ + "user_id": user_id, + "trigger_id": data.trigger_id, + "execution_id": execution.execution_id, + "execution_type": data.execution_type.value + }) + + # Publish to Redis pub/sub (broadcasts to all workers) + redis_manager = get_redis_manager() + redis_manager.publish_execution_event({ + "type": "execution_created", + "execution_id": execution.execution_id, + "trigger_id": trigger.id, + "trigger_type": trigger.trigger_type.value if trigger.trigger_type else "unknown", + "task_prompt": trigger.task_prompt, + "status": execution.status.value, + "input_data": execution.input_data, + "execution_type": data.execution_type.value, + "user_id": str(user_id), + "timestamp": datetime.now(timezone.utc).isoformat(), + "project_id": str(trigger.project_id) + }) + + return execution + + except Exception as e: + session.rollback() + logger.error("Trigger execution creation failed", extra={ + "user_id": user_id, + "trigger_id": data.trigger_id, + "error": str(e) + }, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get("/", name="list executions") +def list_executions( + trigger_id: Optional[int] = None, + status: Optional[ExecutionStatus] = None, + execution_type: Optional[ExecutionType] = None, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +) -> Page[TriggerExecutionOut]: + """List trigger executions for current user.""" + user_id = auth.user.id + + # Get all trigger IDs that belong to the user + user_trigger_ids = session.exec( + select(Trigger.id).where(Trigger.user_id == 
str(user_id)) + ).all() + + if not user_trigger_ids: + # User has no triggers, return empty result + return Page(items=[], total=0, page=1, size=50, pages=0) + + # Build conditions + conditions = [TriggerExecution.trigger_id.in_(user_trigger_ids)] + + if trigger_id: + if trigger_id not in user_trigger_ids: + raise HTTPException(status_code=404, detail="Trigger not found") + conditions.append(TriggerExecution.trigger_id == trigger_id) + + if status is not None: + conditions.append(TriggerExecution.status == status) + + if execution_type: + conditions.append(TriggerExecution.execution_type == execution_type) + + stmt = ( + select(TriggerExecution) + .where(and_(*conditions)) + .order_by(desc(TriggerExecution.created_at)) + ) + + result = paginate(session, stmt) + total = result.total if hasattr(result, 'total') else 0 + + logger.debug("Executions listed", extra={ + "user_id": user_id, + "total": total, + "filters": { + "trigger_id": trigger_id, + "status": status.value if status is not None else None, + "execution_type": execution_type.value if execution_type else None + } + }) + + return result + + +@router.get("/{execution_id}", name="get execution", response_model=TriggerExecutionOut) +def get_execution( + execution_id: str, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +): + """Get a specific execution by execution ID.""" + user_id = auth.user.id + + # Get the execution and verify ownership through trigger + execution = session.exec( + select(TriggerExecution) + .join(Trigger) + .where( + and_( + TriggerExecution.execution_id == execution_id, + Trigger.user_id == str(user_id) + ) + ) + ).first() + + if not execution: + logger.warning("Execution not found", extra={ + "user_id": user_id, + "execution_id": execution_id + }) + raise HTTPException(status_code=404, detail="Execution not found") + + logger.debug("Execution retrieved", extra={ + "user_id": user_id, + "execution_id": execution_id + }) + + return execution + + 
+@router.put("/{execution_id}", name="update execution", response_model=TriggerExecutionOut) +async def update_execution( + execution_id: str, + data: TriggerExecutionUpdate, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +): + """Update a trigger execution.""" + user_id = auth.user.id + + # Get the execution and verify ownership through trigger + execution = session.exec( + select(TriggerExecution) + .join(Trigger) + .where( + and_( + TriggerExecution.execution_id == execution_id, + Trigger.user_id == str(user_id) + ) + ) + ).first() + + if not execution: + logger.warning("Execution not found for update", extra={ + "user_id": user_id, + "execution_id": execution_id + }) + raise HTTPException(status_code=404, detail="Execution not found") + + try: + update_data = data.model_dump(exclude_unset=True) + + # Check if status is being updated - use TriggerService for proper failure tracking + if "status" in update_data: + trigger_service = TriggerService(session) + # Convert status string back to enum for TriggerService + status_value = ExecutionStatus(update_data["status"]) if isinstance(update_data["status"], str) else update_data["status"] + trigger_service.update_execution_status( + execution=execution, + status=status_value, + output_data=update_data.get("output_data"), + error_message=update_data.get("error_message"), + tokens_used=update_data.get("tokens_used"), + tools_executed=update_data.get("tools_executed") + ) + # Remove status-related fields from update_data since TriggerService handled them + for key in ["status", "output_data", "error_message", "tokens_used", "tools_executed"]: + update_data.pop(key, None) + + # Update remaining fields + if update_data: + # Auto-calculate duration if both started_at and completed_at are set + if ("started_at" in update_data or "completed_at" in update_data) and execution.started_at: + completed_at = update_data.get("completed_at") or execution.completed_at + if completed_at: + # Ensure both 
datetimes are timezone-aware for subtraction + started_at = execution.started_at + if started_at.tzinfo is None: + started_at = started_at.replace(tzinfo=timezone.utc) + if completed_at.tzinfo is None: + completed_at = completed_at.replace(tzinfo=timezone.utc) + duration = (completed_at - started_at).total_seconds() + update_data["duration_seconds"] = duration + + for key, value in update_data.items(): + setattr(execution, key, value) + + session.add(execution) + session.commit() + + session.refresh(execution) + + # Get trigger for event publishing + trigger = session.get(Trigger, execution.trigger_id) + + logger.info("Execution updated", extra={ + "user_id": user_id, + "execution_id": execution_id, + "fields_updated": list(data.model_dump(exclude_unset=True).keys()) + }) + + # Publish to Redis pub/sub (broadcasts to all workers) + redis_manager = get_redis_manager() + redis_manager.publish_execution_event({ + "type": "execution_updated", + "execution_id": execution_id, + "trigger_id": execution.trigger_id, + "status": execution.status.value, + "updated_fields": list(update_data.keys()), + "user_id": str(user_id), + "timestamp": datetime.now(timezone.utc).isoformat(), + "project_id": str(trigger.project_id) if trigger else None + }) + + return execution + + except Exception as e: + session.rollback() + logger.error("Execution update failed", extra={ + "user_id": user_id, + "execution_id": execution_id, + "error": str(e) + }, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.delete("/{execution_id}", name="delete execution") +def delete_execution( + execution_id: str, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +): + """Delete a trigger execution.""" + user_id = auth.user.id + + # Get the execution and verify ownership through trigger + execution = session.exec( + select(TriggerExecution) + .join(Trigger) + .where( + and_( + TriggerExecution.execution_id == execution_id, + Trigger.user_id 
== str(user_id) + ) + ) + ).first() + + if not execution: + logger.warning("Execution not found for deletion", extra={ + "user_id": user_id, + "execution_id": execution_id + }) + raise HTTPException(status_code=404, detail="Execution not found") + + try: + session.delete(execution) + session.commit() + + logger.info("Execution deleted", extra={ + "user_id": user_id, + "execution_id": execution_id + }) + + return Response(status_code=204) + + except Exception as e: + session.rollback() + logger.error("Execution deletion failed", extra={ + "user_id": user_id, + "execution_id": execution_id, + "error": str(e) + }, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.post("/{execution_id}/retry", name="retry execution", response_model=TriggerExecutionOut) +def retry_execution( + execution_id: str, + session: Session = Depends(session), + auth: Auth = Depends(auth_must) +): + """Retry a failed execution.""" + user_id = auth.user.id + + # Get the execution and verify ownership through trigger + execution = session.exec( + select(TriggerExecution) + .join(Trigger) + .where( + and_( + TriggerExecution.execution_id == execution_id, + Trigger.user_id == str(user_id) + ) + ) + ).first() + + if not execution: + logger.warning("Execution not found for retry", extra={ + "user_id": user_id, + "execution_id": execution_id + }) + raise HTTPException(status_code=404, detail="Execution not found") + + if execution.status != ExecutionStatus.failed: + raise HTTPException(status_code=400, detail="Only failed executions can be retried") + + if execution.attempts >= execution.max_retries: + raise HTTPException(status_code=400, detail="Maximum retry attempts exceeded") + + try: + # Create a new execution for the retry + new_execution_id = str(uuid4()) + new_execution = TriggerExecution( + trigger_id=execution.trigger_id, + execution_id=new_execution_id, + execution_type=execution.execution_type, + input_data=execution.input_data, + 
attempts=execution.attempts + 1, + max_retries=execution.max_retries + ) + + session.add(new_execution) + session.commit() + session.refresh(new_execution) + + # Get trigger for event publishing + trigger = session.get(Trigger, execution.trigger_id) + + logger.info("Execution retry created", extra={ + "user_id": user_id, + "original_execution_id": execution_id, + "new_execution_id": new_execution_id, + "attempts": new_execution.attempts + }) + + # Publish to Redis pub/sub (broadcasts to all workers) + redis_manager = get_redis_manager() + redis_manager.publish_execution_event({ + "type": "execution_created", + "execution_id": new_execution.execution_id, + "trigger_id": trigger.id if trigger else execution.trigger_id, + "trigger_type": trigger.trigger_type.value if trigger and trigger.trigger_type else "unknown", + "task_prompt": trigger.task_prompt if trigger else None, + "status": new_execution.status.value, + "input_data": new_execution.input_data, + "execution_type": new_execution.execution_type.value, + "user_id": str(user_id), + "timestamp": datetime.now(timezone.utc).isoformat(), + "project_id": str(trigger.project_id) if trigger else None + }) + + return new_execution + + except Exception as e: + session.rollback() + logger.error("Execution retry failed", extra={ + "user_id": user_id, + "execution_id": execution_id, + "error": str(e) + }, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.websocket("/subscribe") +async def subscribe_executions(websocket: WebSocket): + """Subscribe to trigger execution events via WebSocket. 
+ + Client sends: {"type": "subscribe", "session_id": "unique-session-id", "auth_token": "bearer-token"} + Client acknowledges execution: {"type": "ack", "execution_id": "exec-id"} + + Server sends: {"type": "execution_created", "execution_id": "...", ...} + Server sends: {"type": "heartbeat", "timestamp": "..."} + """ + # Ensure pub/sub listener is started in THIS worker process + await start_pubsub_listener() + + await websocket.accept() + session_id = None + user_id = None + db_session = None + + try: + # Create database session manually for WebSocket + from app.component.database import session_make + db_session = session_make() + # Wait for subscription message + data = await websocket.receive_json() + + if data.get("type") != "subscribe" or not data.get("session_id"): + await websocket.send_json({ + "type": "error", + "message": "Invalid subscription. Send {type: 'subscribe', session_id: 'your-session-id', auth_token: 'bearer-token'}" + }) + await websocket.close() + return + + session_id = data["session_id"] + auth_token = data.get("auth_token") + + # Authenticate user + if not auth_token: + await websocket.send_json({ + "type": "error", + "message": "Authentication required. 
Provide 'auth_token' in subscription message" + }) + await websocket.close() + return + + try: + from app.component.auth import Auth + # Decode token and fetch user + auth = Auth.decode_token(auth_token) + user = db_session.get(User, auth.id) + if not user: + raise Exception("User not found") + auth._user = user + user_id = auth.user.id + logger.info(f"User authenticated for WebSocket {user_id} and {session_id}", extra={ + "user_id": user_id, + "session_id": session_id + }) + except Exception as e: + await websocket.send_json({ + "type": "error", + "message": "Authentication failed" + }) + await websocket.close() + logger.warning("WebSocket authentication failed", extra={ + "session_id": session_id, + "error": str(e) + }) + return + + # Register session in Redis and store WebSocket reference + redis_manager = get_redis_manager() + redis_manager.store_session(session_id, str(user_id)) + active_websockets[session_id] = websocket + + logger.info(f"WebSocket session registered", extra={ + "session_id": session_id, + "user_id": user_id, + "total_active": len(active_websockets) + }) + + await websocket.send_json({ + "type": "connected", + "session_id": session_id, + "timestamp": datetime.now(timezone.utc).isoformat() + }) + + logger.info("Client subscribed to executions", extra={ + "session_id": session_id, + "user_id": user_id, + "total_sessions": len(active_websockets), + "all_session_ids": list(active_websockets.keys()) + }) + + # Handle incoming messages (acknowledgments) + async def handle_messages(): + while True: + try: + msg = await websocket.receive_json() + + if msg.get("type") == "ack" and msg.get("execution_id"): + execution_id = msg["execution_id"] + + # Remove from pending in Redis + redis_manager.remove_pending_execution(session_id, execution_id) + + # Update execution status to running + execution = db_session.exec( + select(TriggerExecution).where( + TriggerExecution.execution_id == execution_id + ) + ).first() + + if execution and execution.status == 
ExecutionStatus.pending: + execution.status = ExecutionStatus.running + execution.started_at = datetime.now(timezone.utc) + db_session.add(execution) + db_session.commit() + + logger.info("Execution acknowledged and started", extra={ + "session_id": session_id, + "execution_id": execution_id + }) + + await websocket.send_json({ + "type": "ack_confirmed", + "execution_id": execution_id, + "status": "running" + }) + + elif msg.get("type") == "ping": + # Publish pong through Redis pub/sub + redis_manager.publish_execution_event({ + "type": "pong", + "session_id": session_id, + "user_id": str(user_id), + "timestamp": datetime.now(timezone.utc).isoformat() + }) + + except WebSocketDisconnect: + break + + # Start heartbeat task + async def send_heartbeat(): + while True: + await asyncio.sleep(30) + try: + await websocket.send_json({ + "type": "heartbeat", + "timestamp": datetime.now(timezone.utc).isoformat() + }) + except: + break + + # Run both tasks concurrently + await asyncio.gather( + handle_messages(), + send_heartbeat(), + return_exceptions=True + ) + + except WebSocketDisconnect as e: + logger.info("Client disconnected", extra={ + "session_id": session_id, + "disconnect_code": getattr(e, 'code', None), + "reason": "websocket_disconnect" + }) + except Exception as e: + logger.error("WebSocket error", extra={"session_id": session_id, "error": str(e)}, exc_info=True) + finally: + # Mark pending executions as missed + if session_id: + redis_manager = get_redis_manager() + + # Clean up session from Redis and local WebSocket dict + redis_manager.remove_session(session_id) + if session_id in active_websockets: + del active_websockets[session_id] + logger.info("Session cleaned up", extra={"session_id": session_id}) + + # Close database session + if db_session: + db_session.close() + + +async def handle_pubsub_message(event_data: Dict[str, Any]): + """Handle execution events from Redis pub/sub. + + This function is called by each worker when a message is published. 
async def handle_pubsub_message(event_data: Dict[str, Any]) -> None:
    """Handle execution events from Redis pub/sub.

    This function is called by each worker when a message is published.
    Each worker will send the message to its own local WebSocket connections
    (``active_websockets``); sessions registered in Redis but connected to a
    different worker are intentionally skipped.

    Args:
        event_data: The decoded pub/sub payload. Expected keys include
            ``type`` and, for execution events, ``execution_id`` and
            ``user_id``; ``pong`` events carry ``session_id``/``timestamp``.
    """
    try:
        event_type = event_data.get("type")
        logger.info(f"[PUBSUB] Received event from Redis: {event_type}", extra={
            "event_type": event_type,
            "execution_id": event_data.get("execution_id"),
            "user_id": event_data.get("user_id")
        })

        # Handle pong events - send only to the specific session.
        # Pong is a point-to-point reply, so there is no user-level fan-out.
        if event_type == "pong":
            target_session_id = event_data.get("session_id")
            if target_session_id and target_session_id in active_websockets:
                try:
                    ws = active_websockets[target_session_id]
                    await ws.send_json({
                        "type": "pong",
                        "timestamp": event_data.get("timestamp")
                    })
                    logger.debug("Pong sent via Redis pub/sub", extra={
                        "session_id": target_session_id
                    })
                except Exception as e:
                    logger.error("Failed to send pong", extra={
                        "session_id": target_session_id,
                        "error": str(e)
                    })
            return

        execution_id = event_data.get("execution_id")
        event_user_id = event_data.get("user_id")

        # Without a user_id we cannot tell which sessions may see this event,
        # so the event is dropped rather than broadcast to everyone.
        if not event_user_id:
            logger.warning("Event missing user_id, cannot filter subscribers", extra={
                "execution_id": execution_id
            })
            return

        # Get user sessions from Redis
        redis_manager = get_redis_manager()
        user_session_ids = redis_manager.get_user_sessions(event_user_id)

        # Get user sessions from Redis and match with local connections
        logger.debug(f"User has {len(user_session_ids)} active session(s)", extra={
            "user_id": event_user_id,
            "session_count": len(user_session_ids)
        })

        # Only notify sessions that are connected to THIS worker.
        # NOTE(review): the `&` below assumes get_user_sessions returns a set
        # (or set-compatible) of session ids — confirm in redis_utils.
        local_sessions = set(active_websockets.keys()) & user_session_ids

        if not local_sessions:
            logger.debug("No local WebSocket connections for this user", extra={
                "user_id": event_user_id,
                "execution_id": execution_id
            })
            return  # No local connections for this user

        logger.info(f"Broadcasting execution to {len(local_sessions)} WebSocket(s)", extra={
            "execution_id": execution_id,
            "user_id": event_user_id,
            "session_count": len(local_sessions)
        })

        disconnected_sessions = []
        notified_count = 0

        for session_id in local_sessions:
            try:
                ws = active_websockets.get(session_id)
                if not ws:
                    disconnected_sessions.append(session_id)
                    continue

                # Send execution event
                await ws.send_json(event_data)
                notified_count += 1

                # Track as pending if it's a new execution
                if event_data.get("type") == "execution_created" and execution_id:
                    redis_manager.add_pending_execution(session_id, execution_id)
                    # Confirm delivery for webhook to proceed — this releases
                    # the webhook request waiting in wait_for_delivery().
                    redis_manager.confirm_delivery(execution_id, session_id)

                logger.debug("Notified session of execution", extra={
                    "session_id": session_id,
                    "user_id": event_user_id,
                    "execution_id": execution_id
                })

            except Exception as e:
                # A failed send marks the session for cleanup below; other
                # sessions still get the event.
                logger.error("Failed to notify session", extra={
                    "session_id": session_id,
                    "error": str(e)
                })
                disconnected_sessions.append(session_id)

        # Clean up disconnected sessions (both Redis registration and the
        # local socket map), so future events skip them.
        for session_id in disconnected_sessions:
            redis_manager.remove_session(session_id)
            if session_id in active_websockets:
                del active_websockets[session_id]

        if notified_count > 0:
            logger.debug("Execution event broadcast complete", extra={
                "execution_id": execution_id,
                "user_id": event_user_id,
                "sessions_notified": notified_count
            })

    except Exception as e:
        # Top-level guard: a malformed event must not kill the subscriber loop.
        logger.error(f"Error handling pub/sub message: {str(e)}", extra={
            "error": str(e),
            "error_type": type(e).__name__,
            "event_data": event_data
        }, exc_info=True)


async def start_pubsub_listener() -> None:
    """Start the Redis pub/sub listener for this worker.

    Idempotent: a second call while ``_pubsub_task`` is set is a no-op, so
    each worker process runs at most one subscriber task.
    """
    global _pubsub_task

    if _pubsub_task is not None:
        return  # Already started

    import os
    logger.info(f"[PID {os.getpid()}] Starting Redis pub/sub listener for execution events")
    redis_manager = get_redis_manager()

    async def run_subscriber():
        # Wraps the (normally never-returning) subscription so a crash is
        # logged instead of silently discarded by the task.
        try:
            await redis_manager.subscribe_to_execution_events(handle_pubsub_message)
        except Exception as e:
            logger.error("Pub/sub listener crashed", extra={"error": str(e)}, exc_info=True)

    # NOTE(review): _pubsub_task is never reset on crash, so a crashed
    # listener cannot be restarted by calling this again — confirm intended.
    _pubsub_task = asyncio.create_task(run_subscriber())
+""" + +from fastapi import APIRouter, Depends, HTTPException, Request +from sqlmodel import Session, select, and_, or_ +from uuid import uuid4 +from datetime import datetime, timezone +import json +import logging +from fastapi_limiter.depends import RateLimiter + +from app.model.trigger.trigger import Trigger +from app.model.trigger.trigger_execution import TriggerExecution +from app.type.trigger_types import TriggerType, TriggerStatus, ExecutionType, ExecutionStatus +from app.component.database import session +from app.component.trigger_utils import check_rate_limits +from app.service.trigger.app_handler_service import get_app_handler + +logger = logging.getLogger("server_webhook_controller") + +router = APIRouter(prefix="/webhook", tags=["Webhook"]) + + +# Trigger types that use webhooks +WEBHOOK_TRIGGER_TYPES = [TriggerType.webhook, TriggerType.slack_trigger] + + +@router.api_route("/trigger/{webhook_uuid}", methods=["GET", "POST"], name="webhook trigger", dependencies=[Depends(RateLimiter(times=10, seconds=60))]) +async def webhook_trigger( + webhook_uuid: str, + request: Request, + db_session: Session = Depends(session) +): + """Handle incoming webhook triggers with app-specific processing.""" + try: + # Get request body + body = await request.body() + try: + input_data = json.loads(body) if body else {} + except json.JSONDecodeError: + input_data = {"raw_body": body.decode()} + + headers = dict(request.headers) + webhook_url = f"/webhook/trigger/{webhook_uuid}" + + # Find the trigger (allow active and pending_verification for verification flows) + trigger = db_session.exec( + select(Trigger).where( + and_( + Trigger.webhook_url == webhook_url, + Trigger.trigger_type.in_(WEBHOOK_TRIGGER_TYPES), + Trigger.status.in_([TriggerStatus.active, TriggerStatus.pending_verification]) + ) + ) + ).first() + + if not trigger: + logger.warning("Webhook trigger not found or inactive", extra={ + "webhook_uuid": webhook_uuid + }) + raise HTTPException(status_code=404, 
detail="Webhook not found or inactive") + + # Get app handler based on trigger_type + handler = get_app_handler(trigger.trigger_type) + + # App-specific authentication + if handler: + auth_result = await handler.authenticate(request, body, trigger, db_session) + + if not auth_result.success: + raise HTTPException(status_code=401, detail=auth_result.reason or "Invalid signature") + + # Return challenge response for URL verification (e.g., Slack) + # Don't update status yet - wait for actual events to confirm integration works + if auth_result.data: + logger.info("URL verification challenge received", extra={ + "trigger_id": trigger.id, + "trigger_type": trigger.trigger_type.value, + "status": trigger.status.value + }) + return auth_result.data + + # Update trigger status from pending_verification to active after receiving + # a real event (not just URL verification) with valid signature + if trigger.status == TriggerStatus.pending_verification: + trigger.status = TriggerStatus.active + db_session.add(trigger) + db_session.commit() + db_session.refresh(trigger) + logger.info("Trigger status updated to active after receiving valid event", extra={ + "trigger_id": trigger.id, + "trigger_type": trigger.trigger_type.value + }) + + # Notify Redis subscribers of successful activation + try: + from app.component.redis_utils import get_redis_manager + redis_manager = get_redis_manager() + redis_manager.publish_execution_event({ + "type": "trigger_activated", + "trigger_id": trigger.id, + "trigger_type": trigger.trigger_type.value, + "task_prompt": trigger.task_prompt, + "user_id": str(trigger.user_id), + "project_id": str(trigger.project_id), + "webhook_uuid": webhook_uuid + }) + except Exception as e: + logger.warning(f"Failed to publish activation event: {e}") + + # Default webhook: validate request method + if trigger.trigger_type == TriggerType.webhook and trigger.webhook_method: + expected_method = trigger.webhook_method.value if hasattr(trigger.webhook_method, 'value') 
else str(trigger.webhook_method) + expected_method = expected_method.rstrip(',') + if request.method.upper() != expected_method.upper(): + raise HTTPException( + status_code=405, + detail=f"Method not allowed. This webhook only accepts {expected_method} requests" + ) + + # Prepare request metadata for filtering and normalization + safe_headers = {k: v for k, v in headers.items() if k.lower() not in ['authorization', 'cookie']} + query_params = dict(request.query_params) + body_raw = body.decode() if body else "" + + request_meta = { + "headers": safe_headers, + "query_params": query_params, + "method": request.method, + "url": str(request.url), + "client_ip": request.client.host if request.client else None + } + + # App-specific event filtering (pass headers and body for webhook config filtering) + if handler: + # For default webhook handler, pass additional context + if trigger.trigger_type == TriggerType.webhook: + filter_result = await handler.filter_event( + input_data, + trigger, + headers=safe_headers, + body_raw=body_raw + ) + else: + filter_result = await handler.filter_event(input_data, trigger) + + if not filter_result.success: + logger.debug("Event filtered", extra={ + "trigger_id": trigger.id, + "reason": filter_result.reason + }) + return {"status": "ignored", "reason": filter_result.reason} + + # Check rate limits + current_time = datetime.now(timezone.utc) + if trigger.max_executions_per_hour or trigger.max_executions_per_day: + if not check_rate_limits(db_session, trigger): + logger.warning("Webhook rate limit exceeded", extra={ + "trigger_id": trigger.id + }) + raise HTTPException(status_code=429, detail="Rate limit exceeded") + + # Check single execution + if trigger.is_single_execution: + from sqlmodel import func + execution_count = db_session.exec( + select(func.count(TriggerExecution.id)).where( + TriggerExecution.trigger_id == trigger.id + ) + ).first() + if execution_count > 0: + raise HTTPException(status_code=409, detail="Single execution 
trigger already executed") + + # Normalize input data (pass request_meta for full webhook input) + if handler: + execution_input = handler.normalize_payload(input_data, trigger, request_meta=request_meta) + else: + execution_input = { + "headers": safe_headers, + "query_params": query_params, + "body": input_data, + "method": request.method, + "url": str(request.url), + "client_ip": request.client.host if request.client else None + } + + # Determine execution type + execution_type = handler.execution_type if handler else ExecutionType.webhook + + # Create execution record + execution_id = str(uuid4()) + execution = TriggerExecution( + trigger_id=trigger.id, + execution_id=execution_id, + execution_type=execution_type, + status=ExecutionStatus.pending, + input_data=execution_input, + started_at=current_time + ) + + db_session.add(execution) + + # Update trigger + trigger.last_executed_at = current_time + trigger.last_execution_status = "pending" + db_session.add(trigger) + db_session.commit() + db_session.refresh(execution) + + logger.info("Webhook trigger executed", extra={ + "trigger_id": trigger.id, + "execution_id": execution_id, + "trigger_type": trigger.trigger_type.value, + "user_id": trigger.user_id + }) + + # Notify WebSocket subscribers and wait for delivery confirmation + try: + from app.component.redis_utils import get_redis_manager + redis_manager = get_redis_manager() + + # Check if user has any active WebSocket sessions + has_active_sessions = redis_manager.has_active_sessions_for_user(str(trigger.user_id)) + + redis_manager.publish_execution_event({ + "type": "execution_created", + "execution_id": execution_id, + "trigger_id": trigger.id, + "trigger_type": trigger.trigger_type.value, + "task_prompt": trigger.task_prompt, + "status": "pending", + "input_data": execution_input, + "user_id": str(trigger.user_id), + "project_id": str(trigger.project_id) + }) + + if has_active_sessions: + # Wait for delivery confirmation (10 second timeout) + 
delivery_confirmation = await redis_manager.wait_for_delivery( + execution_id, + timeout=10.0 + ) + + if delivery_confirmation: + logger.info("Webhook delivery confirmed", extra={ + "execution_id": execution_id, + "session_id": delivery_confirmation.get("session_id") + }) + return { + "success": True, + "execution_id": execution_id, + "message": "Webhook trigger delivered to client", + "delivered": True, + "session_id": delivery_confirmation.get("session_id") + } + else: + logger.warning("Webhook delivery confirmation timed out", extra={ + "execution_id": execution_id, + "trigger_id": trigger.id + }) + return { + "success": True, + "execution_id": execution_id, + "message": "Webhook trigger processed but delivery not confirmed", + "delivered": False, + "reason": "timeout" + } + else: + # No active sessions, execution is queued + logger.info("No active WebSocket sessions for user", extra={ + "execution_id": execution_id, + "user_id": trigger.user_id + }) + return { + "success": True, + "execution_id": execution_id, + "message": "Webhook trigger processed, no active client connected", + "delivered": False, + "reason": "no_active_sessions" + } + except Exception as e: + logger.warning(f"Failed to publish/confirm WebSocket event: {e}") + return { + "success": True, + "execution_id": execution_id, + "message": "Webhook trigger processed but WebSocket notification failed", + "delivered": False, + "reason": str(e) + } + + except HTTPException: + raise + except Exception as e: + logger.error("Webhook trigger processing failed", extra={ + "webhook_uuid": webhook_uuid, + "error": str(e) + }, exc_info=True) + raise HTTPException(status_code=500, detail="Internal server error") + + +@router.get("/trigger/{webhook_uuid}/info", name="webhook info") +def get_webhook_info( + webhook_uuid: str, + db_session: Session = Depends(session) +): + """Get information about a webhook trigger (public endpoint).""" + webhook_url = f"/webhook/trigger/{webhook_uuid}" + + trigger = 
db_session.exec( + select(Trigger).where( + and_( + Trigger.webhook_url == webhook_url, + Trigger.trigger_type.in_(WEBHOOK_TRIGGER_TYPES) + ) + ) + ).first() + + if not trigger: + raise HTTPException(status_code=404, detail="Webhook not found") + + return { + "name": trigger.name, + "description": trigger.description, + "status": trigger.status.value, + "trigger_type": trigger.trigger_type.value, + "is_active": trigger.status == TriggerStatus.active, + "webhook_method": trigger.webhook_method.value if trigger.webhook_method else None, + "last_executed_at": trigger.last_executed_at.isoformat() if trigger.last_executed_at else None, + } + + diff --git a/server/app/model/chat/chat_history_grouped.py b/server/app/model/chat/chat_history_grouped.py index 9729382e9..c5f68c95a 100644 --- a/server/app/model/chat/chat_history_grouped.py +++ b/server/app/model/chat/chat_history_grouped.py @@ -32,6 +32,7 @@ class ProjectGroup(BaseModel): total_completed_tasks: int = 0 total_ongoing_tasks: int = 0 average_tokens_per_task: int = 0 + total_triggers: int = 0 @model_validator(mode="after") def calculate_averages(self): diff --git a/server/app/model/trigger/__init__.py b/server/app/model/trigger/__init__.py new file mode 100644 index 000000000..c8ddbe584 --- /dev/null +++ b/server/app/model/trigger/__init__.py @@ -0,0 +1,28 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +"""Trigger models package.""" +from app.model.trigger.trigger import Trigger, TriggerIn, TriggerUpdate, TriggerOut, TriggerConfigSchemaOut +from app.model.trigger.trigger_execution import TriggerExecution, TriggerExecutionIn, TriggerExecutionUpdate + +__all__ = [ + "Trigger", + "TriggerIn", + "TriggerUpdate", + "TriggerOut", + "TriggerExecution", + "TriggerExecutionIn", + "TriggerExecutionUpdate", + "TriggerConfigSchemaOut" +] diff --git a/server/app/model/trigger/app_configs/__init__.py b/server/app/model/trigger/app_configs/__init__.py new file mode 100644 index 000000000..933448356 --- /dev/null +++ b/server/app/model/trigger/app_configs/__init__.py @@ -0,0 +1,64 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +""" +App Trigger Configuration Models + +This package contains configuration models for different trigger app types. 
+""" + +from app.model.trigger.app_configs.base_config import ( + BaseTriggerConfig, + ActivationError, +) +from app.model.trigger.app_configs.slack_config import ( + SlackEventType, + SlackTriggerConfig, +) +from app.model.trigger.app_configs.webhook_config import ( + WebhookTriggerConfig, +) +from app.model.trigger.app_configs.schedule_config import ( + ScheduleTriggerConfig, +) +from app.model.trigger.app_configs.config_registry import ( + get_config_class, + get_config_schema, + validate_config, + register_config_class, + get_supported_config_types, + has_config, + validate_activation, +) + +__all__ = [ + # Base config + "BaseTriggerConfig", + "ActivationError", + # Slack config + "SlackEventType", + "SlackTriggerConfig", + # Webhook config + "WebhookTriggerConfig", + # Schedule config + "ScheduleTriggerConfig", + # Registry functions + "get_config_class", + "get_config_schema", + "validate_config", + "register_config_class", + "get_supported_config_types", + "has_config", + "validate_activation", +] \ No newline at end of file diff --git a/server/app/model/trigger/app_configs/base_config.py b/server/app/model/trigger/app_configs/base_config.py new file mode 100644 index 000000000..9e10d306f --- /dev/null +++ b/server/app/model/trigger/app_configs/base_config.py @@ -0,0 +1,253 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
class ActivationError(Exception):
    """Exception raised when trigger activation requirements are not met."""
    def __init__(self, message: str, missing_requirements: Optional[List[str]] = None):
        self.message = message
        # `or []` guards the None default so callers can always iterate.
        self.missing_requirements = missing_requirements or []
        super().__init__(self.message)


class BaseTriggerConfig(BaseModel):
    """
    Base trigger configuration that all app-specific configs should extend.

    Contains common fields like message filtering and authentication requirements
    that are shared across different trigger types. The `json_schema_extra`
    payloads on each field carry `ui:*` hints consumed by the frontend form
    renderer; the `triggers.base.*` strings are i18n keys, not display text.
    """

    # Authentication Configuration
    authentication_required: bool = Field(
        default=False,
        description="Whether authentication is required for this trigger",
        json_schema_extra={
            "ui:widget": "switch",
            "ui:label": "triggers.base.authentication_required.label",
            "ui:notice": "triggers.base.authentication_required.notice"
        },
    )

    # Auto-disable Configuration
    max_failure_count: Optional[int] = Field(
        default=None,
        description="Maximum consecutive failures before auto-disabling the trigger. Set to None to disable this feature.",
        ge=1,
        le=100,
        json_schema_extra={
            "ui:widget": "number-input",
            "ui:label": "triggers.base.max_failure_count.label",
            "ui:placeholder": "triggers.base.max_failure_count.placeholder",
            "ui:notice": "triggers.base.max_failure_count.notice"
        },
    )

    # Common Filtering Options
    message_filter: Optional[str] = Field(
        default=None,
        description="Regex pattern to filter incoming messages/events",
        json_schema_extra={
            "ui:label": "triggers.base.message_filter.label",
            "ui:widget": "text-input",
            "ui:placeholder": "triggers.base.message_filter.placeholder",
            "ui:notice": "triggers.base.message_filter.notice",
            "ui:validation": "regex",
            "maxLength": 500
        },
    )

    @field_validator("message_filter")
    @classmethod
    def validate_regex(cls, v: Optional[str]) -> Optional[str]:
        """Validate that the message_filter is a valid regex pattern."""
        if v is None:
            return v
        try:
            re.compile(v)
        except re.error as e:
            raise ValueError(f"Invalid regex: {e}")
        return v

    def matches_filter(self, text: Optional[str]) -> bool:
        """
        Check if the given text matches the message filter.

        Args:
            text: The text to check against the filter

        Returns:
            True if no filter is set, if the text is None, or if the text
            matches the filter (note: a None text passes even when a filter
            is configured — events without text are not filtered out).
        """
        if self.message_filter is None or text is None:
            return True

        pattern = re.compile(self.message_filter)
        return bool(pattern.search(text))

    def should_auto_disable(self, consecutive_failures: int) -> bool:
        """
        Check if the trigger should be auto-disabled based on failure count.

        Args:
            consecutive_failures: The current number of consecutive failures

        Returns:
            True if the trigger should be disabled, False otherwise
            (always False when max_failure_count is unset).
        """
        if self.max_failure_count is None:
            return False
        return consecutive_failures >= self.max_failure_count

    def get_required_config_group(self) -> Optional[ConfigGroup]:
        """
        Get the config group required for this trigger type.

        Override this in subclasses to specify the config group (e.g., ConfigGroup.SLACK).
        This leverages the same ConfigGroup enum used by toolkits, ensuring triggers
        and toolkits for the same service share credentials.

        Returns:
            The ConfigGroup enum value, or None if no config group is required
        """
        return None

    def get_required_credentials(self) -> List[str]:
        """
        Get the list of required credential names for this trigger.

        Override this in subclasses to specify required credentials.

        Returns:
            List of credential names that must be present (e.g., ["SLACK_BOT_TOKEN"])
        """
        return []

    # Built in, depends on ConfigInfo from models/config/config.py
    def check_activation_requirements(
        self,
        user_id: int,
        session: "Session"
    ) -> Dict[str, Any]:
        """
        Check if all activation requirements are met for this trigger.

        This method never raises; it always reports via the returned dict.
        Use validate_activation() for the raising variant.

        Args:
            user_id: The ID of the user who owns the trigger
            session: Database session for querying credentials

        Returns:
            Dict with:
            - can_activate: bool - whether the trigger can be activated
            - missing_requirements: list - list of missing requirements
            - message: str - human-readable status message
        """
        if not self.authentication_required:
            return {
                "can_activate": True,
                "missing_requirements": [],
                "message": "Authentication not required"
            }

        config_group = self.get_required_config_group()
        required_credentials = self.get_required_credentials()

        if not config_group or not required_credentials:
            return {
                "can_activate": True,
                "missing_requirements": [],
                "message": "No specific credentials required"
            }

        # Import here to avoid circular imports
        from sqlmodel import select, and_
        from app.model.config.config import Config

        # Query for user's credentials in the required config group
        # Use config_group.value since config_group is now a ConfigGroup enum
        configs = session.exec(
            select(Config).where(
                and_(
                    Config.user_id == int(user_id),
                    Config.config_group == config_group.value
                )
            )
        ).all()

        available_credentials = {
            config.config_name: config.config_value
            for config in configs
            if config.config_value  # Only count non-empty values
        }

        missing = [
            cred for cred in required_credentials
            if cred not in available_credentials
        ]

        if missing:
            return {
                "can_activate": False,
                "missing_requirements": missing,
                "message": f"Missing required credentials: {', '.join(missing)}"
            }

        return {
            "can_activate": True,
            "missing_requirements": [],
            "message": "All requirements met"
        }

    def validate_activation(self, user_id: int, session: "Session") -> None:
        """
        Validate that the trigger can be activated.

        Args:
            user_id: The ID of the user who owns the trigger
            session: Database session for querying credentials

        Raises:
            ActivationError: If activation requirements are not met
        """
        result = self.check_activation_requirements(user_id, session)

        if not result["can_activate"]:
            raise ActivationError(
                message=result["message"],
                missing_requirements=result["missing_requirements"]
            )

    @classmethod
    def validate_config(cls, config_data: dict) -> "BaseTriggerConfig":
        """Validate and return a config instance."""
        return cls(**config_data)
# Registry of trigger types to their config classes
_CONFIG_REGISTRY: Dict[TriggerType, Type[BaseTriggerConfig]] = {
    TriggerType.slack_trigger: SlackTriggerConfig,
    TriggerType.webhook: WebhookTriggerConfig,
    TriggerType.schedule: ScheduleTriggerConfig,
}


def get_config_class(trigger_type: TriggerType) -> Optional[Type[BaseTriggerConfig]]:
    """Look up the Pydantic config class registered for *trigger_type*.

    Returns None when the trigger type has no registered config class.
    """
    return _CONFIG_REGISTRY.get(trigger_type)


def register_config_class(trigger_type: TriggerType, config_class: Type[BaseTriggerConfig]):
    """Register (or overwrite) the config class for *trigger_type*."""
    _CONFIG_REGISTRY[trigger_type] = config_class


def get_config_schema(trigger_type: TriggerType) -> Optional[Dict[str, Any]]:
    """Return the JSON schema of *trigger_type*'s config, or None if unregistered."""
    registered = get_config_class(trigger_type)
    return registered.model_json_schema() if registered else None


def validate_config(trigger_type: TriggerType, config_data: Optional[dict]) -> Optional[BaseTriggerConfig]:
    """Validate *config_data* against the registered config class.

    Returns the validated model instance; None when *config_data* is None or
    no config class is registered for the trigger type.

    Raises:
        ValidationError: If the config data is invalid.
    """
    if config_data is None:
        return None

    registered = get_config_class(trigger_type)
    if registered is None:
        return None
    return registered(**config_data)


def get_supported_config_types() -> list[TriggerType]:
    """Return every trigger type with a registered config class."""
    return list(_CONFIG_REGISTRY)


def has_config(trigger_type: TriggerType) -> bool:
    """True when *trigger_type* has a config class registered."""
    return trigger_type in _CONFIG_REGISTRY


def requires_authentication(trigger_type: TriggerType, config_data: Optional[dict] = None) -> bool:
    """Whether *trigger_type* (with optional *config_data*) requires auth.

    Unregistered trigger types never require authentication.
    """
    registered = get_config_class(trigger_type)
    if registered is None:
        return False

    parsed = registered(**(config_data or {}))
    return parsed.authentication_required


def validate_activation(
    trigger_type: TriggerType,
    config_data: Optional[dict],
    user_id: int,
    session: "Session"
) -> None:
    """Validate that a trigger can be activated; raise if it cannot.

    Args:
        trigger_type: The trigger type to validate.
        config_data: The config data for the trigger.
        user_id: The ID of the user who owns the trigger.
        session: Database session for querying credentials.

    Raises:
        ActivationError: If activation requirements are not met.
    """
    registered = get_config_class(trigger_type)
    if registered is None:
        return  # No requirements to check

    registered(**(config_data or {})).validate_activation(user_id, session)
+# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +""" +Schedule Trigger Configuration Models + +Minimal configuration for scheduled triggers. Schedule details (time, day, weekday) +are handled by custom_cron_expression. This config only handles: +- date: For one-time executions (cron has no year) +- expirationDate: For recurring schedules with an end date +""" + +from datetime import datetime, timezone +from typing import Optional, Tuple +from pydantic import Field, field_validator + +from app.model.trigger.app_configs.base_config import BaseTriggerConfig + + +class ScheduleTriggerConfig(BaseTriggerConfig): + """ + Minimal schedule trigger configuration. + + The cron expression handles time, day, weekday, and month scheduling. + This config only handles what cron cannot: + - date: Full date for one-time execution (cron has no year) + - expirationDate: End date for recurring schedules + + Examples: + Once (One-time execution): + { + "date": "2026-03-15" + } + + Daily/Weekly/Monthly (no expiration): + {} + + Daily/Weekly/Monthly (with expiration): + { + "expirationDate": "2026-06-30" + } + """ + + # Date for one-time execution (YYYY-MM-DD format) + # Required when is_single_execution=True because cron has no year + date: Optional[str] = Field( + default=None, + description="Full date for one-time execution (YYYY-MM-DD). Required for is_single_execution=True since cron has no year." + ) + + # Expiration date for recurring schedules (YYYY-MM-DD format) + expirationDate: Optional[str] = Field( + default=None, + description="End date for recurring schedules (YYYY-MM-DD). Schedule will be marked as completed after this date." 
+ ) + + @field_validator("date") + @classmethod + def validate_date_format(cls, v: Optional[str]) -> Optional[str]: + """Validate that date is in YYYY-MM-DD format.""" + if v is None: + return None + + try: + datetime.strptime(v, "%Y-%m-%d") + except ValueError: + raise ValueError("Date must be in YYYY-MM-DD format") + + return v + + @field_validator("expirationDate") + @classmethod + def validate_expiration_date_format(cls, v: Optional[str]) -> Optional[str]: + """Validate that expiration date is in YYYY-MM-DD format.""" + if v is None: + return None + + try: + datetime.strptime(v, "%Y-%m-%d") + except ValueError: + raise ValueError("Expiration date must be in YYYY-MM-DD format") + + return v + + def is_expired(self, check_date: Optional[datetime] = None) -> bool: + """ + Check if the schedule has expired. + + For one-time (date is set): Check if date has passed + For recurring (expirationDate is set): Check if expiration date has passed + + Args: + check_date: Date to check against (defaults to now) + + Returns: + True if the schedule has expired, False otherwise + """ + if check_date is None: + check_date = datetime.now(timezone.utc) + + # One-time execution: check if date has passed + if self.date: + execution_date = datetime.strptime(self.date, "%Y-%m-%d").replace( + hour=23, minute=59, second=59, tzinfo=timezone.utc + ) + return check_date > execution_date + + # Recurring with expiration: check if expiration date has passed + if self.expirationDate: + expiration = datetime.strptime(self.expirationDate, "%Y-%m-%d").replace( + hour=23, minute=59, second=59, tzinfo=timezone.utc + ) + return check_date > expiration + + # No expiration set + return False + + def should_execute(self, check_date: Optional[datetime] = None) -> Tuple[bool, str]: + """ + Check if the schedule should execute. 
+ + Args: + check_date: Date to check against (defaults to now) + + Returns: + Tuple of (should_execute, reason) + """ + if self.is_expired(check_date): + return False, "schedule_expired" + + return True, "ok" + + @classmethod + def validate_config(cls, config_data: dict) -> "ScheduleTriggerConfig": + """Validate and return a ScheduleTriggerConfig instance.""" + return cls(**config_data) diff --git a/server/app/model/trigger/app_configs/slack_config.py b/server/app/model/trigger/app_configs/slack_config.py new file mode 100644 index 000000000..328c0ed98 --- /dev/null +++ b/server/app/model/trigger/app_configs/slack_config.py @@ -0,0 +1,181 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +""" +Slack Trigger Configuration Models + +Configuration models for Slack webhook triggers. These are stored in the +trigger's config field and used by the webhook controller for +app-specific event handling. 
+""" + +from enum import StrEnum +from typing import Optional, List, TYPE_CHECKING +from pydantic import Field + +from app.model.trigger.app_configs.base_config import BaseTriggerConfig +from app.type.config_group import ConfigGroup + +if TYPE_CHECKING: + from sqlmodel import Session + + +class SlackEventType(StrEnum): + """Slack event types that can trigger the workflow""" + ANY = "any_event" + APP_MENTION = "app_mention" + MESSAGE = "message" + FILE_SHARED = "file_shared" + FILE_PUBLIC = "file_public" + CHANNEL_CREATED = "channel_created" + CHANNEL_ARCHIVE = "channel_archive" + CHANNEL_UNARCHIVE = "channel_unarchive" + CHANNEL_RENAME = "channel_rename" + MEMBER_JOINED_CHANNEL = "member_joined_channel" + MEMBER_LEFT_CHANNEL = "member_left_channel" + TEAM_JOIN = "team_join" + REACTION_ADDED = "reaction_added" + REACTION_REMOVED = "reaction_removed" + PIN_ADDED = "pin_added" + PIN_REMOVED = "pin_removed" + APP_HOME_OPENED = "app_home_opened" + + +class SlackTriggerConfig(BaseTriggerConfig): + """ + Slack-specific trigger configuration. + + Extends BaseTriggerConfig with Slack-specific fields for event handling, + channel filtering, and bot message handling. 
+ """ + # Override: Slack triggers require authentication + authentication_required: bool = Field( + default=True, + description="Whether authentication is required for this trigger", + json_schema_extra={ + "ui:widget": "switch", + "ui:label": "triggers.slack.authentication_required.label", + "ui:notice": "triggers.slack.authentication_required.notice", + "hidden": True + }, + ) + + # API Key + SLACK_BOT_TOKEN: Optional[str] = Field( + default=None, + description="Slack Bot Token for API access", + json_schema_extra={ + "ui:label": "triggers.slack.bot_token.label", + "ui:widget": "text-input", + "ui:widget:type": "secret", + "ui:placeholder": "triggers.slack.bot_token.placeholder", + "ui:notice": "triggers.slack.bot_token.notice", + "minLength": 20, + "maxLength": 200, + "pattern": "^xoxb-", + "api:GET": f"/configs?config_group={ConfigGroup.SLACK.value}", + "api:POST": "/configs", + "api:PUT": "/configs/{config_id}", + "config_group": ConfigGroup.SLACK.value, + "exclude": True # Exclude from saving to trigger/config + }, + ) + SLACK_SIGNING_SECRET: Optional[str] = Field( + default=None, + description="Slack Signing Secret for API request verification", + json_schema_extra={ + "ui:label": "triggers.slack.signing_secret.label", + "ui:widget": "text-input", + "ui:widget:type": "secret", + "ui:placeholder": "triggers.slack.signing_secret.placeholder", + "ui:notice": "triggers.slack.signing_secret.notice", + "minLength": 32, + "maxLength": 64, + "pattern": "^[a-f0-9]+$", + "api:GET": f"/configs?config_group={ConfigGroup.SLACK.value}", + "api:POST": "/configs", + "api:PUT": "/configs/{config_id}", + "config_group": ConfigGroup.SLACK.value, + "exclude": True # Exclude from saving to trigger/config + }, + ) + + # Event Selection + events: List[SlackEventType] = Field( + default=[SlackEventType.MESSAGE], + description="Slack event types to trigger on", + json_schema_extra={ + "ui:label": "triggers.slack.events.label", + "ui:widget": "multi-select", + "ui:options": 
[{"label": e.value, "value": e.value} for e in SlackEventType], + "ui:notice": "triggers.slack.events.notice" + } + ) + + # Channel Configuration + channel_id: Optional[str] = Field( + default=None, + description="Specific channel ID to watch", + json_schema_extra={ + "ui:label": "triggers.slack.channel_id.label", + "ui:widget": "multi-select", + "ui:options": ["fetch channel IDs from Slack API"], + "ui:placeholder": "triggers.slack.channel_id.placeholder", + "pattern": "^C[A-Z0-9]{8,}$", + "api:GET": "trigger/slack/channels", + "ui:notice": "triggers.slack.channel_id.notice", + "hidden": True + }, + ) + + # Slack-Specific Filtering Options + ignore_bot_messages: bool = Field( + default=True, + description="Ignore messages from bots", + json_schema_extra={ + "ui:widget": "switch", + "ui:label": "triggers.slack.ignore_bot_messages.label", + }, + ) + + ignore_users: List[str] = Field( + default=[], + description="User IDs to ignore", + json_schema_extra={ + "ui:label": "triggers.slack.ignore_users.label", + "ui:widget": "multi-text-input", + "ui:placeholder": "triggers.slack.ignore_users.placeholder", + "ui:notice": "triggers.slack.ignore_users.notice", + "pattern": "^U[A-Z0-9]{8,}$" + }, + ) + + def get_required_config_group(self) -> ConfigGroup: + """Get the config group required for Slack triggers.""" + return ConfigGroup.SLACK + + def get_required_credentials(self) -> List[str]: + """Get the list of required Slack credentials.""" + return ["SLACK_BOT_TOKEN", "SLACK_SIGNING_SECRET"] + + def should_trigger(self, event_type: str) -> bool: + """Check if this event type should trigger the workflow.""" + if "any_event" in self.events: + return True + return event_type in self.events + + @classmethod + def validate_config(cls, config_data: dict) -> "SlackTriggerConfig": + """Validate and return a SlackTriggerConfig instance.""" + return cls(**config_data) \ No newline at end of file diff --git a/server/app/model/trigger/app_configs/webhook_config.py 
b/server/app/model/trigger/app_configs/webhook_config.py new file mode 100644 index 000000000..6a4957add --- /dev/null +++ b/server/app/model/trigger/app_configs/webhook_config.py @@ -0,0 +1,219 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +""" +Webhook Trigger Configuration Models + +Configuration models for generic webhook triggers. These are stored in the +trigger's config field and used by the webhook controller for +request filtering and payload normalization. +""" + +import re +from typing import Optional, List +from pydantic import Field + +from app.model.trigger.app_configs.base_config import BaseTriggerConfig + + +class WebhookTriggerConfig(BaseTriggerConfig): + """ + Generic webhook trigger configuration. + + Extends BaseTriggerConfig with webhook-specific fields for filtering + incoming requests based on headers, body content, or custom patterns. 
+ """ + + # Override authentication_required to default to False for generic webhooks + authentication_required: bool = Field( + default=False, + description="Whether authentication is required for this trigger", + json_schema_extra={ + "ui:widget": "switch", + "ui:label": "triggers.webhook.authentication_required.label", + "ui:notice": "triggers.webhook.authentication_required.notice" + }, + ) + + # Content filtering + body_contains: Optional[str] = Field( + default=None, + description="Only trigger if the request body contains this string", + json_schema_extra={ + "ui:label": "triggers.webhook.body_contains.label", + "ui:widget": "text-input", + "ui:placeholder": "triggers.webhook.body_contains.placeholder", + "ui:notice": "triggers.webhook.body_contains.notice", + "minLength": 1, + "maxLength": 500 + }, + ) + + # Header filtering + required_headers: List[str] = Field( + default=[], + description="List of headers that must be present in the request", + json_schema_extra={ + "ui:label": "triggers.webhook.required_headers.label", + "ui:widget": "multi-text-input", + "ui:placeholder": "triggers.webhook.required_headers.placeholder", + "ui:notice": "triggers.webhook.required_headers.notice", + "pattern": "^[A-Za-z0-9-]+$", + "maxLength": 100 + }, + ) + + header_match: Optional[str] = Field( + default=None, + description="Regex pattern to match against request headers (format: Header-Name: pattern)", + json_schema_extra={ + "ui:label": "triggers.webhook.header_match.label", + "ui:widget": "text-input", + "ui:placeholder": "triggers.webhook.header_match.placeholder", + "ui:notice": "triggers.webhook.header_match.notice", + "ui:validation": "regex", + "maxLength": 500 + }, + ) + + # Include full request metadata + include_headers: bool = Field( + default=False, + description="Include request headers in the execution input", + json_schema_extra={ + "ui:widget": "switch", + "ui:label": "triggers.webhook.include_headers.label", + "ui:notice": 
"triggers.webhook.include_headers.notice" + }, + ) + + include_query_params: bool = Field( + default=True, + description="Include query parameters in the execution input", + json_schema_extra={ + "ui:widget": "switch", + "ui:label": "triggers.webhook.include_query_params.label", + }, + ) + + include_request_metadata: bool = Field( + default=False, + description="Include request metadata (method, URL, client IP) in execution input", + json_schema_extra={ + "ui:widget": "switch", + "ui:label": "triggers.webhook.include_request_metadata.label", + "ui:notice": "triggers.webhook.include_request_metadata.notice" + }, + ) + + def matches_body_filter(self, body: str) -> bool: + """ + Check if the body matches the body_contains filter. + + Args: + body: The request body as string + + Returns: + True if no filter is set, or if the body contains the filter string + """ + if self.body_contains is None: + return True + return self.body_contains in body + + def has_required_headers(self, headers: dict) -> bool: + """ + Check if all required headers are present. + + Args: + headers: Dict of request headers (case-insensitive check) + + Returns: + True if all required headers are present + """ + if not self.required_headers: + return True + + # Normalize headers to lowercase for comparison + lower_headers = {k.lower(): v for k, v in headers.items()} + + for required in self.required_headers: + if required.lower() not in lower_headers: + return False + return True + + def matches_header_pattern(self, headers: dict) -> bool: + """ + Check if headers match the header_match pattern. 
+ + Args: + headers: Dict of request headers + + Returns: + True if no pattern is set, or if headers match the pattern + """ + if self.header_match is None: + return True + + # Parse pattern: "Header-Name: pattern" + if ":" not in self.header_match: + return True + + header_name, pattern = self.header_match.split(":", 1) + header_name = header_name.strip() + pattern = pattern.strip() + + # Find the header (case-insensitive) + for key, value in headers.items(): + if key.lower() == header_name.lower(): + try: + return bool(re.search(pattern, str(value), re.IGNORECASE)) + except re.error: + return False + + return False # Header not found + + def should_trigger(self, body: str, headers: dict, text: Optional[str] = None) -> tuple[bool, str]: + """ + Check if all webhook filters pass. + + Args: + body: Request body as string + headers: Request headers dict + text: Optional text content to check against message_filter + + Returns: + Tuple of (should_trigger, reason) + """ + # Check message_filter from base class + if not self.matches_filter(text): + return False, "message_filter_not_matched" + + # Check body_contains + if not self.matches_body_filter(body): + return False, "body_filter_not_matched" + + # Check required headers + if not self.has_required_headers(headers): + return False, "required_headers_missing" + + # Check header pattern + if not self.matches_header_pattern(headers): + return False, "header_pattern_not_matched" + + return True, "ok" + + @classmethod + def validate_config(cls, config_data: dict) -> "WebhookTriggerConfig": + """Validate and return a WebhookTriggerConfig instance.""" + return cls(**config_data) diff --git a/server/app/model/trigger/trigger.py b/server/app/model/trigger/trigger.py new file mode 100644 index 000000000..8a59f1867 --- /dev/null +++ b/server/app/model/trigger/trigger.py @@ -0,0 +1,203 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from datetime import datetime +from typing import Any, Dict, Optional +from sqlmodel import Field, Column, SmallInteger, JSON, String +from sqlalchemy_utils import ChoiceType +from pydantic import BaseModel +from app.model.abstract.model import AbstractModel, DefaultTimes +from app.type.trigger_types import TriggerType, TriggerStatus, ListenerType, RequestType + + +class Trigger(AbstractModel, DefaultTimes, table=True): + """Trigger model for automated task execution""" + + id: int = Field(default=None, primary_key=True) + user_id: str = Field(index=True, description="User ID who owns this trigger") + project_id: str = Field(index=True, description="Project ID this trigger belongs to") + name: str = Field(max_length=100, description="Human readable name for the trigger") + description: str = Field(default="", max_length=1000, description="Description of what this trigger does") + + # Trigger configuration + trigger_type: TriggerType = Field( + sa_column=Column(ChoiceType(TriggerType, String(50))), + description="Type of trigger (schedule, webhook, slack_trigger)" + ) + status: TriggerStatus = Field( + default=TriggerStatus.inactive, + sa_column=Column(ChoiceType(TriggerStatus, String(50))), + description="Current status of the trigger" + ) + + # Webhook specific fields + webhook_url: Optional[str] = Field( + default=None, + 
sa_column=Column(String(1024)), + description="Auto-generated webhook URL for webhook triggers" + ) + webhook_method: Optional[RequestType] = Field( + default=None, + sa_column=Column(ChoiceType(RequestType, String(50))), + description="Http/s Request Type" + ) + + # Schedule specific fields + custom_cron_expression: Optional[str] = Field( + default=None, + sa_column=Column(String(100)), + description="Custom cron expression for scheduled triggers" + ) + + # Listener configuration + listener_type: Optional[ListenerType] = Field( + default=None, + sa_column=Column(ChoiceType(ListenerType, String(50))), + description="Type of listener (workforce, chat_agent)" + ) + + agent_model: Optional[str] = Field( + default=None, + sa_column=Column(String(100)), + description="Model to use for the agent" + ) + + # Task configuration + task_prompt: Optional[str] = Field( + default=None, + max_length=1500, + description="Prompt template for tasks created by this trigger" + ) + + # Trigger-type specific configuration (validated based on trigger_type) + config: Optional[dict] = Field( + default=None, + sa_column=Column(JSON), + description="Trigger-type specific configuration (e.g., SlackTriggerConfig)" + ) + + # Execution limits + max_executions_per_hour: Optional[int] = Field( + default=None, + description="Maximum executions allowed per hour" + ) + max_executions_per_day: Optional[int] = Field( + default=None, + description="Maximum executions allowed per day" + ) + is_single_execution: bool = Field( + default=False, + description="Whether this trigger should only execute once" + ) + + # Execution tracking + last_executed_at: Optional[datetime] = Field( + default=None, + description="Timestamp of last execution" + ) + next_run_at: Optional[datetime] = Field( + default=None, + index=True, + description="Timestamp of next scheduled execution" + ) + last_execution_status: Optional[str] = Field( + default=None, + sa_column=Column(String(50)), + description="Status of the last 
execution" + ) + consecutive_failures: int = Field( + default=0, + description="Number of consecutive execution failures" + ) + auto_disabled_at: Optional[datetime] = Field( + default=None, + description="Timestamp when trigger was auto-disabled due to max failures" + ) + + +class TriggerIn(BaseModel): + """Input model for creating triggers""" + name: str = Field(max_length=100) + description: str = Field(default="", max_length=1000) + project_id: str + trigger_type: TriggerType + custom_cron_expression: Optional[str] = None + listener_type: Optional[ListenerType] = None + agent_model: Optional[str] = None + task_prompt: Optional[str] = Field(default=None, max_length=1500) + config: Optional[dict] = None # Trigger-type specific config + max_executions_per_hour: Optional[int] = None + max_executions_per_day: Optional[int] = None + is_single_execution: bool = False + webhook_method: Optional[RequestType] = None + + +class TriggerUpdate(BaseModel): + """Model for updating triggers""" + name: Optional[str] = Field(default=None, max_length=100) + description: Optional[str] = Field(default=None, max_length=1000) + status: Optional[TriggerStatus] = None + custom_cron_expression: Optional[str] = None + listener_type: Optional[ListenerType] = None + agent_model: Optional[str] = None + task_prompt: Optional[str] = Field(default=None, max_length=1500) + config: Optional[dict] = None # Trigger-type specific config + max_executions_per_hour: Optional[int] = None + max_executions_per_day: Optional[int] = None + is_single_execution: Optional[bool] = None + webhook_method: Optional[RequestType] = None + + +class TriggerOut(BaseModel): + """Output model for trigger responses""" + id: int + user_id: str + project_id: str + name: str + description: str + trigger_type: TriggerType + status: TriggerStatus + execution_count: int = 0 + webhook_url: Optional[str] = None + webhook_method: Optional[RequestType] = None + custom_cron_expression: Optional[str] = None + listener_type: 
Optional[ListenerType] = None + agent_model: Optional[str] = None + task_prompt: Optional[str] = None + config: Optional[dict] = None # Trigger-type specific config + max_executions_per_hour: Optional[int] = None + max_executions_per_day: Optional[int] = None + is_single_execution: bool + last_executed_at: Optional[datetime] = None + next_run_at: Optional[datetime] = None + last_execution_status: Optional[str] = None + consecutive_failures: int = 0 + auto_disabled_at: Optional[datetime] = None + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None + + +class TriggerConfigSchemaOut(BaseModel): + """Output model for trigger config schema.""" + trigger_type: str + has_config: bool + schema_: Optional[Dict[str, Any]] = None + + class Config: + populate_by_name = True + json_schema_extra = { + "properties": { + "schema": {"$ref": "#/definitions/schema_"} + } + } diff --git a/server/app/model/trigger/trigger_execution.py b/server/app/model/trigger/trigger_execution.py new file mode 100644 index 000000000..75c48d89d --- /dev/null +++ b/server/app/model/trigger/trigger_execution.py @@ -0,0 +1,134 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
=========
+
+from datetime import datetime
+from typing import Optional
+from sqlmodel import Field, Column, SmallInteger, JSON, String, Float
+from sqlalchemy_utils import ChoiceType
+from pydantic import BaseModel
+from app.model.abstract.model import AbstractModel, DefaultTimes
+from app.type.trigger_types import ExecutionType, ExecutionStatus
+
+
+class TriggerExecution(AbstractModel, DefaultTimes, table=True):
+    """Database model recording a single execution of a trigger"""
+
+    id: int = Field(default=None, primary_key=True)
+    trigger_id: int = Field(foreign_key="trigger.id", index=True, description="ID of the trigger that created this execution")
+    execution_id: str = Field(unique=True, index=True, description="Unique execution identifier")
+
+    execution_type: ExecutionType = Field(
+        sa_column=Column(ChoiceType(ExecutionType, String(50))),
+        description="Type of execution (scheduled, webhook)"
+    )
+    status: ExecutionStatus = Field(
+        default=ExecutionStatus.pending,
+        sa_column=Column(ChoiceType(ExecutionStatus, String(50))),
+        description="Current status of the execution"
+    )
+
+    # Execution timing
+    started_at: Optional[datetime] = Field(
+        default=None,
+        description="Timestamp when execution started"
+    )
+    completed_at: Optional[datetime] = Field(
+        default=None,
+        description="Timestamp when execution completed"
+    )
+    duration_seconds: Optional[float] = Field(
+        default=None,
+        sa_column=Column(Float),
+        description="Duration of execution in seconds"
+    )
+
+    # Execution data
+    input_data: Optional[dict] = Field(
+        default=None,
+        sa_column=Column(JSON),
+        description="Input data that triggered the execution"
+    )
+    output_data: Optional[dict] = Field(
+        default=None,
+        sa_column=Column(JSON),
+        description="Output data from the execution"
+    )
+    error_message: Optional[str] = Field(
+        default=None,
+        description="Error message if execution failed"
+    )
+
+    # Retry configuration
+    attempts: int = Field(
+        default=1,
+        description="Current number of retry attempts"
+    )
+ 
max_retries: int = Field( + default=3, + description="Maximum number of retry attempts" + ) + + # Resource usage tracking + tokens_used: Optional[int] = Field( + default=None, + description="Number of tokens used during execution" + ) + tools_executed: Optional[dict] = Field( + default=None, + sa_column=Column(JSON), + description="Tools that were executed and their results" + ) + + +class TriggerExecutionIn(BaseModel): + """Input model for creating trigger executions""" + trigger_id: int + execution_id: str + execution_type: ExecutionType + input_data: Optional[dict] = None + max_retries: int = 3 + + +class TriggerExecutionUpdate(BaseModel): + """Model for updating trigger executions""" + status: Optional[ExecutionStatus] = None + started_at: Optional[datetime] = None + completed_at: Optional[datetime] = None + duration_seconds: Optional[float] = None + output_data: Optional[dict] = None + error_message: Optional[str] = None + attempts: Optional[int] = None + tokens_used: Optional[int] = None + tools_executed: Optional[dict] = None + + +class TriggerExecutionOut(BaseModel): + """Output model for execution records""" + id: int + trigger_id: int + execution_id: str + execution_type: ExecutionType + status: ExecutionStatus + started_at: Optional[datetime] = None + completed_at: Optional[datetime] = None + duration_seconds: Optional[float] = None + input_data: Optional[dict] = None + output_data: Optional[dict] = None + error_message: Optional[str] = None + attempts: int + max_retries: int + tokens_used: Optional[int] = None + tools_executed: Optional[dict] = None + created_at: Optional[datetime] = None + updated_at: Optional[datetime] = None \ No newline at end of file diff --git a/server/app/schedule/trigger_schedule_task.py b/server/app/schedule/trigger_schedule_task.py new file mode 100644 index 000000000..88f1a17a8 --- /dev/null +++ b/server/app/schedule/trigger_schedule_task.py @@ -0,0 +1,176 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from celery import shared_task +import logging +from datetime import datetime, timezone +from sqlmodel import select, or_ + +from app.component.database import session_make +from app.component.environment import env +from app.service.trigger.trigger_schedule_service import TriggerScheduleService +from app.service.trigger.trigger_service import TriggerService +from app.component.trigger_utils import MAX_DISPATCH_PER_TICK +from app.component.redis_utils import get_redis_manager +from app.model.trigger.trigger_execution import TriggerExecution +from app.model.trigger.trigger import Trigger +from app.type.trigger_types import ExecutionStatus + +# Timeout configuration from environment variables +EXECUTION_PENDING_TIMEOUT_SECONDS = int(env("EXECUTION_PENDING_TIMEOUT_SECONDS", "60")) +EXECUTION_RUNNING_TIMEOUT_SECONDS = int(env("EXECUTION_RUNNING_TIMEOUT_SECONDS", "600")) # 10 minutes + +logger = logging.getLogger("server_trigger_schedule_task") + +@shared_task(queue="poll_trigger_schedules") +def poll_trigger_schedules() -> None: + """ + Celery task to poll and execute scheduled triggers. + This runs periodically to check for triggers that are due for execution. + + This is a lightweight wrapper around TriggerScheduleService that handles + session management and delegates all business logic to the service layer. 
+ """ + logger.info("Starting poll_trigger_schedules task") + + session = session_make() + try: + # Create service instance with session + schedule_service = TriggerScheduleService(session) + + # Delegate all logic to the service + schedule_service.poll_and_execute_due_triggers( + max_dispatch_per_tick=MAX_DISPATCH_PER_TICK + ) + finally: + session.close() + + +@shared_task(queue="check_execution_timeouts") +def check_execution_timeouts() -> None: + """ + Celery task to check for timed-out pending and running executions. + + This runs periodically to find: + - Pending executions that haven't been acknowledged within EXECUTION_PENDING_TIMEOUT_SECONDS + - Running executions that have been stuck for more than EXECUTION_RUNNING_TIMEOUT_SECONDS + + These are marked as missed/failed respectively. + """ + logger.info("Starting check_execution_timeouts task", extra={ + "pending_timeout": EXECUTION_PENDING_TIMEOUT_SECONDS, + "running_timeout": EXECUTION_RUNNING_TIMEOUT_SECONDS + }) + + session = session_make() + redis_manager = get_redis_manager() + trigger_service = TriggerService(session) + + try: + now = datetime.now(timezone.utc) + + # Find all pending and running executions + executions = session.exec( + select(TriggerExecution).where( + or_( + TriggerExecution.status == ExecutionStatus.pending, + TriggerExecution.status == ExecutionStatus.running + ) + ) + ).all() + + timed_out_pending_count = 0 + timed_out_running_count = 0 + + for execution in executions: + is_pending = execution.status == ExecutionStatus.pending + is_running = execution.status == ExecutionStatus.running + + # Determine the reference time and timeout based on status + if is_pending: + reference_time = execution.created_at + timeout_seconds = EXECUTION_PENDING_TIMEOUT_SECONDS + else: # running + reference_time = execution.started_at or execution.created_at + timeout_seconds = EXECUTION_RUNNING_TIMEOUT_SECONDS + + if reference_time.tzinfo is None: + reference_time = 
reference_time.replace(tzinfo=timezone.utc) + time_elapsed = (now - reference_time).total_seconds() + + if time_elapsed > timeout_seconds: + # Determine the new status and error message + if is_pending: + new_status = ExecutionStatus.missed + error_message = f"Execution acknowledgment timeout ({timeout_seconds} seconds)" + timed_out_pending_count += 1 + else: + new_status = ExecutionStatus.failed + error_message = f"Execution running timeout ({timeout_seconds} seconds) - no completion received" + timed_out_running_count += 1 + + # Use TriggerService.update_execution_status for proper failure tracking + trigger_service.update_execution_status( + execution=execution, + status=new_status, + error_message=error_message + ) + + # Remove from Redis pending list (best effort, may not exist) + try: + # Get all sessions for this execution's user + trigger = session.get(Trigger, execution.trigger_id) + if trigger and trigger.user_id: + user_session_ids = redis_manager.get_user_sessions(trigger.user_id) + for session_id in user_session_ids: + redis_manager.remove_pending_execution(session_id, execution.execution_id) + elif not trigger: + logger.warning("Trigger not found for execution", extra={ + "execution_id": execution.execution_id, + "trigger_id": execution.trigger_id + }) + except Exception as e: + logger.warning("Failed to remove execution from Redis", extra={ + "execution_id": execution.execution_id, + "trigger_id": execution.trigger_id, + "error": str(e) + }) + + logger.info("Execution timed out", extra={ + "execution_id": execution.execution_id, + "trigger_id": execution.trigger_id, + "original_status": "pending" if is_pending else "running", + "new_status": new_status.value, + "time_elapsed": time_elapsed + }) + + total_timed_out = timed_out_pending_count + timed_out_running_count + if total_timed_out > 0: + logger.info("Marked executions as timed out", extra={ + "timed_out_pending_count": timed_out_pending_count, + "timed_out_running_count": 
timed_out_running_count, + "total_timed_out": total_timed_out + }) + else: + logger.debug("No timed-out executions found") + + except Exception as e: + logger.error("Error checking execution timeouts", extra={ + "error": str(e), + "error_type": type(e).__name__ + }, exc_info=True) + session.rollback() + + finally: + session.close() \ No newline at end of file diff --git a/server/app/service/trigger/__init__.py b/server/app/service/trigger/__init__.py new file mode 100644 index 000000000..f8bbaca25 --- /dev/null +++ b/server/app/service/trigger/__init__.py @@ -0,0 +1,54 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +""" +Trigger Service Package + +Contains services for managing triggers including: +- TriggerService: Main service for trigger operations +- TriggerScheduleService: Service for scheduled trigger operations +- App Handlers: Handlers for different trigger types (Slack, Webhook, Schedule) +""" + +from app.service.trigger.trigger_service import TriggerService, get_trigger_service +from app.service.trigger.trigger_schedule_service import TriggerScheduleService +from app.service.trigger.app_handler_service import ( + BaseAppHandler, + SlackAppHandler, + DefaultWebhookHandler, + ScheduleAppHandler, + AppHandlerResult, + get_app_handler, + get_schedule_handler, + register_app_handler, + get_supported_trigger_types, +) + +__all__ = [ + # Services + "TriggerService", + "get_trigger_service", + "TriggerScheduleService", + # Handlers + "BaseAppHandler", + "SlackAppHandler", + "DefaultWebhookHandler", + "ScheduleAppHandler", + "AppHandlerResult", + # Handler functions + "get_app_handler", + "get_schedule_handler", + "register_app_handler", + "get_supported_trigger_types", +] \ No newline at end of file diff --git a/server/app/service/trigger/app_handler_service.py b/server/app/service/trigger/app_handler_service.py new file mode 100644 index 000000000..be99502bd --- /dev/null +++ b/server/app/service/trigger/app_handler_service.py @@ -0,0 +1,448 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +""" +Trigger App Handler Service + +Modular service for handling app-specific webhook authentication, +filtering, and payload normalization based on trigger_type. +""" + +import re +from typing import Optional +from dataclasses import dataclass +from fastapi import Request +from sqlmodel import Session, select, and_ +import logging + +from app.model.trigger.trigger import Trigger +from app.model.config.config import Config +from app.model.trigger.app_configs import SlackTriggerConfig, WebhookTriggerConfig, ScheduleTriggerConfig +from app.type.trigger_types import TriggerType, ExecutionType, TriggerStatus +from app.type.config_group import ConfigGroup + +logger = logging.getLogger("server_app_handler_service") + + +@dataclass +class AppHandlerResult: + """Result from app handler operations.""" + success: bool + data: Optional[dict] = None + reason: Optional[str] = None + + +class BaseAppHandler: + """Base class for app-specific handlers.""" + + trigger_type: TriggerType + execution_type: ExecutionType = ExecutionType.webhook + config_group: Optional[str] = None + + async def get_credentials(self, session: Session, user_id: str) -> dict: + """Get user credentials from config table.""" + if not self.config_group: + return {} + + configs = session.exec( + select(Config).where( + and_( + Config.user_id == int(user_id), + Config.config_group == self.config_group + ) + ) + ).all() + return {config.config_name: config.config_value for config in configs} + + async def authenticate( + self, + request: Request, + body: bytes, + trigger: Trigger, + session: Session + ) -> AppHandlerResult: + """ + Authenticate the incoming webhook request. + Returns (success, challenge_response or None) + """ + return AppHandlerResult(success=True) + + async def filter_event( + self, + payload: dict, + trigger: Trigger + ) -> AppHandlerResult: + """ + Filter events based on trigger configuration. 
+ Returns (should_process, reason) + """ + return AppHandlerResult(success=True, reason="ok") + + def normalize_payload( + self, + payload: dict, + trigger: Trigger, + request_meta: dict = None + ) -> dict: + """Normalize the payload for execution input.""" + return payload + + +class SlackAppHandler(BaseAppHandler): + """Handler for Slack triggers.""" + + trigger_type = TriggerType.slack_trigger + execution_type = ExecutionType.slack + config_group = ConfigGroup.SLACK.value + + async def authenticate( + self, + request: Request, + body: bytes, + trigger: Trigger, + session: Session + ) -> AppHandlerResult: + """Handle Slack authentication and URL verification.""" + from camel.auth.slack_auth import SlackAuth + + credentials = await self.get_credentials(session, trigger.user_id) + + slack_auth = SlackAuth( + signing_secret=credentials.get("SLACK_SIGNING_SECRET"), + bot_token=credentials.get("SLACK_BOT_TOKEN"), + api_token=credentials.get("SLACK_API_TOKEN"), + ) + + # Check for URL verification challenge + challenge_response = slack_auth.get_verification_response(request, body) + if challenge_response: + # Return the challenge response (already in correct format: {"challenge": "..."}) + logger.info(f"Slack URL verification - challenge_response: {challenge_response}") + return AppHandlerResult(success=True, data=challenge_response) + + # Verify webhook signature + if not slack_auth.verify_webhook_request(request, body): + logger.warning("Invalid Slack webhook signature", extra={ + "trigger_id": trigger.id + }) + return AppHandlerResult(success=False, reason="invalid_signature") + + return AppHandlerResult(success=True) + + async def filter_event( + self, + payload: dict, + trigger: Trigger + ) -> AppHandlerResult: + """Filter Slack events based on trigger config.""" + # Prefer 'config' field + config_data = trigger.config or {} + config = SlackTriggerConfig(**config_data) + event = payload.get("event", {}) + event_type = event.get("type", "") + + # Check event type + 
if not config.should_trigger(event_type): + return AppHandlerResult(success=False, reason="event_type_not_configured") + + # Check channel filter (if channel_id is set, only trigger for that channel) + if config.channel_id: + if event.get("channel") != config.channel_id: + return AppHandlerResult(success=False, reason="channel_not_matched") + + # Check bot message filter + if config.ignore_bot_messages: + if event.get("bot_id") or event.get("subtype") == "bot_message": + return AppHandlerResult(success=False, reason="bot_message_ignored") + + # Check user filter + if config.ignore_users and event.get("user") in config.ignore_users: + return AppHandlerResult(success=False, reason="user_filtered") + + # Check message filter regex + if config.message_filter and event.get("text"): + if not re.search(config.message_filter, event.get("text", ""), re.IGNORECASE): + return AppHandlerResult(success=False, reason="message_filter_not_matched") + + return AppHandlerResult(success=True, reason="ok") + + def normalize_payload( + self, + payload: dict, + trigger: Trigger, + request_meta: dict = None + ) -> dict: + """Normalize Slack event payload.""" + logger.info("Normalizing payload", extra={"payload": payload}) + # Prefer 'config' field + config_data = trigger.config or {} + config = SlackTriggerConfig(**config_data) + event = payload.get("event", {}) + + normalized = { + "event_type": event.get("type"), + "event_ts": event.get("event_ts"), + "team_id": payload.get("team_id"), + "user_id": event.get("user"), + "channel_id": event.get("channel"), + "text": event.get("text"), + "message_ts": event.get("ts"), + "thread_ts": event.get("thread_ts"), + "reaction": event.get("reaction"), + "files": event.get("files"), + "event_id": payload.get("event_id") or payload.get("id") + } + + # if config.include_raw_payload: + # normalized["raw_payload"] = payload + + return normalized + + +class DefaultWebhookHandler(BaseAppHandler): + """Default handler for generic webhooks with 
config-based filtering.""" + + trigger_type = TriggerType.webhook + execution_type = ExecutionType.webhook + + async def filter_event( + self, + payload: dict, + trigger: Trigger, + headers: dict = None, + body_raw: str = None + ) -> AppHandlerResult: + """Filter webhook events based on trigger config.""" + config_data = trigger.config or {} + config = WebhookTriggerConfig(**config_data) + + # Get text content for message_filter (check body for text field or stringify) + text = None + if isinstance(payload, dict): + text = payload.get("text") or payload.get("message") or payload.get("content") + if text is None and body_raw: + text = body_raw + + # Use the config's should_trigger method + should_trigger, reason = config.should_trigger( + body=body_raw or "", + headers=headers or {}, + text=text + ) + + if not should_trigger: + return AppHandlerResult(success=False, reason=reason) + + return AppHandlerResult(success=True, reason="ok") + + def normalize_payload( + self, + payload: dict, + trigger: Trigger, + request_meta: dict = None + ) -> dict: + """Normalize generic webhook payload with full request metadata.""" + config_data = trigger.config or {} + config = WebhookTriggerConfig(**config_data) + + result = {"body": payload} + + if request_meta: + # Include headers if configured + if config.include_headers and "headers" in request_meta: + result["headers"] = request_meta["headers"] + + # Include query params if configured + if config.include_query_params and "query_params" in request_meta: + result["query_params"] = request_meta["query_params"] + + # Include request metadata if configured + if config.include_request_metadata: + if "method" in request_meta: + result["method"] = request_meta["method"] + if "url" in request_meta: + result["url"] = request_meta["url"] + if "client_ip" in request_meta: + result["client_ip"] = request_meta["client_ip"] + + return result + + +class ScheduleAppHandler(BaseAppHandler): + """ + Handler for scheduled triggers. 
+ + Manages schedule-specific logic including: + - Expiration checking (expirationDate for recurring schedules) + - Date validation for one-time executions (date field) + """ + + trigger_type = TriggerType.schedule + execution_type = ExecutionType.scheduled + + async def filter_event( + self, + payload: dict, + trigger: Trigger + ) -> AppHandlerResult: + """ + Filter scheduled events based on trigger config. + + Checks: + - If one-time (date set) and date has passed + - If recurring with expirationDate and it has passed + """ + config_data = trigger.config or {} + + try: + config = ScheduleTriggerConfig(**config_data) + except Exception as e: + logger.warning( + "Invalid schedule config", + extra={"trigger_id": trigger.id, "error": str(e)} + ) + # Allow execution if config is missing/invalid (backwards compatibility) + return AppHandlerResult(success=True, reason="ok") + + # Check if schedule should execute + should_execute, reason = config.should_execute() + + if not should_execute: + return AppHandlerResult(success=False, reason=reason) + + return AppHandlerResult(success=True, reason="ok") + + def normalize_payload( + self, + payload: dict, + trigger: Trigger, + request_meta: dict = None + ) -> dict: + """Normalize scheduled trigger payload.""" + config_data = trigger.config or {} + + normalized = { + "scheduled_at": payload.get("scheduled_at"), + "trigger_id": trigger.id, + "trigger_name": trigger.name, + "is_single_execution": trigger.is_single_execution, + } + + # Include config details if present + if config_data: + if config_data.get("date"): + normalized["date"] = config_data.get("date") + if config_data.get("expirationDate"): + normalized["expirationDate"] = config_data.get("expirationDate") + + return normalized + + def check_and_handle_expiration( + self, + trigger: Trigger, + session: Session + ) -> bool: + """ + Check if a schedule has expired and handle accordingly. 
+ + Args: + trigger: The trigger to check + session: Database session for updates + + Returns: + True if trigger is expired and was deactivated, False otherwise + """ + config_data = trigger.config or {} + + try: + config = ScheduleTriggerConfig(**config_data) + except Exception as e: + logger.warning( + "Invalid schedule config during expiration check", + extra={"trigger_id": trigger.id, "error": str(e)} + ) + return False + + if config.is_expired(): + # Deactivate the trigger + trigger.status = TriggerStatus.completed + session.add(trigger) + session.commit() + + logger.info( + "Schedule trigger expired and deactivated", + extra={ + "trigger_id": trigger.id, + "trigger_name": trigger.name, + "expiration_date": config.expirationDate or config.date + } + ) + + return True + + return False + + def validate_schedule_for_execution( + self, + trigger: Trigger + ) -> tuple[bool, str]: + """ + Validate that a scheduled trigger is valid for execution. + + Args: + trigger: The trigger to validate + + Returns: + Tuple of (is_valid, reason) + """ + config_data = trigger.config or {} + + try: + config = ScheduleTriggerConfig(**config_data) + except Exception as e: + return False, f"invalid_config: {str(e)}" + + # Check expiration + if config.is_expired(): + return False, "schedule_expired" + + return True, "ok" + + +# Registry of handlers by trigger_type +_HANDLERS: dict[TriggerType, BaseAppHandler] = { + TriggerType.slack_trigger: SlackAppHandler(), + TriggerType.webhook: DefaultWebhookHandler(), + TriggerType.schedule: ScheduleAppHandler(), +} + + +def get_app_handler(trigger_type: TriggerType) -> Optional[BaseAppHandler]: + """Get the handler for a trigger type.""" + return _HANDLERS.get(trigger_type) + + +def register_app_handler(trigger_type: TriggerType, handler: BaseAppHandler): + """Register a new app handler.""" + _HANDLERS[trigger_type] = handler + + +def get_supported_trigger_types() -> list[TriggerType]: + """Get list of trigger types with webhook support.""" + 
return list(_HANDLERS.keys()) + + +def get_schedule_handler() -> ScheduleAppHandler: + """Get the schedule handler instance.""" + return _HANDLERS.get(TriggerType.schedule) diff --git a/server/app/service/trigger/trigger_schedule_service.py b/server/app/service/trigger/trigger_schedule_service.py new file mode 100644 index 000000000..4db83b460 --- /dev/null +++ b/server/app/service/trigger/trigger_schedule_service.py @@ -0,0 +1,430 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from datetime import datetime, timedelta, timezone +from typing import List, Tuple, Optional +import logging +from croniter import croniter +from uuid import uuid4 +import asyncio +from sqlmodel import select + +from app.model.trigger.trigger import Trigger +from app.model.trigger.trigger_execution import TriggerExecution +from app.type.trigger_types import TriggerStatus, ExecutionType, ExecutionStatus, TriggerType +from app.component.trigger_utils import check_rate_limits, MAX_DISPATCH_PER_TICK +from app.model.trigger.app_configs import ScheduleTriggerConfig + +logger = logging.getLogger("server_trigger_schedule_service") + + +class TriggerScheduleService: + """Service for managing scheduled trigger operations. + This service mainly delegates schedule business logic + from the main trigger_service.py. 
+ + Handles tasks from the Celery beat scheduler. + + Mainly handles: + - Polling for due schedules + - Dispatching scheduled triggers + - Calculating next run times based on cron expressions + """ + + def __init__(self, session): + """ + Initialize the schedule service with a database session. + + Args: + session: SQLModel session for database operations + """ + self.session = session + + def fetch_due_schedules(self, limit: Optional[int] = 100) -> List[Trigger]: + """ + Fetch triggers that are due for execution. + + Args: + limit: Maximum number of triggers to fetch + + Returns: + List of triggers that need to be executed + """ + now = datetime.now(timezone.utc) + + try: + statement = ( + select(Trigger) + .where(Trigger.trigger_type == TriggerType.schedule) + .where(Trigger.status == TriggerStatus.active) + .where(Trigger.next_run_at <= now) + .order_by(Trigger.next_run_at) + .limit(limit) + ) + + results = self.session.exec(statement).all() + + logger.debug( + "Fetched due schedules", + extra={ + "count": len(results), + "current_time": now.isoformat() + } + ) + + return list(results) + + except Exception as e: + logger.error( + "Failed to fetch due schedules", + extra={"error": str(e)}, + exc_info=True + ) + return [] + + def calculate_next_run_at( + self, + trigger: Trigger, + base_time: Optional[datetime] = None + ) -> datetime: + """ + Calculate the next run time for a trigger based on its cron expression. 
+ + Args: + trigger: The trigger to calculate next run time for + base_time: Base time to calculate from (defaults to now) + + Returns: + The next scheduled run time + + Raises: + ValueError: If trigger has no cron expression or invalid expression + """ + if not trigger.custom_cron_expression: + raise ValueError(f"Trigger {trigger.id} has no cron expression") + + if base_time is None: + base_time = datetime.now(timezone.utc) + + try: + cron = croniter(trigger.custom_cron_expression, base_time) + next_run = cron.get_next(datetime) + return next_run + except Exception as e: + logger.error( + "Failed to calculate next run time", + extra={ + "trigger_id": trigger.id, + "cron_expression": trigger.custom_cron_expression, + "error": str(e) + } + ) + raise + + def dispatch_trigger(self, trigger: Trigger) -> bool: + """ + Dispatch a trigger for execution. + + Args: + trigger: The trigger to dispatch + + Returns: + True if dispatched successfully, False otherwise + """ + try: + # Check schedule expiration before dispatching + if not self._check_schedule_valid(trigger): + logger.info( + "Schedule trigger expired, skipping dispatch", + extra={"trigger_id": trigger.id, "trigger_name": trigger.name} + ) + return False + + # Create execution record + execution_id = str(uuid4()) + execution = TriggerExecution( + trigger_id=trigger.id, + execution_id=execution_id, + execution_type=ExecutionType.scheduled, + status=ExecutionStatus.pending, + input_data={"scheduled_at": datetime.now(timezone.utc).isoformat()}, + started_at=datetime.now(timezone.utc) + ) + + self.session.add(execution) + + # Update trigger statistics + trigger.last_executed_at = datetime.now(timezone.utc) + trigger.last_execution_status = "pending" + + # Calculate and set next run time + try: + trigger.next_run_at = self.calculate_next_run_at(trigger, datetime.now(timezone.utc)) + except Exception as e: + logger.error( + "Failed to calculate next run time, trigger will be skipped", + extra={"trigger_id": trigger.id, 
"error": str(e)} + ) + # Set next_run_at far in the future to prevent immediate re-execution + trigger.next_run_at = datetime.now(timezone.utc) + timedelta(days=365) + + # If single execution, deactivate the trigger + if trigger.is_single_execution: + trigger.status = TriggerStatus.inactive + logger.info( + "Trigger deactivated after single execution", + extra={"trigger_id": trigger.id} + ) + + self.session.add(trigger) + self.session.commit() + + # TODO: Queue the actual task execution + # This would integrate with a task queue (e.g., Celery) to execute the trigger's action + # For now event is sent to client for execution + + logger.info( + "Trigger dispatched successfully", + extra={ + "trigger_id": trigger.id, + "trigger_name": trigger.name, + "execution_id": execution_id, + "next_run_at": trigger.next_run_at.isoformat() if trigger.next_run_at else None + } + ) + + # Notify WebSocket subscribers + # Using asyncio.run() to run async code from sync Celery worker context + try: + # Notify WebSocket subscribers via Redis pub/sub + from app.component.redis_utils import get_redis_manager + redis_manager = get_redis_manager() + redis_manager.publish_execution_event({ + "type": "execution_created", + "execution_id": execution_id, + "trigger_id": trigger.id, + "trigger_type": "schedule", + "status": "pending", + "input_data": execution.input_data, + "task_prompt": trigger.task_prompt, + "execution_type": "schedule", + "user_id": str(trigger.user_id), + "project_id": str(trigger.project_id) + }) + + logger.debug("WebSocket notification sent", extra={ + "execution_id": execution_id, + "trigger_id": trigger.id + }) + except Exception as e: + # Don't fail the trigger dispatch if notification fails + logger.warning("Failed to send WebSocket notification", extra={ + "trigger_id": trigger.id, + "execution_id": execution_id, + "error": str(e) + }) + + return True + + except Exception as e: + logger.error( + "Failed to dispatch trigger", + extra={ + "trigger_id": trigger.id, + 
"error": str(e) + }, + exc_info=True + ) + self.session.rollback() + return False + + def process_schedules(self, due_schedules: List[Trigger]) -> Tuple[int, int]: + """ + Process due schedules, checking rate limits and dispatching. + + Args: + due_schedules: List of triggers that are due for execution + + Returns: + Tuple of (dispatched_count, rate_limited_count) + """ + dispatched_count = 0 + rate_limited_count = 0 + + for trigger in due_schedules: + # Check rate limits + if not check_rate_limits(self.session, trigger): + rate_limited_count += 1 + + # Still update next_run_at even if rate limited, so we don't keep checking + try: + trigger.next_run_at = self.calculate_next_run_at(trigger, datetime.now(timezone.utc)) + self.session.add(trigger) + self.session.commit() + except Exception as e: + logger.error( + "Failed to update next_run_at for rate limited trigger", + extra={"trigger_id": trigger.id, "error": str(e)} + ) + + continue + + # Dispatch the trigger + if self.dispatch_trigger(trigger): + dispatched_count += 1 + + return dispatched_count, rate_limited_count + + def poll_and_execute_due_triggers( + self, + max_dispatch_per_tick: Optional[int] = None + ) -> Tuple[int, int]: + """ + Poll for due triggers and execute them in batches. 
+ + Args: + max_dispatch_per_tick: Maximum number of triggers to dispatch in this tick + (defaults to MAX_DISPATCH_PER_TICK) + + Returns: + Tuple of (total_dispatched, total_rate_limited) + """ + max_dispatch = max_dispatch_per_tick or MAX_DISPATCH_PER_TICK + total_dispatched = 0 + total_rate_limited = 0 + + # Process in batches until we've handled all due schedules or hit the limit + while True: + due_schedules = self.fetch_due_schedules() + + if not due_schedules: + break + + dispatched_count, rate_limited_count = self.process_schedules(due_schedules) + total_dispatched += dispatched_count + total_rate_limited += rate_limited_count + + logger.debug( + "Batch processed", + extra={ + "dispatched": dispatched_count, + "rate_limited": rate_limited_count + } + ) + + # Check if we've hit the per-tick limit (if enabled) + if max_dispatch > 0 and total_dispatched >= max_dispatch: + logger.warning( + "Circuit breaker activated: reached dispatch limit, will continue next tick", + extra={"limit": max_dispatch} + ) + break + + if total_dispatched > 0 or total_rate_limited > 0: + logger.info( + "Trigger schedule poll completed", + extra={ + "total_dispatched": total_dispatched, + "total_rate_limited": total_rate_limited + } + ) + + return total_dispatched, total_rate_limited + + def _check_schedule_valid(self, trigger: Trigger) -> bool: + """ + Check if a scheduled trigger is valid for execution. + + Validates: + - For one-time (date set): Checks if the scheduled date has passed + - For recurring (expirationDate set): Checks if expirationDate has passed + + If expired, the trigger will be marked as completed. 
+ + Args: + trigger: The trigger to check + + Returns: + True if trigger is valid for execution, False if expired + """ + config_data = trigger.config or {} + + # If no config or empty config, allow execution (no expiration) + if not config_data: + return True + + try: + config = ScheduleTriggerConfig(**config_data) + except Exception as e: + logger.warning( + "Invalid schedule config", + extra={"trigger_id": trigger.id, "error": str(e)} + ) + return False + + # Check if schedule has expired + if config.is_expired(): + # Mark trigger as completed + trigger.status = TriggerStatus.completed + self.session.add(trigger) + self.session.commit() + + logger.info( + "Schedule trigger expired and marked as completed", + extra={ + "trigger_id": trigger.id, + "trigger_name": trigger.name, + "expiration_info": config.expirationDate or config.date + } + ) + return False + + return True + + def update_trigger_next_run(self, trigger: Trigger) -> None: + """ + Update a trigger's next_run_at based on its cron expression. 
+ + Args: + trigger: The trigger to update + """ + try: + # Check if schedule is expired before updating next run + if not self._check_schedule_valid(trigger): + logger.info( + "Trigger expired, not updating next_run_at", + extra={"trigger_id": trigger.id} + ) + return + + trigger.next_run_at = self.calculate_next_run_at(trigger) + self.session.add(trigger) + self.session.commit() + + logger.info( + "Trigger next_run_at updated", + extra={ + "trigger_id": trigger.id, + "next_run_at": trigger.next_run_at.isoformat() + } + ) + except Exception as e: + logger.error( + "Failed to update trigger next_run_at", + extra={ + "trigger_id": trigger.id, + "error": str(e) + } + ) + self.session.rollback() diff --git a/server/app/service/trigger/trigger_service.py b/server/app/service/trigger/trigger_service.py new file mode 100644 index 000000000..4166466d0 --- /dev/null +++ b/server/app/service/trigger/trigger_service.py @@ -0,0 +1,392 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. 
========= + +from datetime import datetime, timedelta, timezone +from typing import Optional, List, Dict, Any +from sqlmodel import select, and_, or_ +from uuid import uuid4 +import logging + +from app.model.trigger.trigger import Trigger +from app.model.trigger.trigger_execution import TriggerExecution +from app.type.trigger_types import TriggerType, TriggerStatus, ExecutionType, ExecutionStatus +from app.component.database import session_make +from app.service.trigger.trigger_schedule_service import TriggerScheduleService +from app.component.trigger_utils import SCHEDULED_FETCH_BATCH_SIZE, check_rate_limits +from app.model.trigger.app_configs import ScheduleTriggerConfig, WebhookTriggerConfig +from app.model.trigger.app_configs.base_config import BaseTriggerConfig + +logger = logging.getLogger("server_trigger_service") + + +class TriggerService: + """Service for managing trigger operations and scheduling.""" + + def __init__(self, session=None): + self.session = session or session_make() + self.schedule_service = TriggerScheduleService(self.session) + + def create_execution( + self, + trigger: Trigger, + execution_type: ExecutionType, + input_data: Optional[Dict[str, Any]] = None + ) -> TriggerExecution: + """Create a new trigger execution.""" + execution_id = str(uuid4()) + + execution = TriggerExecution( + trigger_id=trigger.id, + execution_id=execution_id, + execution_type=execution_type, + status=ExecutionStatus.pending, + input_data=input_data or {}, + started_at=datetime.now(timezone.utc) + ) + + self.session.add(execution) + self.session.commit() + self.session.refresh(execution) + + # Update trigger statistics + trigger.last_executed_at = datetime.now(timezone.utc) + trigger.last_execution_status = "pending" + self.session.add(trigger) + self.session.commit() + + logger.info("Execution created", extra={ + "trigger_id": trigger.id, + "execution_id": execution_id, + "execution_type": execution_type.value + }) + + return execution + + def 
update_execution_status( + self, + execution: TriggerExecution, + status: ExecutionStatus, + output_data: Optional[Dict[str, Any]] = None, + error_message: Optional[str] = None, + tokens_used: Optional[int] = None, + tools_executed: Optional[Dict[str, Any]] = None + ) -> TriggerExecution: + """Update execution status and metadata.""" + execution.status = status + + # Set completed_at and duration for terminal statuses + if status in [ExecutionStatus.completed, ExecutionStatus.failed, ExecutionStatus.cancelled, ExecutionStatus.missed]: + execution.completed_at = datetime.now(timezone.utc) + if execution.started_at: + # Ensure started_at is timezone-aware for subtraction + started_at = execution.started_at + if started_at.tzinfo is None: + started_at = started_at.replace(tzinfo=timezone.utc) + execution.duration_seconds = (execution.completed_at - started_at).total_seconds() + + if output_data: + execution.output_data = output_data + + if error_message: + execution.error_message = error_message + + if tokens_used: + execution.tokens_used = tokens_used + + if tools_executed: + execution.tools_executed = tools_executed + + self.session.add(execution) + self.session.commit() + + # Update trigger status and handle auto-disable logic + trigger = self.session.get(Trigger, execution.trigger_id) + if trigger: + if status == ExecutionStatus.failed: + trigger.last_execution_status = "failed" + trigger.consecutive_failures += 1 + + # Check for auto-disable based on max_failure_count in config + self._check_auto_disable(trigger) + + elif status == ExecutionStatus.completed: + trigger.last_execution_status = "completed" + # Reset consecutive failures on success + trigger.consecutive_failures = 0 + elif status == ExecutionStatus.cancelled: + trigger.last_execution_status = "cancelled" + elif status == ExecutionStatus.missed: + trigger.last_execution_status = "missed" + + self.session.add(trigger) + self.session.commit() + + logger.info("Execution status updated", extra={ + 
"execution_id": execution.execution_id, + "status": status.name, + "duration": execution.duration_seconds + }) + + return execution + + def _check_auto_disable(self, trigger: Trigger) -> bool: + """ + Check if trigger should be auto-disabled based on consecutive failures. + + Args: + trigger: The trigger to check + + Returns: + True if trigger was auto-disabled, False otherwise + """ + if not trigger.config: + return False + + try: + # Get the appropriate config class based on trigger type + config: BaseTriggerConfig + if trigger.trigger_type == TriggerType.schedule: + config = ScheduleTriggerConfig(**trigger.config) + elif trigger.trigger_type == TriggerType.webhook: + config = WebhookTriggerConfig(**trigger.config) + else: + # For other trigger types, use base config + config = BaseTriggerConfig(**trigger.config) + + # Check if auto-disable should happen + if config.should_auto_disable(trigger.consecutive_failures): + trigger.status = TriggerStatus.inactive + trigger.auto_disabled_at = datetime.now(timezone.utc) + + logger.warning( + "Trigger auto-disabled due to max failures", + extra={ + "trigger_id": trigger.id, + "trigger_name": trigger.name, + "consecutive_failures": trigger.consecutive_failures, + "max_failure_count": config.max_failure_count + } + ) + return True + + except Exception as e: + logger.error( + "Failed to check auto-disable for trigger", + extra={ + "trigger_id": trigger.id, + "error": str(e) + } + ) + + return False + + def get_pending_executions(self) -> List[TriggerExecution]: + """Get all pending executions that need to be processed.""" + executions = self.session.exec( + select(TriggerExecution).where( + TriggerExecution.status == ExecutionStatus.pending + ).order_by(TriggerExecution.created_at) + ).all() + + return list(executions) + + def get_failed_executions_for_retry(self) -> List[TriggerExecution]: + """Get failed executions that can be retried.""" + executions = self.session.exec( + select(TriggerExecution).where( + and_( + 
TriggerExecution.status == ExecutionStatus.failed, + TriggerExecution.attempts < TriggerExecution.max_retries + ) + ).order_by(TriggerExecution.created_at) + ).all() + + return list(executions) + + def get_due_scheduled_triggers(self, limit: Optional[int] = None) -> List[Trigger]: + """ + Fetch scheduled triggers that are due for execution. + + Args: + limit: Maximum number of triggers to fetch (defaults to SCHEDULED_FETCH_BATCH_SIZE) + + Returns: + List of triggers that are due for execution + """ + current_time = datetime.now(timezone.utc) + limit = limit or SCHEDULED_FETCH_BATCH_SIZE + + # Query triggers that: + # 1. Are scheduled type + # 2. Are active + # 3. Have a cron expression + # 4. next_run_at is null (never run) or next_run_at <= now + triggers = self.session.exec( + select(Trigger) + .where( + and_( + Trigger.trigger_type == TriggerType.schedule, + Trigger.status == TriggerStatus.active, + Trigger.custom_cron_expression.is_not(None), + or_( + Trigger.next_run_at.is_(None), + Trigger.next_run_at <= current_time + ) + ) + ) + .limit(limit) + ).all() + + return list(triggers) + + def execute_scheduled_triggers(self) -> int: + """ + Execute all due scheduled triggers. + Uses TriggerScheduleService for the actual execution logic. 
+ """ + due_triggers = self.get_due_scheduled_triggers() + + if not due_triggers: + return 0 + + dispatched_count, rate_limited_count = self.schedule_service.process_schedules(due_triggers) + + logger.info( + "Scheduled triggers execution completed", + extra={ + "dispatched": dispatched_count, + "rate_limited": rate_limited_count + } + ) + + return dispatched_count + + def process_slack_trigger( + self, + trigger: Trigger, + slack_data: Dict[str, Any] + ) -> Optional[TriggerExecution]: + """Process a Slack trigger event.""" + if trigger.trigger_type != TriggerType.slack_trigger: + raise ValueError("Trigger is not a Slack trigger") + + if trigger.status != TriggerStatus.active: + logger.warning("Slack trigger is not active", extra={ + "trigger_id": trigger.id + }) + return None + + if not check_rate_limits(self.session, trigger): + logger.warning("Slack trigger execution skipped due to rate limits", extra={ + "trigger_id": trigger.id + }) + return None + + try: + execution = self.create_execution( + trigger=trigger, + execution_type=ExecutionType.slack, + input_data=slack_data + ) + + # TODO: Queue the actual task execution + + logger.info("Slack trigger executed", extra={ + "trigger_id": trigger.id, + "execution_id": execution.execution_id + }) + + return execution + + except Exception as e: + logger.error("Slack trigger execution failed", extra={ + "trigger_id": trigger.id, + "error": str(e) + }, exc_info=True) + return None + + def cleanup_old_executions(self, days_to_keep: int = 30) -> int: + """Clean up old execution records.""" + cutoff_date = datetime.now(timezone.utc) - timedelta(days=days_to_keep) + + old_executions = self.session.exec( + select(TriggerExecution).where( + and_( + TriggerExecution.created_at < cutoff_date, + TriggerExecution.status.in_([ + ExecutionStatus.completed, + ExecutionStatus.failed, + ExecutionStatus.cancelled + ]) + ) + ) + ).all() + + count = len(old_executions) + + for execution in old_executions: + self.session.delete(execution) 
+ + self.session.commit() + + logger.info("Old executions cleaned up", extra={ + "count": count, + "days_to_keep": days_to_keep + }) + + return count + + def get_trigger_statistics(self, trigger_id: int) -> Dict[str, Any]: + """Get statistics for a specific trigger.""" + trigger = self.session.get(Trigger, trigger_id) + if not trigger: + raise ValueError("Trigger not found") + + # Get execution counts by status + executions = self.session.exec( + select(TriggerExecution).where( + TriggerExecution.trigger_id == trigger_id + ) + ).all() + + stats = { + "trigger_id": trigger_id, + "name": trigger.name, + "trigger_type": trigger.trigger_type.value, + "status": trigger.status.name, + "total_executions": len(executions), + "successful_executions": len([e for e in executions if e.status == ExecutionStatus.completed]), + "failed_executions": len([e for e in executions if e.status == ExecutionStatus.failed]), + "pending_executions": len([e for e in executions if e.status == ExecutionStatus.pending]), + "cancelled_executions": len([e for e in executions if e.status == ExecutionStatus.cancelled]), + "last_executed_at": trigger.last_executed_at.isoformat() if trigger.last_executed_at else None, + "created_at": trigger.created_at.isoformat() if trigger.created_at else None + } + + # Calculate average execution time for completed executions + completed_executions = [e for e in executions if e.status == ExecutionStatus.completed and e.duration_seconds] + if completed_executions: + avg_duration = sum(e.duration_seconds for e in completed_executions) / len(completed_executions) + stats["average_execution_time_seconds"] = round(avg_duration, 2) + + # Calculate total tokens used + total_tokens = sum(e.tokens_used for e in executions if e.tokens_used) + if total_tokens: + stats["total_tokens_used"] = total_tokens + + return stats + +def get_trigger_service(session=None) -> TriggerService: + """Factory function to create a TriggerService instance with a fresh session.""" + return 
TriggerService(session) \ No newline at end of file diff --git a/server/app/type/trigger_types.py b/server/app/type/trigger_types.py new file mode 100644 index 000000000..93856bb68 --- /dev/null +++ b/server/app/type/trigger_types.py @@ -0,0 +1,51 @@ +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ========= Copyright 2025-2026 @ Eigent.ai All Rights Reserved. ========= + +from enum import StrEnum + +class TriggerType(StrEnum): + schedule = "schedule" + webhook = "webhook" + slack_trigger = "slack_trigger" + + +class TriggerStatus(StrEnum): + pending_verification = "pending_verification" + inactive = "inactive" + active = "active" + stale = "stale" + completed = "completed" + + +class ListenerType(StrEnum): + workforce = "workforce" + # chat_agent = "chat_agent" + +class ExecutionType(StrEnum): + scheduled = "scheduled" + webhook = "webhook" + manual = "manual" + slack = "slack" + +class ExecutionStatus(StrEnum): + pending = "pending" + running = "running" + completed = "completed" + failed = "failed" + cancelled = "cancelled" + missed = "missed" + +class RequestType(StrEnum): + GET = "GET" + POST = "POST" \ No newline at end of file diff --git a/server/celery/beat/start b/server/celery/beat/start new file mode 100644 index 000000000..0f16c6852 --- /dev/null +++ b/server/celery/beat/start @@ -0,0 +1,7 @@ +#!/bin/bash + +set -o errexit +set -o nounset + +rm -f './celerybeat.pid' +celery -A 
app.component.celery beat -l info \ No newline at end of file diff --git a/server/celery/worker/start b/server/celery/worker/start new file mode 100644 index 000000000..71d24f195 --- /dev/null +++ b/server/celery/worker/start @@ -0,0 +1,6 @@ +#!/bin/bash + +set -o errexit +set -o nounset + +celery -A app.component.celery worker --loglevel=info --queues=celery,poll_trigger_schedules,check_execution_timeouts \ No newline at end of file diff --git a/server/docker-compose.dev.yml b/server/docker-compose.dev.yml index 19fd97753..15685f034 100644 --- a/server/docker-compose.dev.yml +++ b/server/docker-compose.dev.yml @@ -1,5 +1,5 @@ services: - # PostgreSQL Database Only + # PostgreSQL Database postgres: image: postgres:15 container_name: eigent_postgres @@ -8,15 +8,32 @@ services: POSTGRES_DB: eigent POSTGRES_USER: postgres POSTGRES_PASSWORD: 123456 - POSTGRES_INITDB_ARGS: "--encoding=UTF-8 --lc-collate=C --lc-ctype=C" + POSTGRES_INITDB_ARGS: '--encoding=UTF-8 --lc-collate=C --lc-ctype=C' ports: - - "5432:5432" + - '5432:5432' volumes: - postgres_data:/var/lib/postgresql/data networks: - eigent_network healthcheck: - test: [ "CMD-SHELL", "pg_isready -U postgres -d eigent" ] + test: ['CMD-SHELL', 'pg_isready -U postgres -d eigent'] + interval: 10s + timeout: 5s + retries: 5 + + # Redis + redis: + image: redis:7-alpine + container_name: eigent-redis-dev + restart: unless-stopped + ports: + - '6379:6379' + volumes: + - eigent_redis_data:/data + networks: + - eigent_network + healthcheck: + test: ['CMD', 'redis-cli', 'ping'] interval: 10s timeout: 5s retries: 5 @@ -24,6 +41,7 @@ services: volumes: postgres_data: driver: local + eigent_redis_data: networks: eigent_network: diff --git a/server/docker-compose.yml b/server/docker-compose.yml index b25ad462b..35b3f9b6d 100644 --- a/server/docker-compose.yml +++ b/server/docker-compose.yml @@ -8,16 +8,32 @@ services: POSTGRES_DB: eigent POSTGRES_USER: postgres POSTGRES_PASSWORD: 123456 - POSTGRES_INITDB_ARGS: "--encoding=UTF-8 
--lc-collate=C --lc-ctype=C" + POSTGRES_INITDB_ARGS: '--encoding=UTF-8 --lc-collate=C --lc-ctype=C' ports: - - "5432:5432" + - '5432:5432' volumes: - postgres_data:/var/lib/postgresql/data #- ./init-db.sql:/docker-entrypoint-initdb.d/init-db.sql:ro networks: - eigent_network healthcheck: - test: [ "CMD-SHELL", "pg_isready -U postgres -d eigent" ] + test: ['CMD-SHELL', 'pg_isready -U postgres -d eigent'] + interval: 10s + timeout: 5s + retries: 5 + + redis: + image: redis:7-alpine + container_name: eigent-redis + restart: unless-stopped + ports: + - '6379:6379' + volumes: + - eigent_redis_data:/data + networks: + - eigent_network + healthcheck: + test: ['CMD', 'redis-cli', 'ping'] interval: 10s timeout: 5s retries: 5 @@ -32,11 +48,15 @@ services: container_name: eigent_api restart: unless-stopped ports: - - "3001:5678" + - '3001:5678' + env_file: + - .env environment: - - DATABASE_URL=postgresql://postgres:123456@postgres:5432/eigent - - ENVIRONMENT=production - - DEBUG=false + - database_url=postgresql://postgres:123456@postgres:5432/eigent + - redis_url=redis://redis:6379/0 + - celery_broker_url=redis://redis:6379/0 + - celery_result_url=redis://redis:6379/0 + - SESSION_REDIS_URL=redis://redis:6379/1 # volumes: # - ./app:/app/app # - ./alembic:/app/alembic @@ -45,18 +65,87 @@ services: depends_on: postgres: condition: service_healthy + redis: + condition: service_healthy + networks: + - eigent_network + healthcheck: + test: ['CMD', 'curl', '-f', 'http://localhost:5678/health'] + interval: 30s + timeout: 10s + retries: 3 + start_period: 40s + + # Celery Worker + celery_worker: + build: + context: .. 
+ dockerfile: server/Dockerfile + args: + database_url: postgresql://postgres:123456@postgres:5432/eigent + container_name: eigent_celery_worker + restart: unless-stopped + command: /app/celery/worker/start + env_file: + - .env + environment: + - database_url=postgresql://postgres:123456@postgres:5432/eigent + - redis_url=redis://redis:6379/0 + - celery_broker_url=redis://redis:6379/0 + - celery_result_url=redis://redis:6379/0 + - SESSION_REDIS_URL=redis://redis:6379/1 + # volumes: + # - ./app:/app/app + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy networks: - eigent_network healthcheck: - test: [ "CMD", "curl", "-f", "http://localhost:5678/health" ] + test: + [ + 'CMD-SHELL', + 'celery -A app.component.celery inspect ping -d celery@$$HOSTNAME', + ] interval: 30s timeout: 10s retries: 3 start_period: 40s + # Celery Beat Scheduler + celery_beat: + build: + context: .. + dockerfile: server/Dockerfile + args: + database_url: postgresql://postgres:123456@postgres:5432/eigent + container_name: eigent_celery_beat + restart: unless-stopped + command: /app/celery/beat/start + env_file: + - .env + environment: + - database_url=postgresql://postgres:123456@postgres:5432/eigent + - redis_url=redis://redis:6379/0 + - celery_broker_url=redis://redis:6379/0 + - celery_result_url=redis://redis:6379/0 + - SESSION_REDIS_URL=redis://redis:6379/1 + # volumes: + # - ./app:/app/app + depends_on: + postgres: + condition: service_healthy + redis: + condition: service_healthy + networks: + - eigent_network + volumes: postgres_data: driver: local + eigent_redis_data: networks: eigent_network: diff --git a/server/pyproject.toml b/server/pyproject.toml index 3b49d60e5..6620d9197 100644 --- a/server/pyproject.toml +++ b/server/pyproject.toml @@ -34,6 +34,9 @@ dependencies = [ "cryptography>=45.0.4", "sqids>=0.5.2", "exa-py>=1.14.16", + "fastapi-limiter>=0.1.6", + "slack-sdk>=3.39.0", + "celery>=5.6.2", ] [tool.ruff] @@ -69,3 +72,6 @@ 
combine-as-imports = true [tool.ruff.format] quote-style = "double" indent-style = "space" + +[tool.uv.sources] +camel-ai = { git = "https://github.com/camel-ai/camel.git", rev = "feat-trigger" } diff --git a/server/uv.lock b/server/uv.lock index ea31b69e8..72e76bb97 100644 --- a/server/uv.lock +++ b/server/uv.lock @@ -1,5 +1,5 @@ version = 1 -revision = 3 +revision = 1 requires-python = "==3.12.*" resolution-markers = [ "sys_platform == 'win32'", @@ -7,6 +7,62 @@ resolution-markers = [ "sys_platform != 'emscripten' and sys_platform != 'win32'", ] +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265 }, +] + +[[package]] +name = "aiohttp" +version = "3.13.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/50/42/32cf8e7704ceb4481406eb87161349abb46a57fee3f008ba9cb610968646/aiohttp-3.13.3.tar.gz", hash = "sha256:a949eee43d3782f2daae4f4a2819b2cb9b0c5d3b7f7a927067cc84dafdbb9f88", size = 7844556 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/be/4fc11f202955a69e0db803a12a062b8379c970c7c84f4882b6da17337cc1/aiohttp-3.13.3-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:b903a4dfee7d347e2d87697d0713be59e0b87925be030c9178c5faa58ea58d5c", size = 739732 }, + { url = "https://files.pythonhosted.org/packages/97/2c/621d5b851f94fa0bb7430d6089b3aa970a9d9b75196bc93bb624b0db237a/aiohttp-3.13.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a45530014d7a1e09f4a55f4f43097ba0fd155089372e105e4bff4ca76cb1b168", size = 494293 }, + { url = "https://files.pythonhosted.org/packages/5d/43/4be01406b78e1be8320bb8316dc9c42dbab553d281c40364e0f862d5661c/aiohttp-3.13.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:27234ef6d85c914f9efeb77ff616dbf4ad2380be0cda40b4db086ffc7ddd1b7d", size = 493533 }, + { url = "https://files.pythonhosted.org/packages/8d/a8/5a35dc56a06a2c90d4742cbf35294396907027f80eea696637945a106f25/aiohttp-3.13.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d32764c6c9aafb7fb55366a224756387cd50bfa720f32b88e0e6fa45b27dcf29", size = 1737839 }, + { url = "https://files.pythonhosted.org/packages/bf/62/4b9eeb331da56530bf2e198a297e5303e1c1ebdceeb00fe9b568a65c5a0c/aiohttp-3.13.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b1a6102b4d3ebc07dad44fbf07b45bb600300f15b552ddf1851b5390202ea2e3", size = 1703932 }, + { url = "https://files.pythonhosted.org/packages/7c/f6/af16887b5d419e6a367095994c0b1332d154f647e7dc2bd50e61876e8e3d/aiohttp-3.13.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c014c7ea7fb775dd015b2d3137378b7be0249a448a1612268b5a90c2d81de04d", size = 1771906 }, + { url = "https://files.pythonhosted.org/packages/ce/83/397c634b1bcc24292fa1e0c7822800f9f6569e32934bdeef09dae7992dfb/aiohttp-3.13.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2b8d8ddba8f95ba17582226f80e2de99c7a7948e66490ef8d947e272a93e9463", size = 1871020 }, + { url = 
"https://files.pythonhosted.org/packages/86/f6/a62cbbf13f0ac80a70f71b1672feba90fdb21fd7abd8dbf25c0105fb6fa3/aiohttp-3.13.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9ae8dd55c8e6c4257eae3a20fd2c8f41edaea5992ed67156642493b8daf3cecc", size = 1755181 }, + { url = "https://files.pythonhosted.org/packages/0a/87/20a35ad487efdd3fba93d5843efdfaa62d2f1479eaafa7453398a44faf13/aiohttp-3.13.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:01ad2529d4b5035578f5081606a465f3b814c542882804e2e8cda61adf5c71bf", size = 1561794 }, + { url = "https://files.pythonhosted.org/packages/de/95/8fd69a66682012f6716e1bc09ef8a1a2a91922c5725cb904689f112309c4/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:bb4f7475e359992b580559e008c598091c45b5088f28614e855e42d39c2f1033", size = 1697900 }, + { url = "https://files.pythonhosted.org/packages/e5/66/7b94b3b5ba70e955ff597672dad1691333080e37f50280178967aff68657/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c19b90316ad3b24c69cd78d5c9b4f3aa4497643685901185b65166293d36a00f", size = 1728239 }, + { url = "https://files.pythonhosted.org/packages/47/71/6f72f77f9f7d74719692ab65a2a0252584bf8d5f301e2ecb4c0da734530a/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:96d604498a7c782cb15a51c406acaea70d8c027ee6b90c569baa6e7b93073679", size = 1740527 }, + { url = "https://files.pythonhosted.org/packages/fa/b4/75ec16cbbd5c01bdaf4a05b19e103e78d7ce1ef7c80867eb0ace42ff4488/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:084911a532763e9d3dd95adf78a78f4096cd5f58cdc18e6fdbc1b58417a45423", size = 1554489 }, + { url = "https://files.pythonhosted.org/packages/52/8f/bc518c0eea29f8406dcf7ed1f96c9b48e3bc3995a96159b3fc11f9e08321/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7a4a94eb787e606d0a09404b9c38c113d3b099d508021faa615d70a0131907ce", size = 1767852 }, + { url = 
"https://files.pythonhosted.org/packages/9d/f2/a07a75173124f31f11ea6f863dc44e6f09afe2bca45dd4e64979490deab1/aiohttp-3.13.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:87797e645d9d8e222e04160ee32aa06bc5c163e8499f24db719e7852ec23093a", size = 1722379 }, + { url = "https://files.pythonhosted.org/packages/3c/4a/1a3fee7c21350cac78e5c5cef711bac1b94feca07399f3d406972e2d8fcd/aiohttp-3.13.3-cp312-cp312-win32.whl", hash = "sha256:b04be762396457bef43f3597c991e192ee7da460a4953d7e647ee4b1c28e7046", size = 428253 }, + { url = "https://files.pythonhosted.org/packages/d9/b7/76175c7cb4eb73d91ad63c34e29fc4f77c9386bba4a65b53ba8e05ee3c39/aiohttp-3.13.3-cp312-cp312-win_amd64.whl", hash = "sha256:e3531d63d3bdfa7e3ac5e9b27b2dd7ec9df3206a98e0b3445fa906f233264c57", size = 455407 }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490 }, +] + [[package]] name = "alembic" version = "1.18.1" @@ -16,27 +72,39 @@ dependencies = [ { name = "sqlalchemy" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/49/cc/aca263693b2ece99fa99a09b6d092acb89973eb2bb575faef1777e04f8b4/alembic-1.18.1.tar.gz", hash = "sha256:83ac6b81359596816fb3b893099841a0862f2117b2963258e965d70dc62fb866", size = 2044319, upload-time = "2026-01-14T18:53:14.907Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/49/cc/aca263693b2ece99fa99a09b6d092acb89973eb2bb575faef1777e04f8b4/alembic-1.18.1.tar.gz", hash = "sha256:83ac6b81359596816fb3b893099841a0862f2117b2963258e965d70dc62fb866", size = 2044319 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/36/cd9cb6101e81e39076b2fbe303bfa3c85ca34e55142b0324fcbf22c5c6e2/alembic-1.18.1-py3-none-any.whl", hash = "sha256:f1c3b0920b87134e851c25f1f7f236d8a332c34b75416802d06971df5d1b7810", size = 260973 }, +] + +[[package]] +name = "amqp" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "vine" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/79/fc/ec94a357dfc6683d8c86f8b4cfa5416a4c36b28052ec8260c77aca96a443/amqp-5.3.1.tar.gz", hash = "sha256:cddc00c725449522023bad949f70fff7b48f0b1ade74d170a6f10ab044739432", size = 129013 } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/36/cd9cb6101e81e39076b2fbe303bfa3c85ca34e55142b0324fcbf22c5c6e2/alembic-1.18.1-py3-none-any.whl", hash = "sha256:f1c3b0920b87134e851c25f1f7f236d8a332c34b75416802d06971df5d1b7810", size = 260973, upload-time = "2026-01-14T18:53:17.533Z" }, + { url = "https://files.pythonhosted.org/packages/26/99/fc813cd978842c26c82534010ea849eee9ab3a13ea2b74e95cb9c99e747b/amqp-5.3.1-py3-none-any.whl", hash = "sha256:43b3319e1b4e7d1251833a93d672b4af1e40f3d632d479b98661a95f117880a2", size = 50944 }, ] [[package]] name = "annotated-doc" version = "0.0.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = 
"sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303 }, ] [[package]] name = "annotated-types" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, ] [[package]] @@ -47,9 +115,9 @@ dependencies = [ { name = "idna" }, { name = "typing-extensions" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685 } wheels = [ - { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592 }, ] [[package]] @@ -60,64 +128,75 @@ dependencies = [ { name = "python-dateutil" }, { name = "tzdata" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/33/032cdc44182491aa708d06a68b62434140d8c50820a087fac7af37703357/arrow-1.4.0.tar.gz", hash = "sha256:ed0cc050e98001b8779e84d461b0098c4ac597e88704a655582b21d116e526d7", size = 152931, upload-time = "2025-10-18T17:46:46.761Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/33/032cdc44182491aa708d06a68b62434140d8c50820a087fac7af37703357/arrow-1.4.0.tar.gz", hash = "sha256:ed0cc050e98001b8779e84d461b0098c4ac597e88704a655582b21d116e526d7", size = 152931 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ed/c9/d7977eaacb9df673210491da99e6a247e93df98c715fc43fd136ce1d3d33/arrow-1.4.0-py3-none-any.whl", hash = "sha256:749f0769958ebdc79c173ff0b0670d59051a535fa26e8eba02953dc19eb43205", size = 68797, upload-time = "2025-10-18T17:46:45.663Z" 
}, + { url = "https://files.pythonhosted.org/packages/ed/c9/d7977eaacb9df673210491da99e6a247e93df98c715fc43fd136ce1d3d33/arrow-1.4.0-py3-none-any.whl", hash = "sha256:749f0769958ebdc79c173ff0b0670d59051a535fa26e8eba02953dc19eb43205", size = 68797 }, ] [[package]] name = "astor" version = "0.8.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/21/75b771132fee241dfe601d39ade629548a9626d1d39f333fde31bc46febe/astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e", size = 35090, upload-time = "2019-12-10T01:50:35.51Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/21/75b771132fee241dfe601d39ade629548a9626d1d39f333fde31bc46febe/astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e", size = 35090 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c3/88/97eef84f48fa04fbd6750e62dcceafba6c63c81b7ac1420856c8dcc0a3f9/astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5", size = 27488, upload-time = "2019-12-10T01:50:33.628Z" }, + { url = "https://files.pythonhosted.org/packages/c3/88/97eef84f48fa04fbd6750e62dcceafba6c63c81b7ac1420856c8dcc0a3f9/astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5", size = 27488 }, ] [[package]] name = "attrs" version = "25.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251, upload-time = "2025-10-06T13:54:44.725Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6b/5c/685e6633917e101e5dcb62b9dd76946cbb57c26e133bae9e0cd36033c0a9/attrs-25.4.0.tar.gz", hash = 
"sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11", size = 934251 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615, upload-time = "2025-10-06T13:54:43.17Z" }, + { url = "https://files.pythonhosted.org/packages/3a/2a/7cc015f5b9f5db42b7d48157e23356022889fc354a2813c15934b7cb5c0e/attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373", size = 67615 }, ] [[package]] name = "babel" version = "2.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537 }, ] [[package]] name = "bcrypt" version = "4.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/8c/ae/3af7d006aacf513975fd1948a6b4d6f8b4a307f8a244e1a3d3774b297aad/bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd", size = 25498, upload-time = "2022-10-09T15:36:49.775Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/ae/3af7d006aacf513975fd1948a6b4d6f8b4a307f8a244e1a3d3774b297aad/bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd", size = 25498 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/d4/3b2657bd58ef02b23a07729b0df26f21af97169dbd0b5797afa9e97ebb49/bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f", size = 473446 }, + { url = "https://files.pythonhosted.org/packages/ec/0a/1582790232fef6c2aa201f345577306b8bfe465c2c665dec04c86a016879/bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0", size = 583044 }, + { url = "https://files.pythonhosted.org/packages/41/16/49ff5146fb815742ad58cafb5034907aa7f166b1344d0ddd7fd1c818bd17/bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410", size = 583189 }, + { url = "https://files.pythonhosted.org/packages/aa/48/fd2b197a9741fa790ba0b88a9b10b5e88e62ff5cf3e1bc96d8354d7ce613/bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344", size = 593473 }, + { url = "https://files.pythonhosted.org/packages/7d/50/e683d8418974a602ba40899c8a5c38b3decaf5a4d36c32fc65dce454d8a8/bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a", size = 593249 }, + { url = 
"https://files.pythonhosted.org/packages/fb/a7/ee4561fd9b78ca23c8e5591c150cc58626a5dfb169345ab18e1c2c664ee0/bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3", size = 583586 }, + { url = "https://files.pythonhosted.org/packages/64/fe/da28a5916128d541da0993328dc5cf4b43dfbf6655f2c7a2abe26ca2dc88/bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2", size = 593659 }, + { url = "https://files.pythonhosted.org/packages/dd/4f/3632a69ce344c1551f7c9803196b191a8181c6a1ad2362c225581ef0d383/bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535", size = 613116 }, + { url = "https://files.pythonhosted.org/packages/87/69/edacb37481d360d06fc947dab5734aaf511acb7d1a1f9e2849454376c0f8/bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e", size = 624290 }, + { url = "https://files.pythonhosted.org/packages/aa/ca/6a534669890725cbb8c1fb4622019be31813c8edaa7b6d5b62fc9360a17e/bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab", size = 159428 }, + { url = "https://files.pythonhosted.org/packages/46/81/d8c22cd7e5e1c6a7d48e41a1d1d46c92f17dae70a54d9814f746e6027dec/bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9", size = 152930 }, +] + +[[package]] +name = "billiard" +version = "4.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/23/b12ac0bcdfb7360d664f40a00b1bda139cbbbced012c34e375506dbd0143/billiard-4.2.4.tar.gz", hash = "sha256:55f542c371209e03cd5862299b74e52e4fbcba8250ba611ad94276b369b6a85f", size = 156537 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/78/d4/3b2657bd58ef02b23a07729b0df26f21af97169dbd0b5797afa9e97ebb49/bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f", size = 473446, upload-time = "2022-10-09T15:36:25.481Z" }, - { url = "https://files.pythonhosted.org/packages/ec/0a/1582790232fef6c2aa201f345577306b8bfe465c2c665dec04c86a016879/bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0", size = 583044, upload-time = "2022-10-09T15:37:09.447Z" }, - { url = "https://files.pythonhosted.org/packages/41/16/49ff5146fb815742ad58cafb5034907aa7f166b1344d0ddd7fd1c818bd17/bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410", size = 583189, upload-time = "2022-10-09T15:37:10.69Z" }, - { url = "https://files.pythonhosted.org/packages/aa/48/fd2b197a9741fa790ba0b88a9b10b5e88e62ff5cf3e1bc96d8354d7ce613/bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344", size = 593473, upload-time = "2022-10-09T15:36:27.195Z" }, - { url = "https://files.pythonhosted.org/packages/7d/50/e683d8418974a602ba40899c8a5c38b3decaf5a4d36c32fc65dce454d8a8/bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a", size = 593249, upload-time = "2022-10-09T15:36:28.481Z" }, - { url = "https://files.pythonhosted.org/packages/fb/a7/ee4561fd9b78ca23c8e5591c150cc58626a5dfb169345ab18e1c2c664ee0/bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3", size = 583586, upload-time = "2022-10-09T15:37:11.962Z" }, - { url = 
"https://files.pythonhosted.org/packages/64/fe/da28a5916128d541da0993328dc5cf4b43dfbf6655f2c7a2abe26ca2dc88/bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2", size = 593659, upload-time = "2022-10-09T15:36:30.049Z" }, - { url = "https://files.pythonhosted.org/packages/dd/4f/3632a69ce344c1551f7c9803196b191a8181c6a1ad2362c225581ef0d383/bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535", size = 613116, upload-time = "2022-10-09T15:37:14.107Z" }, - { url = "https://files.pythonhosted.org/packages/87/69/edacb37481d360d06fc947dab5734aaf511acb7d1a1f9e2849454376c0f8/bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e", size = 624290, upload-time = "2022-10-09T15:36:31.251Z" }, - { url = "https://files.pythonhosted.org/packages/aa/ca/6a534669890725cbb8c1fb4622019be31813c8edaa7b6d5b62fc9360a17e/bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab", size = 159428, upload-time = "2022-10-09T15:36:32.893Z" }, - { url = "https://files.pythonhosted.org/packages/46/81/d8c22cd7e5e1c6a7d48e41a1d1d46c92f17dae70a54d9814f746e6027dec/bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9", size = 152930, upload-time = "2022-10-09T15:36:34.635Z" }, + { url = "https://files.pythonhosted.org/packages/cb/87/8bab77b323f16d67be364031220069f79159117dd5e43eeb4be2fef1ac9b/billiard-4.2.4-py3-none-any.whl", hash = "sha256:525b42bdec68d2b983347ac312f892db930858495db601b5836ac24e6477cde5", size = 87070 }, ] [[package]] name = "camel-ai" version = "0.2.85a0" -source = { registry = "https://pypi.org/simple" } +source = { git = "https://github.com/camel-ai/camel.git?rev=feat-trigger#22c5a8362403a0286e2698d5a12c84af35b7f8bf" } dependencies = [ 
+ { name = "aiohttp" }, { name = "astor" }, { name = "colorama" }, + { name = "croniter" }, { name = "docstring-parser" }, { name = "google-search-results" }, { name = "httpx" }, @@ -130,18 +209,34 @@ dependencies = [ { name = "tiktoken" }, { name = "websockets" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/54/ab/7d305f80e868a60c7097ab510063a171e1798d163b5f8fd7fe7c16553e13/camel_ai-0.2.85a0.tar.gz", hash = "sha256:432de9bac1e40bd4ebf434ca80eaf3993121f87924820e26ad2bad69c1fb5cf5", size = 1126159, upload-time = "2026-01-23T02:24:08.868Z" } + +[[package]] +name = "celery" +version = "5.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "billiard" }, + { name = "click" }, + { name = "click-didyoumean" }, + { name = "click-plugins" }, + { name = "click-repl" }, + { name = "kombu" }, + { name = "python-dateutil" }, + { name = "tzlocal" }, + { name = "vine" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8f/9d/3d13596519cfa7207a6f9834f4b082554845eb3cd2684b5f8535d50c7c44/celery-5.6.2.tar.gz", hash = "sha256:4a8921c3fcf2ad76317d3b29020772103581ed2454c4c042cc55dcc43585009b", size = 1718802 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ea/0c/35d73b5d648413844bdfeaf95172a6b7c19802150829f5f907753a773d19/camel_ai-0.2.85a0-py3-none-any.whl", hash = "sha256:6045e9af72fee918ca3acc92f3b4af8af084af7b0cf6435c01a1252bd04ae6b3", size = 1599866, upload-time = "2026-01-23T02:24:06.78Z" }, + { url = "https://files.pythonhosted.org/packages/dd/bd/9ecd619e456ae4ba73b6583cc313f26152afae13e9a82ac4fe7f8856bfd1/celery-5.6.2-py3-none-any.whl", hash = "sha256:3ffafacbe056951b629c7abcf9064c4a2366de0bdfc9fdba421b97ebb68619a5", size = 445502 }, ] [[package]] name = "certifi" version = "2026.1.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = 
"sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268, upload-time = "2026-01-04T02:42:41.825Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e0/2d/a891ca51311197f6ad14a7ef42e2399f36cf2f9bd44752b3dc4eab60fdc5/certifi-2026.1.4.tar.gz", hash = "sha256:ac726dd470482006e014ad384921ed6438c457018f4b3d204aea4281258b2120", size = 154268 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900, upload-time = "2026-01-04T02:42:40.15Z" }, + { url = "https://files.pythonhosted.org/packages/e6/ad/3cc14f097111b4de0040c83a525973216457bbeeb63739ef1ed275c1c021/certifi-2026.1.4-py3-none-any.whl", hash = "sha256:9943707519e4add1115f44c2bc244f782c0249876bf51b6599fee1ffbedd685c", size = 152900 }, ] [[package]] @@ -151,45 +246,45 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pycparser", marker = "implementation_name != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, - { url = 
"https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, - { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, - { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, - { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, - { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, - { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, - { url = 
"https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, - { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, - { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, - { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, - { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271 }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048 }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529 }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097 }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983 }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519 }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572 }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963 }, + { url = 
"https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361 }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932 }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557 }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762 }, ] [[package]] name = "charset-normalizer" version = "3.4.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" } +sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = 
"2025-10-14T04:40:53.353Z" }, - { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" }, - { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" }, - { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" }, - { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" }, - { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" }, - { url = 
"https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" }, - { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" }, - { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" }, - { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" }, - { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" }, - { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" }, - { url = 
"https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" }, - { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" }, - { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" }, - { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" }, - { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" }, + { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425 }, + { url = 
"https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162 }, + { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558 }, + { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497 }, + { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240 }, + { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471 }, + { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864 }, + { url = 
"https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647 }, + { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110 }, + { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839 }, + { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667 }, + { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535 }, + { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816 }, + { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694 }, + { url = 
"https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131 }, + { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390 }, + { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402 }, ] [[package]] @@ -199,27 +294,77 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274 }, +] + +[[package]] +name = "click-didyoumean" +version = "0.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/30/ce/217289b77c590ea1e7c24242d9ddd6e249e52c795ff10fac2c50062c48cb/click_didyoumean-0.3.1.tar.gz", hash = "sha256:4f82fdff0dbe64ef8ab2279bd6aa3f6a99c3b28c05aa09cbfc07c9d7fbb5a463", size = 3089 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/5b/974430b5ffdb7a4f1941d13d83c64a0395114503cc357c6b9ae4ce5047ed/click_didyoumean-0.3.1-py3-none-any.whl", hash = "sha256:5c4bb6007cfea5f2fd6583a2fb6701a22a41eb98957e63d0fac41c10e7c3117c", size = 3631 }, +] + +[[package]] +name = "click-plugins" +version = "1.1.1.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c3/a4/34847b59150da33690a36da3681d6bbc2ec14ee9a846bc30a6746e5984e4/click_plugins-1.1.1.2.tar.gz", hash = "sha256:d7af3984a99d243c131aa1a828331e7630f4a88a9741fd05c927b204bcf92261", size = 8343 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/9a/2abecb28ae875e39c8cad711eb1186d8d14eab564705325e77e4e6ab9ae5/click_plugins-1.1.1.2-py2.py3-none-any.whl", hash = "sha256:008d65743833ffc1f5417bf0e78e8d2c23aab04d9745ba817bd3e71b0feb6aa6", size = 11051 }, +] + +[[package]] +name = "click-repl" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "prompt-toolkit" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cb/a2/57f4ac79838cfae6912f997b4d1a64a858fb0c86d7fcaae6f7b58d267fca/click-repl-0.3.0.tar.gz", hash = "sha256:17849c23dba3d667247dc4defe1757fff98694e90fe37474f3feebb69ced26a9", size = 10449 } wheels = [ - { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/40/9d857001228658f0d59e97ebd4c346fe73e138c6de1bce61dc568a57c7f8/click_repl-0.3.0-py3-none-any.whl", hash = "sha256:fb7e06deb8da8de86180a33a9da97ac316751c094c6899382da7feeeeb51b812", size = 10289 }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, ] [[package]] name = "convert-case" version = "1.2.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6c/ac/22e9945f24acae18c43d1ff01f17ed792d4ba80b9d0757f2d18d23ce82ec/convert-case-1.2.3.tar.gz", hash = "sha256:a8c4329e47233a2b16cac3c5d020e8ba0305293efbe22a6d80f8ffddf049703f", size = 6984, upload-time = "2023-05-23T19:27:09.469Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6c/ac/22e9945f24acae18c43d1ff01f17ed792d4ba80b9d0757f2d18d23ce82ec/convert-case-1.2.3.tar.gz", hash = 
"sha256:a8c4329e47233a2b16cac3c5d020e8ba0305293efbe22a6d80f8ffddf049703f", size = 6984 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/2e/500ff29726ef207fdf6b625e62caf3839662c5d845897efc93bdf019192a/convert_case-1.2.3-py3-none-any.whl", hash = "sha256:ec8884050ca548e990666f82cba7ae2edfaa3c85dbead3042c2fd663b292373a", size = 9373, upload-time = "2023-05-23T19:27:06.039Z" }, + { url = "https://files.pythonhosted.org/packages/f3/2e/500ff29726ef207fdf6b625e62caf3839662c5d845897efc93bdf019192a/convert_case-1.2.3-py3-none-any.whl", hash = "sha256:ec8884050ca548e990666f82cba7ae2edfaa3c85dbead3042c2fd663b292373a", size = 9373 }, +] + +[[package]] +name = "croniter" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dateutil" }, + { name = "pytz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/2f/44d1ae153a0e27be56be43465e5cb39b9650c781e001e7864389deb25090/croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577", size = 64481 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468 }, ] [[package]] @@ -229,65 +374,65 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, - { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, - { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, - { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, - { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, - { url = 
"https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, - { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, - { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" }, - { url = 
"https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, - { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, - { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, - { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, - { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, - { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, - { url = 
"https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, - { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, - { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, - { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, - { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, - { url = 
"https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, - { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, - { url = "https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, - { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, - { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, +sdist = { url = 
"https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004 }, + { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667 }, + { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807 }, + { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615 }, + { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800 }, + { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707 }, + { url = 
"https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541 }, + { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464 }, + { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838 }, + { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596 }, + { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782 }, + { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381 }, + { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988 }, + { url = 
"https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451 }, + { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007 }, + { url = "https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248 }, + { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089 }, + { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029 }, + { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222 }, + { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280 }, + { url = 
"https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958 }, + { url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714 }, + { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970 }, + { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236 }, + { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642 }, + { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126 }, + { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573 }, + { url = 
"https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695 }, + { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720 }, + { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740 }, ] [[package]] name = "distro" version = "1.9.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 }, ] [[package]] name = "dnspython" version = 
"2.8.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" }, + { url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094 }, ] [[package]] name = "docstring-parser" version = "0.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442, upload-time = "2025-07-21T07:35:01.868Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/9d/c3b43da9515bd270df0f80548d9944e389870713cc1fe2b8fb35fe2bcefd/docstring_parser-0.17.0.tar.gz", hash = "sha256:583de4a309722b3315439bb31d64ba3eebada841f2e2cee23b99df001434c912", size = 27442 } wheels = [ - { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = 
"sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896, upload-time = "2025-07-21T07:35:00.684Z" }, + { url = "https://files.pythonhosted.org/packages/55/e2/2537ebcff11c1ee1ff17d8d0b6f4db75873e3b0fb32c2d4a2ee31ecb310a/docstring_parser-0.17.0-py3-none-any.whl", hash = "sha256:cf2569abd23dce8099b300f9b4fa8191e9582dda731fd533daf54c4551658708", size = 36896 }, ] [[package]] @@ -299,6 +444,7 @@ dependencies = [ { name = "arrow" }, { name = "bcrypt" }, { name = "camel-ai" }, + { name = "celery" }, { name = "click" }, { name = "convert-case" }, { name = "cryptography" }, @@ -306,6 +452,7 @@ dependencies = [ { name = "fastapi" }, { name = "fastapi-babel" }, { name = "fastapi-filter" }, + { name = "fastapi-limiter" }, { name = "fastapi-pagination" }, { name = "httpx" }, { name = "itsdangerous" }, @@ -321,6 +468,7 @@ dependencies = [ { name = "python-dotenv" }, { name = "python-multipart" }, { name = "requests" }, + { name = "slack-sdk" }, { name = "sqids" }, { name = "sqlalchemy-utils" }, { name = "sqlmodel" }, @@ -331,7 +479,8 @@ requires-dist = [ { name = "alembic", specifier = ">=1.15.2" }, { name = "arrow", specifier = ">=1.3.0" }, { name = "bcrypt", specifier = "==4.0.1" }, - { name = "camel-ai", specifier = "==0.2.85a0" }, + { name = "camel-ai", git = "https://github.com/camel-ai/camel.git?rev=feat-trigger" }, + { name = "celery", specifier = ">=5.6.2" }, { name = "click", specifier = ">=8.1.8" }, { name = "convert-case", specifier = ">=1.2.3" }, { name = "cryptography", specifier = ">=45.0.4" }, @@ -339,6 +488,7 @@ requires-dist = [ { name = "fastapi", specifier = ">=0.115.12" }, { name = "fastapi-babel", specifier = ">=1.0.0" }, { name = "fastapi-filter", specifier = ">=2.0.1" }, + { name = "fastapi-limiter", specifier = ">=0.1.6" }, { name = "fastapi-pagination", specifier = ">=0.12.34" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "itsdangerous", specifier = ">=2.2.0" }, @@ -354,6 +504,7 @@ requires-dist = [ { name = 
"python-dotenv", specifier = ">=1.1.0" }, { name = "python-multipart", specifier = ">=0.0.20" }, { name = "requests", specifier = ">=2.32.4" }, + { name = "slack-sdk", specifier = ">=3.39.0" }, { name = "sqids", specifier = ">=0.5.2" }, { name = "sqlalchemy-utils", specifier = ">=0.41.2" }, { name = "sqlmodel", specifier = ">=0.0.24" }, @@ -367,18 +518,18 @@ dependencies = [ { name = "dnspython" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238, upload-time = "2025-08-26T13:09:06.831Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/22/900cb125c76b7aaa450ce02fd727f452243f2e91a61af068b40adba60ea9/email_validator-2.3.0.tar.gz", hash = "sha256:9fc05c37f2f6cf439ff414f8fc46d917929974a82244c20eb10231ba60c54426", size = 51238 } wheels = [ - { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" }, + { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604 }, ] [[package]] name = "et-xmlfile" version = "2.0.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234, upload-time = "2024-10-25T17:25:40.039Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" }, + { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059 }, ] [[package]] @@ -390,9 +541,9 @@ dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/68/6f/40faa42669c1135aab83681e1515c4ce0a81ea847132caa16b52268cedfc/exa_py-2.1.1.tar.gz", hash = "sha256:e420937bf8249ddab4624681f15c78dd5433b6658783f1561c70ba8a0cd023a9", size = 45483, upload-time = "2026-01-22T10:29:49.751Z" } +sdist = { url = "https://files.pythonhosted.org/packages/68/6f/40faa42669c1135aab83681e1515c4ce0a81ea847132caa16b52268cedfc/exa_py-2.1.1.tar.gz", hash = "sha256:e420937bf8249ddab4624681f15c78dd5433b6658783f1561c70ba8a0cd023a9", size = 45483 } wheels = [ - { url = "https://files.pythonhosted.org/packages/02/31/37142a915a5590d307c9e15dfff4c2db3527bf1ec4b1b886b4ea6c6127be/exa_py-2.1.1-py3-none-any.whl", hash = "sha256:ec61609dfcfcdae8b9045d94a3cdc27376a38cd1808666a389a85d8eed14358b", size = 58003, upload-time = "2026-01-22T10:29:48.783Z" }, + { url = "https://files.pythonhosted.org/packages/02/31/37142a915a5590d307c9e15dfff4c2db3527bf1ec4b1b886b4ea6c6127be/exa_py-2.1.1-py3-none-any.whl", hash = "sha256:ec61609dfcfcdae8b9045d94a3cdc27376a38cd1808666a389a85d8eed14358b", size = 58003 }, ] [[package]] @@ -405,9 
+556,9 @@ dependencies = [ { name = "starlette" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682, upload-time = "2025-12-27T15:21:13.714Z" } +sdist = { url = "https://files.pythonhosted.org/packages/52/08/8c8508db6c7b9aae8f7175046af41baad690771c9bcde676419965e338c7/fastapi-0.128.0.tar.gz", hash = "sha256:1cc179e1cef10a6be60ffe429f79b829dce99d8de32d7acb7e6c8dfdf7f2645a", size = 365682 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094, upload-time = "2025-12-27T15:21:12.154Z" }, + { url = "https://files.pythonhosted.org/packages/5c/05/5cbb59154b093548acd0f4c7c474a118eda06da25aa75c616b72d8fcd92a/fastapi-0.128.0-py3-none-any.whl", hash = "sha256:aebd93f9716ee3b4f4fcfe13ffb7cf308d99c9f3ab5622d8877441072561582d", size = 103094 }, ] [[package]] @@ -419,9 +570,9 @@ dependencies = [ { name = "fastapi" }, { name = "uvicorn" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ea/0d/271af537fddc3c08e5f6e36c4e9f12c55d98ff9240a37454125b3c772fd2/fastapi_babel-1.0.0.tar.gz", hash = "sha256:a70005e132b6cfc611a5a02601c63bcd26a1b1cb689d7295be4587c9d35402f3", size = 12937, upload-time = "2024-12-05T20:48:09.838Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/0d/271af537fddc3c08e5f6e36c4e9f12c55d98ff9240a37454125b3c772fd2/fastapi_babel-1.0.0.tar.gz", hash = "sha256:a70005e132b6cfc611a5a02601c63bcd26a1b1cb689d7295be4587c9d35402f3", size = 12937 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/d5/bc9fa86cb3fb3fa040c5841562ea874bbdc069b660739a314cf75df06200/fastapi_babel-1.0.0-py3-none-any.whl", hash = 
"sha256:9be639b098dd07dfe5b811df318abdd7e282622ac20304d909151f46b09a087d", size = 11789, upload-time = "2024-12-05T20:48:07.847Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d5/bc9fa86cb3fb3fa040c5841562ea874bbdc069b660739a314cf75df06200/fastapi_babel-1.0.0-py3-none-any.whl", hash = "sha256:9be639b098dd07dfe5b811df318abdd7e282622ac20304d909151f46b09a087d", size = 11789 }, ] [[package]] @@ -432,9 +583,22 @@ dependencies = [ { name = "fastapi" }, { name = "pydantic" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/ed/c36cfcd849519fd2d23051ad81a91fc5e8cfa7109496fc8a10ad565a5fe9/fastapi_filter-2.0.1.tar.gz", hash = "sha256:cffda370097af7e404f1eb188aca58b199084bfaf7cec881e40b404adf12566e", size = 9857, upload-time = "2024-12-07T17:30:06.343Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/ed/c36cfcd849519fd2d23051ad81a91fc5e8cfa7109496fc8a10ad565a5fe9/fastapi_filter-2.0.1.tar.gz", hash = "sha256:cffda370097af7e404f1eb188aca58b199084bfaf7cec881e40b404adf12566e", size = 9857 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5e/88/afc022ad64d12f730141fc50758ecf9d60de5fed11335dc16e3127617f05/fastapi_filter-2.0.1-py3-none-any.whl", hash = "sha256:711d48707ec62f7c9e12a7713fc0f6a99858a9e3741b4d108102d5599e77197d", size = 11586, upload-time = "2024-12-07T17:30:05.375Z" }, + { url = "https://files.pythonhosted.org/packages/5e/88/afc022ad64d12f730141fc50758ecf9d60de5fed11335dc16e3127617f05/fastapi_filter-2.0.1-py3-none-any.whl", hash = "sha256:711d48707ec62f7c9e12a7713fc0f6a99858a9e3741b4d108102d5599e77197d", size = 11586 }, +] + +[[package]] +name = "fastapi-limiter" +version = "0.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "fastapi" }, + { name = "redis" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/7f/99/c7903234488d4dca5f9bccb4f88c2f582a234f0dca33348781c9cf8a48c6/fastapi_limiter-0.1.6.tar.gz", hash = 
"sha256:6f5fde8efebe12eb33861bdffb91009f699369a3c2862cdc7c1d9acf912ff443", size = 8307 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/b5/6f6b4d18bee1cafc857eae12738b3a03b7d1102b833668be868938c57b9d/fastapi_limiter-0.1.6-py3-none-any.whl", hash = "sha256:2e53179a4208b8f2c8795e38bb001324d3dc37d2800ff49fd28ec5caabf7a240", size = 15829 }, ] [[package]] @@ -446,9 +610,34 @@ dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a0/da/ad34e0fc98ca9731b0f76d07faeb39d525cb80440ac5814e270cb379d92a/fastapi_pagination-0.15.6.tar.gz", hash = "sha256:c59ca1aa056dccee3526953357c2d1128f988f83d3034d95ddb8de6f5a68e9f8", size = 573720, upload-time = "2026-01-11T22:15:36.385Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a0/da/ad34e0fc98ca9731b0f76d07faeb39d525cb80440ac5814e270cb379d92a/fastapi_pagination-0.15.6.tar.gz", hash = "sha256:c59ca1aa056dccee3526953357c2d1128f988f83d3034d95ddb8de6f5a68e9f8", size = 573720 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/01/28/0cf3b51115e98c0b84553b9e11ec07f59ae580bf8585eb7876fa9afe4c7a/fastapi_pagination-0.15.6-py3-none-any.whl", hash = "sha256:5c44bfaa78c1c968ca6f027b01a27c1805194c7cc8776eb84ec78235abcdaece", size = 59624 }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875 } wheels = [ - { url = "https://files.pythonhosted.org/packages/01/28/0cf3b51115e98c0b84553b9e11ec07f59ae580bf8585eb7876fa9afe4c7a/fastapi_pagination-0.15.6-py3-none-any.whl", hash = "sha256:5c44bfaa78c1c968ca6f027b01a27c1805194c7cc8776eb84ec78235abcdaece", size = 59624, upload-time = "2026-01-11T22:15:37.746Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782 }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594 }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448 }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411 }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014 }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909 }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049 }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485 }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619 }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320 }, + { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820 }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518 }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096 }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = 
"sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985 }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591 }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102 }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409 }, ] [[package]] @@ -458,30 +647,31 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/77/30/b3a6f6a2e00f8153549c2fa345c58ae1ce8e5f3153c2fe0484d444c3abcb/google_search_results-2.4.2.tar.gz", hash = "sha256:603a30ecae2af8e600b22635757a6df275dad4b934f975e67878ccd640b78245", size = 18818, upload-time = "2023-03-10T11:13:09.953Z" } +sdist = { url = "https://files.pythonhosted.org/packages/77/30/b3a6f6a2e00f8153549c2fa345c58ae1ce8e5f3153c2fe0484d444c3abcb/google_search_results-2.4.2.tar.gz", hash = "sha256:603a30ecae2af8e600b22635757a6df275dad4b934f975e67878ccd640b78245", size = 18818 } [[package]] name = "greenlet" version = "3.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651, upload-time = "2025-12-04T14:49:44.05Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c7/e5/40dbda2736893e3e53d25838e0f19a2b417dfc122b9989c91918db30b5d3/greenlet-3.3.0.tar.gz", hash = "sha256:a82bb225a4e9e4d653dd2fb7b8b2d36e4fb25bc0165422a11e48b88e9e6f78fb", size = 190651 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379, upload-time = "2025-12-04T14:23:30.498Z" }, - { url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294, upload-time = "2025-12-04T14:50:06.847Z" }, - { url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742, upload-time = "2025-12-04T14:57:42.349Z" }, - { url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885, upload-time = "2025-12-04T14:26:02.368Z" }, - { url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424, upload-time = "2025-12-04T15:04:23.757Z" }, - { url = 
"https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017, upload-time = "2025-12-04T14:27:29.688Z" }, - { url = "https://files.pythonhosted.org/packages/6c/79/3912a94cf27ec503e51ba493692d6db1e3cd8ac7ac52b0b47c8e33d7f4f9/greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39", size = 301964, upload-time = "2025-12-04T14:36:58.316Z" }, + { url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379 }, + { url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294 }, + { url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742 }, + { url = "https://files.pythonhosted.org/packages/77/cb/43692bcd5f7a0da6ec0ec6d58ee7cddb606d055ce94a62ac9b1aa481e969/greenlet-3.3.0-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:c024b1e5696626890038e34f76140ed1daf858e37496d33f2af57f06189e70d7", size = 622297 }, + { url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885 }, + { url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424 }, + { url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017 }, + { url = "https://files.pythonhosted.org/packages/6c/79/3912a94cf27ec503e51ba493692d6db1e3cd8ac7ac52b0b47c8e33d7f4f9/greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39", size = 301964 }, ] [[package]] name = "h11" version = "0.16.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = 
"sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515 }, ] [[package]] @@ -492,9 +682,9 @@ dependencies = [ { name = "certifi" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" } +sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" }, + { url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784 }, ] [[package]] @@ -507,61 +697,61 @@ dependencies = [ { name = "httpcore" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, ] [[package]] name = "httpx-sse" version = "0.4.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943, upload-time = "2025-10-10T21:48:22.271Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/4c/751061ffa58615a32c31b2d82e8482be8dd4a89154f003147acee90f2be9/httpx_sse-0.4.3.tar.gz", hash = "sha256:9b1ed0127459a66014aec3c56bebd93da3c1bc8bb6618c8082039a44889a755d", size = 15943 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960, upload-time = "2025-10-10T21:48:21.158Z" }, + { url = "https://files.pythonhosted.org/packages/d2/fd/6668e5aec43ab844de6fc74927e155a3b37bf40d7c3790e49fc0406b6578/httpx_sse-0.4.3-py3-none-any.whl", hash = "sha256:0ac1c9fe3c0afad2e0ebb25a934a59f4c7823b60792691f779fad2c5568830fc", size = 8960 }, ] [[package]] name = "idna" version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = 
"sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008 }, ] [[package]] name = "itsdangerous" version = "2.2.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410, upload-time = "2024-04-16T21:28:15.614Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9c/cb/8ac0172223afbccb63986cc25049b154ecfb5e85932587206f42317be31d/itsdangerous-2.2.0.tar.gz", hash = "sha256:e0050c0b7da1eea53ffaf149c0cfbb5c6e2e2b69c4bef22c81fa6eb73e5f6173", size = 54410 } wheels = [ - { url = "https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234, upload-time = "2024-04-16T21:28:14.499Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/96/92447566d16df59b2a776c0fb82dbc4d9e07cd95062562af01e408583fc4/itsdangerous-2.2.0-py3-none-any.whl", hash = "sha256:c6242fc49e35958c8b15141343aa660db5fc54d4f13a1db01a3f5891b98700ef", size = 16234 }, ] [[package]] name = "jiter" version = "0.12.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294, upload-time = "2025-11-09T20:49:23.302Z" } +sdist = { url = "https://files.pythonhosted.org/packages/45/9d/e0660989c1370e25848bb4c52d061c71837239738ad937e83edca174c273/jiter-0.12.0.tar.gz", hash = "sha256:64dfcd7d5c168b38d3f9f8bba7fc639edb3418abcc74f22fdbe6b8938293f30b", size = 168294 } wheels = [ - { url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449, upload-time = "2025-11-09T20:47:22.999Z" }, - { url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855, upload-time = "2025-11-09T20:47:24.779Z" }, - { url = "https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171, upload-time = "2025-11-09T20:47:26.469Z" }, - { url = 
"https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590, upload-time = "2025-11-09T20:47:27.918Z" }, - { url = "https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462, upload-time = "2025-11-09T20:47:29.654Z" }, - { url = "https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983, upload-time = "2025-11-09T20:47:31.026Z" }, - { url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328, upload-time = "2025-11-09T20:47:33.286Z" }, - { url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740, upload-time = "2025-11-09T20:47:34.703Z" }, - { url = "https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875, upload-time = "2025-11-09T20:47:36.058Z" }, - { url = 
"https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457, upload-time = "2025-11-09T20:47:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546, upload-time = "2025-11-09T20:47:40.47Z" }, - { url = "https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196, upload-time = "2025-11-09T20:47:41.794Z" }, - { url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100, upload-time = "2025-11-09T20:47:43.007Z" }, - { url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974, upload-time = "2025-11-09T20:49:17.187Z" }, - { url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233, upload-time = "2025-11-09T20:49:18.734Z" }, - { url = 
"https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 338537, upload-time = "2025-11-09T20:49:20.317Z" }, - { url = "https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110, upload-time = "2025-11-09T20:49:21.817Z" }, + { url = "https://files.pythonhosted.org/packages/92/c9/5b9f7b4983f1b542c64e84165075335e8a236fa9e2ea03a0c79780062be8/jiter-0.12.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:305e061fa82f4680607a775b2e8e0bcb071cd2205ac38e6ef48c8dd5ebe1cf37", size = 314449 }, + { url = "https://files.pythonhosted.org/packages/98/6e/e8efa0e78de00db0aee82c0cf9e8b3f2027efd7f8a71f859d8f4be8e98ef/jiter-0.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5c1860627048e302a528333c9307c818c547f214d8659b0705d2195e1a94b274", size = 319855 }, + { url = "https://files.pythonhosted.org/packages/20/26/894cd88e60b5d58af53bec5c6759d1292bd0b37a8b5f60f07abf7a63ae5f/jiter-0.12.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df37577a4f8408f7e0ec3205d2a8f87672af8f17008358063a4d6425b6081ce3", size = 350171 }, + { url = "https://files.pythonhosted.org/packages/f5/27/a7b818b9979ac31b3763d25f3653ec3a954044d5e9f5d87f2f247d679fd1/jiter-0.12.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fdd787356c1c13a4f40b43c2156276ef7a71eb487d98472476476d803fb2cf", size = 365590 }, + { url = 
"https://files.pythonhosted.org/packages/ba/7e/e46195801a97673a83746170b17984aa8ac4a455746354516d02ca5541b4/jiter-0.12.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1eb5db8d9c65b112aacf14fcd0faae9913d07a8afea5ed06ccdd12b724e966a1", size = 479462 }, + { url = "https://files.pythonhosted.org/packages/ca/75/f833bfb009ab4bd11b1c9406d333e3b4357709ed0570bb48c7c06d78c7dd/jiter-0.12.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:73c568cc27c473f82480abc15d1301adf333a7ea4f2e813d6a2c7d8b6ba8d0df", size = 378983 }, + { url = "https://files.pythonhosted.org/packages/71/b3/7a69d77943cc837d30165643db753471aff5df39692d598da880a6e51c24/jiter-0.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4321e8a3d868919bcb1abb1db550d41f2b5b326f72df29e53b2df8b006eb9403", size = 361328 }, + { url = "https://files.pythonhosted.org/packages/b0/ac/a78f90caf48d65ba70d8c6efc6f23150bc39dc3389d65bbec2a95c7bc628/jiter-0.12.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0a51bad79f8cc9cac2b4b705039f814049142e0050f30d91695a2d9a6611f126", size = 386740 }, + { url = "https://files.pythonhosted.org/packages/39/b6/5d31c2cc8e1b6a6bcf3c5721e4ca0a3633d1ab4754b09bc7084f6c4f5327/jiter-0.12.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2a67b678f6a5f1dd6c36d642d7db83e456bc8b104788262aaefc11a22339f5a9", size = 520875 }, + { url = "https://files.pythonhosted.org/packages/30/b5/4df540fae4e9f68c54b8dab004bd8c943a752f0b00efd6e7d64aa3850339/jiter-0.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efe1a211fe1fd14762adea941e3cfd6c611a136e28da6c39272dbb7a1bbe6a86", size = 511457 }, + { url = "https://files.pythonhosted.org/packages/07/65/86b74010e450a1a77b2c1aabb91d4a91dd3cd5afce99f34d75fd1ac64b19/jiter-0.12.0-cp312-cp312-win32.whl", hash = "sha256:d779d97c834b4278276ec703dc3fc1735fca50af63eb7262f05bdb4e62203d44", size = 204546 }, + { url = 
"https://files.pythonhosted.org/packages/1c/c7/6659f537f9562d963488e3e55573498a442503ced01f7e169e96a6110383/jiter-0.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e8269062060212b373316fe69236096aaf4c49022d267c6736eebd66bbbc60bb", size = 205196 }, + { url = "https://files.pythonhosted.org/packages/21/f4/935304f5169edadfec7f9c01eacbce4c90bb9a82035ac1de1f3bd2d40be6/jiter-0.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:06cb970936c65de926d648af0ed3d21857f026b1cf5525cb2947aa5e01e05789", size = 186100 }, + { url = "https://files.pythonhosted.org/packages/cb/f5/12efb8ada5f5c9edc1d4555fe383c1fb2eac05ac5859258a72d61981d999/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:e8547883d7b96ef2e5fe22b88f8a4c8725a56e7f4abafff20fd5272d634c7ecb", size = 309974 }, + { url = "https://files.pythonhosted.org/packages/85/15/d6eb3b770f6a0d332675141ab3962fd4a7c270ede3515d9f3583e1d28276/jiter-0.12.0-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:89163163c0934854a668ed783a2546a0617f71706a2551a4a0666d91ab365d6b", size = 304233 }, + { url = "https://files.pythonhosted.org/packages/8c/3e/e7e06743294eea2cf02ced6aa0ff2ad237367394e37a0e2b4a1108c67a36/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d96b264ab7d34bbb2312dedc47ce07cd53f06835eacbc16dde3761f47c3a9e7f", size = 338537 }, + { url = "https://files.pythonhosted.org/packages/2f/9c/6753e6522b8d0ef07d3a3d239426669e984fb0eba15a315cdbc1253904e4/jiter-0.12.0-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24e864cb30ab82311c6425655b0cdab0a98c5d973b065c66a3f020740c2324c", size = 346110 }, ] [[package]] @@ -574,9 +764,9 @@ dependencies = [ { name = "referencing" }, { name = "rpds-py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = 
"sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583, upload-time = "2026-01-07T13:41:07.246Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/fc/e067678238fa451312d4c62bf6e6cf5ec56375422aee02f9cb5f909b3047/jsonschema-4.26.0.tar.gz", hash = "sha256:0c26707e2efad8aa1bfc5b7ce170f3fccc2e4918ff85989ba9ffa9facb2be326", size = 366583 } wheels = [ - { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630, upload-time = "2026-01-07T13:41:05.306Z" }, + { url = "https://files.pythonhosted.org/packages/69/90/f63fb5873511e014207a475e2bb4e8b2e570d655b00ac19a9a0ca0a385ee/jsonschema-4.26.0-py3-none-any.whl", hash = "sha256:d489f15263b8d200f8387e64b4c3a75f06629559fb73deb8fdfb525f2dab50ce", size = 90630 }, ] [[package]] @@ -586,9 +776,24 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "referencing" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855, upload-time = "2025-09-08T01:34:59.186Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/74/a633ee74eb36c44aa6d1095e7cc5569bebf04342ee146178e2d36600708b/jsonschema_specifications-2025.9.1.tar.gz", hash = "sha256:b540987f239e745613c7a9176f3edb72b832a4ac465cf02712288397832b5e8d", size = 32855 } wheels = [ - { url = "https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437, upload-time = "2025-09-08T01:34:57.871Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/45/1a4ed80516f02155c51f51e8cedb3c1902296743db0bbc66608a0db2814f/jsonschema_specifications-2025.9.1-py3-none-any.whl", hash = "sha256:98802fee3a11ee76ecaca44429fda8a41bff98b00a0f2838151b113f210cc6fe", size = 18437 }, +] + +[[package]] +name = "kombu" +version = "5.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "amqp" }, + { name = "packaging" }, + { name = "tzdata" }, + { name = "vine" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/a5/607e533ed6c83ae1a696969b8e1c137dfebd5759a2e9682e26ff1b97740b/kombu-5.6.2.tar.gz", hash = "sha256:8060497058066c6f5aed7c26d7cd0d3b574990b09de842a8c5aaed0b92cc5a55", size = 472594 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/0f/834427d8c03ff1d7e867d3db3d176470c64871753252b21b4f4897d1fa45/kombu-5.6.2-py3-none-any.whl", hash = "sha256:efcfc559da324d41d61ca311b0c64965ea35b4c55cc04ee36e55386145dace93", size = 214219 }, ] [[package]] @@ -598,28 +803,28 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "markupsafe" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474 } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, + { url = 
"https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509 }, ] [[package]] name = "markupsafe" version = "3.0.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313 } wheels = [ - { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, - { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, - { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, - { url = 
"https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, - { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, - { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, - { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, - { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, - { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, - { url = 
"https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, - { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615 }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020 }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332 }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947 }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962 }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760 }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529 }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015 }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540 }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105 }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906 }, ] [[package]] @@ -642,28 +847,55 @@ dependencies = [ { name = "typing-inspection" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = 
"sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387, upload-time = "2025-12-19T10:19:56.985Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d5/2d/649d80a0ecf6a1f82632ca44bec21c0461a9d9fc8934d38cb5b319f2db5e/mcp-1.25.0.tar.gz", hash = "sha256:56310361ebf0364e2d438e5b45f7668cbb124e158bb358333cd06e49e83a6802", size = 605387 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076 }, +] + +[[package]] +name = "multidict" +version = "6.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/fc/6dc7659c2ae5ddf280477011f4213a74f806862856b796ef08f028e664bf/mcp-1.25.0-py3-none-any.whl", hash = "sha256:b37c38144a666add0862614cc79ec276e97d72aa8ca26d622818d4e278b9721a", size = 233076, upload-time = "2025-12-19T10:19:55.416Z" }, + { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893 }, + { url = "https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456 }, + { url = 
"https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872 }, + { url = "https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018 }, + { url = "https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883 }, + { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413 }, + { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404 }, + { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456 }, + { url = 
"https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322 }, + { url = "https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955 }, + { url = "https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254 }, + { url = "https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059 }, + { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588 }, + { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642 }, + { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377 }, + { url = 
"https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887 }, + { url = "https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053 }, + { url = "https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307 }, + { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319 }, ] [[package]] name = "numpy" version = "2.4.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/24/62/ae72ff66c0f1fd959925b4c11f8c2dea61f47f6acaea75a08512cdfe3fed/numpy-2.4.1.tar.gz", hash = "sha256:a1ceafc5042451a858231588a104093474c6a5c57dcc724841f5c888d237d690", size = 20721320, upload-time = "2026-01-10T06:44:59.619Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/62/ae72ff66c0f1fd959925b4c11f8c2dea61f47f6acaea75a08512cdfe3fed/numpy-2.4.1.tar.gz", hash = "sha256:a1ceafc5042451a858231588a104093474c6a5c57dcc724841f5c888d237d690", size = 20721320 } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/7f/ec53e32bf10c813604edf07a3682616bd931d026fcde7b6d13195dfb684a/numpy-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d3703409aac693fa82c0aee023a1ae06a6e9d065dba10f5e8e80f642f1e9d0a2", size = 16656888, upload-time = "2026-01-10T06:42:40.913Z" }, - { url = 
"https://files.pythonhosted.org/packages/b8/e0/1f9585d7dae8f14864e948fd7fa86c6cb72dee2676ca2748e63b1c5acfe0/numpy-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7211b95ca365519d3596a1d8688a95874cc94219d417504d9ecb2df99fa7bfa8", size = 12373956, upload-time = "2026-01-10T06:42:43.091Z" }, - { url = "https://files.pythonhosted.org/packages/8e/43/9762e88909ff2326f5e7536fa8cb3c49fb03a7d92705f23e6e7f553d9cb3/numpy-2.4.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5adf01965456a664fc727ed69cc71848f28d063217c63e1a0e200a118d5eec9a", size = 5202567, upload-time = "2026-01-10T06:42:45.107Z" }, - { url = "https://files.pythonhosted.org/packages/4b/ee/34b7930eb61e79feb4478800a4b95b46566969d837546aa7c034c742ef98/numpy-2.4.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:26f0bcd9c79a00e339565b303badc74d3ea2bd6d52191eeca5f95936cad107d0", size = 6549459, upload-time = "2026-01-10T06:42:48.152Z" }, - { url = "https://files.pythonhosted.org/packages/79/e3/5f115fae982565771be994867c89bcd8d7208dbfe9469185497d70de5ddf/numpy-2.4.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0093e85df2960d7e4049664b26afc58b03236e967fb942354deef3208857a04c", size = 14404859, upload-time = "2026-01-10T06:42:49.947Z" }, - { url = "https://files.pythonhosted.org/packages/d9/7d/9c8a781c88933725445a859cac5d01b5871588a15969ee6aeb618ba99eee/numpy-2.4.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad270f438cbdd402c364980317fb6b117d9ec5e226fff5b4148dd9aa9fc6e02", size = 16371419, upload-time = "2026-01-10T06:42:52.409Z" }, - { url = "https://files.pythonhosted.org/packages/a6/d2/8aa084818554543f17cf4162c42f162acbd3bb42688aefdba6628a859f77/numpy-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:297c72b1b98100c2e8f873d5d35fb551fce7040ade83d67dd51d38c8d42a2162", size = 16182131, upload-time = "2026-01-10T06:42:54.694Z" }, - { url = 
"https://files.pythonhosted.org/packages/60/db/0425216684297c58a8df35f3284ef56ec4a043e6d283f8a59c53562caf1b/numpy-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf6470d91d34bf669f61d515499859fa7a4c2f7c36434afb70e82df7217933f9", size = 18295342, upload-time = "2026-01-10T06:42:56.991Z" }, - { url = "https://files.pythonhosted.org/packages/31/4c/14cb9d86240bd8c386c881bafbe43f001284b7cce3bc01623ac9475da163/numpy-2.4.1-cp312-cp312-win32.whl", hash = "sha256:b6bcf39112e956594b3331316d90c90c90fb961e39696bda97b89462f5f3943f", size = 5959015, upload-time = "2026-01-10T06:42:59.631Z" }, - { url = "https://files.pythonhosted.org/packages/51/cf/52a703dbeb0c65807540d29699fef5fda073434ff61846a564d5c296420f/numpy-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:e1a27bb1b2dee45a2a53f5ca6ff2d1a7f135287883a1689e930d44d1ff296c87", size = 12310730, upload-time = "2026-01-10T06:43:01.627Z" }, - { url = "https://files.pythonhosted.org/packages/69/80/a828b2d0ade5e74a9fe0f4e0a17c30fdc26232ad2bc8c9f8b3197cf7cf18/numpy-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:0e6e8f9d9ecf95399982019c01223dc130542960a12edfa8edd1122dfa66a8a8", size = 10312166, upload-time = "2026-01-10T06:43:03.673Z" }, + { url = "https://files.pythonhosted.org/packages/78/7f/ec53e32bf10c813604edf07a3682616bd931d026fcde7b6d13195dfb684a/numpy-2.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d3703409aac693fa82c0aee023a1ae06a6e9d065dba10f5e8e80f642f1e9d0a2", size = 16656888 }, + { url = "https://files.pythonhosted.org/packages/b8/e0/1f9585d7dae8f14864e948fd7fa86c6cb72dee2676ca2748e63b1c5acfe0/numpy-2.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7211b95ca365519d3596a1d8688a95874cc94219d417504d9ecb2df99fa7bfa8", size = 12373956 }, + { url = "https://files.pythonhosted.org/packages/8e/43/9762e88909ff2326f5e7536fa8cb3c49fb03a7d92705f23e6e7f553d9cb3/numpy-2.4.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5adf01965456a664fc727ed69cc71848f28d063217c63e1a0e200a118d5eec9a", size = 5202567 
}, + { url = "https://files.pythonhosted.org/packages/4b/ee/34b7930eb61e79feb4478800a4b95b46566969d837546aa7c034c742ef98/numpy-2.4.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:26f0bcd9c79a00e339565b303badc74d3ea2bd6d52191eeca5f95936cad107d0", size = 6549459 }, + { url = "https://files.pythonhosted.org/packages/79/e3/5f115fae982565771be994867c89bcd8d7208dbfe9469185497d70de5ddf/numpy-2.4.1-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0093e85df2960d7e4049664b26afc58b03236e967fb942354deef3208857a04c", size = 14404859 }, + { url = "https://files.pythonhosted.org/packages/d9/7d/9c8a781c88933725445a859cac5d01b5871588a15969ee6aeb618ba99eee/numpy-2.4.1-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7ad270f438cbdd402c364980317fb6b117d9ec5e226fff5b4148dd9aa9fc6e02", size = 16371419 }, + { url = "https://files.pythonhosted.org/packages/a6/d2/8aa084818554543f17cf4162c42f162acbd3bb42688aefdba6628a859f77/numpy-2.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:297c72b1b98100c2e8f873d5d35fb551fce7040ade83d67dd51d38c8d42a2162", size = 16182131 }, + { url = "https://files.pythonhosted.org/packages/60/db/0425216684297c58a8df35f3284ef56ec4a043e6d283f8a59c53562caf1b/numpy-2.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf6470d91d34bf669f61d515499859fa7a4c2f7c36434afb70e82df7217933f9", size = 18295342 }, + { url = "https://files.pythonhosted.org/packages/31/4c/14cb9d86240bd8c386c881bafbe43f001284b7cce3bc01623ac9475da163/numpy-2.4.1-cp312-cp312-win32.whl", hash = "sha256:b6bcf39112e956594b3331316d90c90c90fb961e39696bda97b89462f5f3943f", size = 5959015 }, + { url = "https://files.pythonhosted.org/packages/51/cf/52a703dbeb0c65807540d29699fef5fda073434ff61846a564d5c296420f/numpy-2.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:e1a27bb1b2dee45a2a53f5ca6ff2d1a7f135287883a1689e930d44d1ff296c87", size = 12310730 }, + { url = 
"https://files.pythonhosted.org/packages/69/80/a828b2d0ade5e74a9fe0f4e0a17c30fdc26232ad2bc8c9f8b3197cf7cf18/numpy-2.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:0e6e8f9d9ecf95399982019c01223dc130542960a12edfa8edd1122dfa66a8a8", size = 10312166 }, ] [[package]] @@ -680,9 +912,9 @@ dependencies = [ { name = "tqdm" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c6/a1/a303104dc55fc546a3f6914c842d3da471c64eec92043aef8f652eb6c524/openai-1.109.1.tar.gz", hash = "sha256:d173ed8dbca665892a6db099b4a2dfac624f94d20a93f46eb0b56aae940ed869", size = 564133, upload-time = "2025-09-24T13:00:53.075Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c6/a1/a303104dc55fc546a3f6914c842d3da471c64eec92043aef8f652eb6c524/openai-1.109.1.tar.gz", hash = "sha256:d173ed8dbca665892a6db099b4a2dfac624f94d20a93f46eb0b56aae940ed869", size = 564133 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/2a/7dd3d207ec669cacc1f186fd856a0f61dbc255d24f6fdc1a6715d6051b0f/openai-1.109.1-py3-none-any.whl", hash = "sha256:6bcaf57086cf59159b8e27447e4e7dd019db5d29a438072fbd49c290c7e65315", size = 948627, upload-time = "2025-09-24T13:00:50.754Z" }, + { url = "https://files.pythonhosted.org/packages/1d/2a/7dd3d207ec669cacc1f186fd856a0f61dbc255d24f6fdc1a6715d6051b0f/openai-1.109.1-py3-none-any.whl", hash = "sha256:6bcaf57086cf59159b8e27447e4e7dd019db5d29a438072fbd49c290c7e65315", size = 948627 }, ] [[package]] @@ -692,9 +924,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "et-xmlfile" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464, upload-time = "2024-06-28T14:03:44.161Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910, upload-time = "2024-06-28T14:03:41.161Z" }, + { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910 }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366 }, ] [[package]] @@ -706,25 +947,25 @@ dependencies = [ { name = "python-dateutil" }, { name = "tzdata", marker = "sys_platform == 'emscripten' or sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/de/da/b1dc0481ab8d55d0f46e343cfe67d4551a0e14fcee52bd38ca1bd73258d8/pandas-3.0.0.tar.gz", hash = "sha256:0facf7e87d38f721f0af46fe70d97373a37701b1c09f7ed7aeeb292ade5c050f", size = 4633005, upload-time = "2026-01-21T15:52:04.726Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/de/da/b1dc0481ab8d55d0f46e343cfe67d4551a0e14fcee52bd38ca1bd73258d8/pandas-3.0.0.tar.gz", hash = "sha256:0facf7e87d38f721f0af46fe70d97373a37701b1c09f7ed7aeeb292ade5c050f", size = 4633005 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/38/db33686f4b5fa64d7af40d96361f6a4615b8c6c8f1b3d334eee46ae6160e/pandas-3.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9803b31f5039b3c3b10cc858c5e40054adb4b29b4d81cb2fd789f4121c8efbcd", size = 10334013, upload-time = "2026-01-21T15:50:34.771Z" }, - { url = "https://files.pythonhosted.org/packages/a5/7b/9254310594e9774906bacdd4e732415e1f86ab7dbb4b377ef9ede58cd8ec/pandas-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14c2a4099cd38a1d18ff108168ea417909b2dea3bd1ebff2ccf28ddb6a74d740", size = 9874154, upload-time = "2026-01-21T15:50:36.67Z" }, - { url = "https://files.pythonhosted.org/packages/63/d4/726c5a67a13bc66643e66d2e9ff115cead482a44fc56991d0c4014f15aaf/pandas-3.0.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d257699b9a9960e6125686098d5714ac59d05222bef7a5e6af7a7fd87c650801", size = 10384433, upload-time = "2026-01-21T15:50:39.132Z" }, - { url = "https://files.pythonhosted.org/packages/bf/2e/9211f09bedb04f9832122942de8b051804b31a39cfbad199a819bb88d9f3/pandas-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:69780c98f286076dcafca38d8b8eee1676adf220199c0a39f0ecbf976b68151a", size = 10864519, upload-time = "2026-01-21T15:50:41.043Z" }, - { url = "https://files.pythonhosted.org/packages/00/8d/50858522cdc46ac88b9afdc3015e298959a70a08cd21e008a44e9520180c/pandas-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4a66384f017240f3858a4c8a7cf21b0591c3ac885cddb7758a589f0f71e87ebb", size = 11394124, upload-time = "2026-01-21T15:50:43.377Z" }, - { url = 
"https://files.pythonhosted.org/packages/86/3f/83b2577db02503cd93d8e95b0f794ad9d4be0ba7cb6c8bcdcac964a34a42/pandas-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be8c515c9bc33989d97b89db66ea0cececb0f6e3c2a87fcc8b69443a6923e95f", size = 11920444, upload-time = "2026-01-21T15:50:45.932Z" }, - { url = "https://files.pythonhosted.org/packages/64/2d/4f8a2f192ed12c90a0aab47f5557ece0e56b0370c49de9454a09de7381b2/pandas-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a453aad8c4f4e9f166436994a33884442ea62aa8b27d007311e87521b97246e1", size = 9730970, upload-time = "2026-01-21T15:50:47.962Z" }, - { url = "https://files.pythonhosted.org/packages/d4/64/ff571be435cf1e643ca98d0945d76732c0b4e9c37191a89c8550b105eed1/pandas-3.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:da768007b5a33057f6d9053563d6b74dd6d029c337d93c6d0d22a763a5c2ecc0", size = 9041950, upload-time = "2026-01-21T15:50:50.422Z" }, + { url = "https://files.pythonhosted.org/packages/0b/38/db33686f4b5fa64d7af40d96361f6a4615b8c6c8f1b3d334eee46ae6160e/pandas-3.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9803b31f5039b3c3b10cc858c5e40054adb4b29b4d81cb2fd789f4121c8efbcd", size = 10334013 }, + { url = "https://files.pythonhosted.org/packages/a5/7b/9254310594e9774906bacdd4e732415e1f86ab7dbb4b377ef9ede58cd8ec/pandas-3.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:14c2a4099cd38a1d18ff108168ea417909b2dea3bd1ebff2ccf28ddb6a74d740", size = 9874154 }, + { url = "https://files.pythonhosted.org/packages/63/d4/726c5a67a13bc66643e66d2e9ff115cead482a44fc56991d0c4014f15aaf/pandas-3.0.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d257699b9a9960e6125686098d5714ac59d05222bef7a5e6af7a7fd87c650801", size = 10384433 }, + { url = "https://files.pythonhosted.org/packages/bf/2e/9211f09bedb04f9832122942de8b051804b31a39cfbad199a819bb88d9f3/pandas-3.0.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:69780c98f286076dcafca38d8b8eee1676adf220199c0a39f0ecbf976b68151a", size = 10864519 }, + { url = "https://files.pythonhosted.org/packages/00/8d/50858522cdc46ac88b9afdc3015e298959a70a08cd21e008a44e9520180c/pandas-3.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4a66384f017240f3858a4c8a7cf21b0591c3ac885cddb7758a589f0f71e87ebb", size = 11394124 }, + { url = "https://files.pythonhosted.org/packages/86/3f/83b2577db02503cd93d8e95b0f794ad9d4be0ba7cb6c8bcdcac964a34a42/pandas-3.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be8c515c9bc33989d97b89db66ea0cececb0f6e3c2a87fcc8b69443a6923e95f", size = 11920444 }, + { url = "https://files.pythonhosted.org/packages/64/2d/4f8a2f192ed12c90a0aab47f5557ece0e56b0370c49de9454a09de7381b2/pandas-3.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:a453aad8c4f4e9f166436994a33884442ea62aa8b27d007311e87521b97246e1", size = 9730970 }, + { url = "https://files.pythonhosted.org/packages/d4/64/ff571be435cf1e643ca98d0945d76732c0b4e9c37191a89c8550b105eed1/pandas-3.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:da768007b5a33057f6d9053563d6b74dd6d029c337d93c6d0d22a763a5c2ecc0", size = 9041950 }, ] [[package]] name = "passlib" version = "1.7.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b6/06/9da9ee59a67fae7761aab3ccc84fa4f3f33f125b370f1ccdb915bf967c11/passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04", size = 689844, upload-time = "2020-10-08T19:00:52.121Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/06/9da9ee59a67fae7761aab3ccc84fa4f3f33f125b370f1ccdb915bf967c11/passlib-1.7.4.tar.gz", hash = "sha256:defd50f72b65c5402ab2c573830a6978e5f202ad0d984793c8dde2c4152ebe04", size = 689844 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3b/a4/ab6b7589382ca3df236e03faa71deac88cae040af60c071a78d254a62172/passlib-1.7.4-py2.py3-none-any.whl", hash = 
"sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1", size = 525554, upload-time = "2020-10-08T19:00:49.856Z" }, + { url = "https://files.pythonhosted.org/packages/3b/a4/ab6b7589382ca3df236e03faa71deac88cae040af60c071a78d254a62172/passlib-1.7.4-py2.py3-none-any.whl", hash = "sha256:aa6bca462b8d8bda89c70b382f0c298a20b5560af6cbfa2dce410c0a2fb669f1", size = 525554 }, ] [package.optional-dependencies] @@ -736,61 +977,97 @@ bcrypt = [ name = "pillow" version = "12.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/02/d52c733a2452ef1ffcc123b68e6606d07276b0e358db70eabad7e40042b7/pillow-12.1.0.tar.gz", hash = "sha256:5c5ae0a06e9ea030ab786b0251b32c7e4ce10e58d983c0d5c56029455180b5b9", size = 46977283, upload-time = "2026-01-02T09:13:29.892Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/02/d52c733a2452ef1ffcc123b68e6606d07276b0e358db70eabad7e40042b7/pillow-12.1.0.tar.gz", hash = "sha256:5c5ae0a06e9ea030ab786b0251b32c7e4ce10e58d983c0d5c56029455180b5b9", size = 46977283 } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/31/dc53fe21a2f2996e1b7d92bf671cdb157079385183ef7c1ae08b485db510/pillow-12.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a332ac4ccb84b6dde65dbace8431f3af08874bf9770719d32a635c4ef411b18b", size = 5262642, upload-time = "2026-01-02T09:11:10.138Z" }, - { url = "https://files.pythonhosted.org/packages/ab/c1/10e45ac9cc79419cedf5121b42dcca5a50ad2b601fa080f58c22fb27626e/pillow-12.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:907bfa8a9cb790748a9aa4513e37c88c59660da3bcfffbd24a7d9e6abf224551", size = 4657464, upload-time = "2026-01-02T09:11:12.319Z" }, - { url = "https://files.pythonhosted.org/packages/ad/26/7b82c0ab7ef40ebede7a97c72d473bda5950f609f8e0c77b04af574a0ddb/pillow-12.1.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:efdc140e7b63b8f739d09a99033aa430accce485ff78e6d311973a67b6bf3208", size = 
6234878, upload-time = "2026-01-02T09:11:14.096Z" }, - { url = "https://files.pythonhosted.org/packages/76/25/27abc9792615b5e886ca9411ba6637b675f1b77af3104710ac7353fe5605/pillow-12.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bef9768cab184e7ae6e559c032e95ba8d07b3023c289f79a2bd36e8bf85605a5", size = 8044868, upload-time = "2026-01-02T09:11:15.903Z" }, - { url = "https://files.pythonhosted.org/packages/0a/ea/f200a4c36d836100e7bc738fc48cd963d3ba6372ebc8298a889e0cfc3359/pillow-12.1.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:742aea052cf5ab5034a53c3846165bc3ce88d7c38e954120db0ab867ca242661", size = 6349468, upload-time = "2026-01-02T09:11:17.631Z" }, - { url = "https://files.pythonhosted.org/packages/11/8f/48d0b77ab2200374c66d344459b8958c86693be99526450e7aee714e03e4/pillow-12.1.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a6dfc2af5b082b635af6e08e0d1f9f1c4e04d17d4e2ca0ef96131e85eda6eb17", size = 7041518, upload-time = "2026-01-02T09:11:19.389Z" }, - { url = "https://files.pythonhosted.org/packages/1d/23/c281182eb986b5d31f0a76d2a2c8cd41722d6fb8ed07521e802f9bba52de/pillow-12.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:609e89d9f90b581c8d16358c9087df76024cf058fa693dd3e1e1620823f39670", size = 6462829, upload-time = "2026-01-02T09:11:21.28Z" }, - { url = "https://files.pythonhosted.org/packages/25/ef/7018273e0faac099d7b00982abdcc39142ae6f3bd9ceb06de09779c4a9d6/pillow-12.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:43b4899cfd091a9693a1278c4982f3e50f7fb7cff5153b05174b4afc9593b616", size = 7166756, upload-time = "2026-01-02T09:11:23.559Z" }, - { url = "https://files.pythonhosted.org/packages/8f/c8/993d4b7ab2e341fe02ceef9576afcf5830cdec640be2ac5bee1820d693d4/pillow-12.1.0-cp312-cp312-win32.whl", hash = "sha256:aa0c9cc0b82b14766a99fbe6084409972266e82f459821cd26997a488a7261a7", size = 6328770, upload-time = "2026-01-02T09:11:25.661Z" }, - { url = 
"https://files.pythonhosted.org/packages/a7/87/90b358775a3f02765d87655237229ba64a997b87efa8ccaca7dd3e36e7a7/pillow-12.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:d70534cea9e7966169ad29a903b99fc507e932069a881d0965a1a84bb57f6c6d", size = 7033406, upload-time = "2026-01-02T09:11:27.474Z" }, - { url = "https://files.pythonhosted.org/packages/5d/cf/881b457eccacac9e5b2ddd97d5071fb6d668307c57cbf4e3b5278e06e536/pillow-12.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:65b80c1ee7e14a87d6a068dd3b0aea268ffcabfe0498d38661b00c5b4b22e74c", size = 2452612, upload-time = "2026-01-02T09:11:29.309Z" }, + { url = "https://files.pythonhosted.org/packages/20/31/dc53fe21a2f2996e1b7d92bf671cdb157079385183ef7c1ae08b485db510/pillow-12.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a332ac4ccb84b6dde65dbace8431f3af08874bf9770719d32a635c4ef411b18b", size = 5262642 }, + { url = "https://files.pythonhosted.org/packages/ab/c1/10e45ac9cc79419cedf5121b42dcca5a50ad2b601fa080f58c22fb27626e/pillow-12.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:907bfa8a9cb790748a9aa4513e37c88c59660da3bcfffbd24a7d9e6abf224551", size = 4657464 }, + { url = "https://files.pythonhosted.org/packages/ad/26/7b82c0ab7ef40ebede7a97c72d473bda5950f609f8e0c77b04af574a0ddb/pillow-12.1.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:efdc140e7b63b8f739d09a99033aa430accce485ff78e6d311973a67b6bf3208", size = 6234878 }, + { url = "https://files.pythonhosted.org/packages/76/25/27abc9792615b5e886ca9411ba6637b675f1b77af3104710ac7353fe5605/pillow-12.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:bef9768cab184e7ae6e559c032e95ba8d07b3023c289f79a2bd36e8bf85605a5", size = 8044868 }, + { url = "https://files.pythonhosted.org/packages/0a/ea/f200a4c36d836100e7bc738fc48cd963d3ba6372ebc8298a889e0cfc3359/pillow-12.1.0-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:742aea052cf5ab5034a53c3846165bc3ce88d7c38e954120db0ab867ca242661", 
size = 6349468 }, + { url = "https://files.pythonhosted.org/packages/11/8f/48d0b77ab2200374c66d344459b8958c86693be99526450e7aee714e03e4/pillow-12.1.0-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a6dfc2af5b082b635af6e08e0d1f9f1c4e04d17d4e2ca0ef96131e85eda6eb17", size = 7041518 }, + { url = "https://files.pythonhosted.org/packages/1d/23/c281182eb986b5d31f0a76d2a2c8cd41722d6fb8ed07521e802f9bba52de/pillow-12.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:609e89d9f90b581c8d16358c9087df76024cf058fa693dd3e1e1620823f39670", size = 6462829 }, + { url = "https://files.pythonhosted.org/packages/25/ef/7018273e0faac099d7b00982abdcc39142ae6f3bd9ceb06de09779c4a9d6/pillow-12.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:43b4899cfd091a9693a1278c4982f3e50f7fb7cff5153b05174b4afc9593b616", size = 7166756 }, + { url = "https://files.pythonhosted.org/packages/8f/c8/993d4b7ab2e341fe02ceef9576afcf5830cdec640be2ac5bee1820d693d4/pillow-12.1.0-cp312-cp312-win32.whl", hash = "sha256:aa0c9cc0b82b14766a99fbe6084409972266e82f459821cd26997a488a7261a7", size = 6328770 }, + { url = "https://files.pythonhosted.org/packages/a7/87/90b358775a3f02765d87655237229ba64a997b87efa8ccaca7dd3e36e7a7/pillow-12.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:d70534cea9e7966169ad29a903b99fc507e932069a881d0965a1a84bb57f6c6d", size = 7033406 }, + { url = "https://files.pythonhosted.org/packages/5d/cf/881b457eccacac9e5b2ddd97d5071fb6d668307c57cbf4e3b5278e06e536/pillow-12.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:65b80c1ee7e14a87d6a068dd3b0aea268ffcabfe0498d38661b00c5b4b22e74c", size = 2452612 }, +] + +[[package]] +name = "prompt-toolkit" +version = "3.0.52" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "wcwidth" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/96/06e01a7b38dce6fe1db213e061a4602dd6032a8a97ef6c1a862537732421/prompt_toolkit-3.0.52.tar.gz", hash = 
"sha256:28cde192929c8e7321de85de1ddbe736f1375148b02f2e17edd840042b1be855", size = 434198 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/03/0d3ce49e2505ae70cf43bc5bb3033955d2fc9f932163e84dc0779cc47f48/prompt_toolkit-3.0.52-py3-none-any.whl", hash = "sha256:9aac639a3bbd33284347de5ad8d68ecc044b91a762dc39b7c21095fcd6a19955", size = 391431 }, +] + +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061 }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037 }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324 }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505 }, + { url = 
"https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242 }, + { url = "https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474 }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575 }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736 }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019 }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376 }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988 }, + { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615 }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066 }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655 }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789 }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305 }, ] [[package]] name = "psutil" version = "5.9.8" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/90/c7/6dc0a455d111f68ee43f27793971cf03fe29b6ef972042549db29eec39a2/psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c", size = 503247, upload-time = "2024-01-19T20:47:09.517Z" } +sdist = { url = "https://files.pythonhosted.org/packages/90/c7/6dc0a455d111f68ee43f27793971cf03fe29b6ef972042549db29eec39a2/psutil-5.9.8.tar.gz", hash = 
"sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c", size = 503247 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/e3/07ae864a636d70a8a6f58da27cb1179192f1140d5d1da10886ade9405797/psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81", size = 248702, upload-time = "2024-01-19T20:47:36.303Z" }, - { url = "https://files.pythonhosted.org/packages/b3/bd/28c5f553667116b2598b9cc55908ec435cb7f77a34f2bff3e3ca765b0f78/psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421", size = 285242, upload-time = "2024-01-19T20:47:39.65Z" }, - { url = "https://files.pythonhosted.org/packages/c5/4f/0e22aaa246f96d6ac87fe5ebb9c5a693fbe8877f537a1022527c47ca43c5/psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4", size = 288191, upload-time = "2024-01-19T20:47:43.078Z" }, - { url = "https://files.pythonhosted.org/packages/6e/f5/2aa3a4acdc1e5940b59d421742356f133185667dd190b166dbcfcf5d7b43/psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0", size = 251252, upload-time = "2024-01-19T20:47:52.88Z" }, - { url = "https://files.pythonhosted.org/packages/93/52/3e39d26feae7df0aa0fd510b14012c3678b36ed068f7d78b8d8784d61f0e/psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf", size = 255090, upload-time = "2024-01-19T20:47:56.019Z" }, - { url = "https://files.pythonhosted.org/packages/05/33/2d74d588408caedd065c2497bdb5ef83ce6082db01289a1e1147f6639802/psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8", size = 
249898, upload-time = "2024-01-19T20:47:59.238Z" }, + { url = "https://files.pythonhosted.org/packages/e7/e3/07ae864a636d70a8a6f58da27cb1179192f1140d5d1da10886ade9405797/psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81", size = 248702 }, + { url = "https://files.pythonhosted.org/packages/b3/bd/28c5f553667116b2598b9cc55908ec435cb7f77a34f2bff3e3ca765b0f78/psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421", size = 285242 }, + { url = "https://files.pythonhosted.org/packages/c5/4f/0e22aaa246f96d6ac87fe5ebb9c5a693fbe8877f537a1022527c47ca43c5/psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4", size = 288191 }, + { url = "https://files.pythonhosted.org/packages/6e/f5/2aa3a4acdc1e5940b59d421742356f133185667dd190b166dbcfcf5d7b43/psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0", size = 251252 }, + { url = "https://files.pythonhosted.org/packages/93/52/3e39d26feae7df0aa0fd510b14012c3678b36ed068f7d78b8d8784d61f0e/psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf", size = 255090 }, + { url = "https://files.pythonhosted.org/packages/05/33/2d74d588408caedd065c2497bdb5ef83ce6082db01289a1e1147f6639802/psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8", size = 249898 }, ] [[package]] name = "psycopg2-binary" version = "2.9.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620, upload-time = "2025-10-10T11:14:48.041Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/6c/8767aaa597ba424643dc87348c6f1754dd9f48e80fdc1b9f7ca5c3a7c213/psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c", size = 379620 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603, upload-time = "2025-10-10T11:11:52.213Z" }, - { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509, upload-time = "2025-10-10T11:11:56.452Z" }, - { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159, upload-time = "2025-10-10T11:12:00.49Z" }, - { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234, upload-time = "2025-10-10T11:12:04.892Z" }, - { url = 
"https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236, upload-time = "2025-10-10T11:12:11.674Z" }, - { url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083, upload-time = "2025-10-30T02:55:15.73Z" }, - { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281, upload-time = "2025-10-10T11:12:17.713Z" }, - { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010, upload-time = "2025-10-10T11:12:22.671Z" }, - { url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641, upload-time = "2025-10-30T02:55:19.929Z" }, - { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940, upload-time = "2025-10-10T11:12:26.529Z" }, - { url = 
"https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147, upload-time = "2025-10-10T11:12:29.535Z" }, + { url = "https://files.pythonhosted.org/packages/d8/91/f870a02f51be4a65987b45a7de4c2e1897dd0d01051e2b559a38fa634e3e/psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4", size = 3756603 }, + { url = "https://files.pythonhosted.org/packages/27/fa/cae40e06849b6c9a95eb5c04d419942f00d9eaac8d81626107461e268821/psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc", size = 3864509 }, + { url = "https://files.pythonhosted.org/packages/2d/75/364847b879eb630b3ac8293798e380e441a957c53657995053c5ec39a316/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a", size = 4411159 }, + { url = "https://files.pythonhosted.org/packages/6f/a0/567f7ea38b6e1c62aafd58375665a547c00c608a471620c0edc364733e13/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e", size = 4468234 }, + { url = "https://files.pythonhosted.org/packages/30/da/4e42788fb811bbbfd7b7f045570c062f49e350e1d1f3df056c3fb5763353/psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db", size = 4166236 }, + { url = "https://files.pythonhosted.org/packages/3c/94/c1777c355bc560992af848d98216148be5f1be001af06e06fc49cbded578/psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757", size = 3983083 }, + { url = "https://files.pythonhosted.org/packages/bd/42/c9a21edf0e3daa7825ed04a4a8588686c6c14904344344a039556d78aa58/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3", size = 3652281 }, + { url = "https://files.pythonhosted.org/packages/12/22/dedfbcfa97917982301496b6b5e5e6c5531d1f35dd2b488b08d1ebc52482/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a", size = 3298010 }, + { url = "https://files.pythonhosted.org/packages/66/ea/d3390e6696276078bd01b2ece417deac954dfdd552d2edc3d03204416c0c/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34", size = 3044641 }, + { url = "https://files.pythonhosted.org/packages/12/9a/0402ded6cbd321da0c0ba7d34dc12b29b14f5764c2fc10750daa38e825fc/psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d", size = 3347940 }, + { url = "https://files.pythonhosted.org/packages/b1/d2/99b55e85832ccde77b211738ff3925a5d73ad183c0b37bcbbe5a8ff04978/psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d", size = 2714147 }, ] [[package]] name = "pycparser" version = "3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492 } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172 }, ] [[package]] @@ -803,9 +1080,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/da/b8a7ee04378a53f6fefefc0c5e05570a3ebfdfa0523a878bcd3b475683ee/pydantic-2.12.0.tar.gz", hash = "sha256:c1a077e6270dbfb37bfd8b498b3981e2bb18f68103720e51fa6c306a5a9af563", size = 814760, upload-time = "2025-10-07T15:58:03.467Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/da/b8a7ee04378a53f6fefefc0c5e05570a3ebfdfa0523a878bcd3b475683ee/pydantic-2.12.0.tar.gz", hash = "sha256:c1a077e6270dbfb37bfd8b498b3981e2bb18f68103720e51fa6c306a5a9af563", size = 814760 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f4/9d/d5c855424e2e5b6b626fbc6ec514d8e655a600377ce283008b115abb7445/pydantic-2.12.0-py3-none-any.whl", hash = "sha256:f6a1da352d42790537e95e83a8bdfb91c7efbae63ffd0b86fa823899e807116f", size = 459730, upload-time = "2025-10-07T15:58:01.576Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9d/d5c855424e2e5b6b626fbc6ec514d8e655a600377ce283008b115abb7445/pydantic-2.12.0-py3-none-any.whl", hash = "sha256:f6a1da352d42790537e95e83a8bdfb91c7efbae63ffd0b86fa823899e807116f", size = 459730 }, ] 
[package.optional-dependencies] @@ -820,26 +1097,26 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7d/14/12b4a0d2b0b10d8e1d9a24ad94e7bbb43335eaf29c0c4e57860e8a30734a/pydantic_core-2.41.1.tar.gz", hash = "sha256:1ad375859a6d8c356b7704ec0f547a58e82ee80bb41baa811ad710e124bc8f2f", size = 454870, upload-time = "2025-10-07T10:50:45.974Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/ee/bc/5f520319ee1c9e25010412fac4154a72e0a40d0a19eb00281b1f200c0947/pydantic_core-2.41.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:db2f82c0ccbce8f021ad304ce35cbe02aa2f95f215cac388eed542b03b4d5eb4", size = 2099300, upload-time = "2025-10-06T21:10:30.463Z" }, - { url = "https://files.pythonhosted.org/packages/31/14/010cd64c5c3814fb6064786837ec12604be0dd46df3327cf8474e38abbbd/pydantic_core-2.41.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47694a31c710ced9205d5f1e7e8af3ca57cbb8a503d98cb9e33e27c97a501601", size = 1910179, upload-time = "2025-10-06T21:10:31.782Z" }, - { url = "https://files.pythonhosted.org/packages/8e/2e/23fc2a8a93efad52df302fdade0a60f471ecc0c7aac889801ac24b4c07d6/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e9decce94daf47baf9e9d392f5f2557e783085f7c5e522011545d9d6858e00", size = 1957225, upload-time = "2025-10-06T21:10:33.11Z" }, - { url = "https://files.pythonhosted.org/packages/b9/b6/6db08b2725b2432b9390844852e11d320281e5cea8a859c52c68001975fa/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab0adafdf2b89c8b84f847780a119437a0931eca469f7b44d356f2b426dd9741", size = 2053315, upload-time = "2025-10-06T21:10:34.87Z" }, - { url = "https://files.pythonhosted.org/packages/61/d9/4de44600f2d4514b44f3f3aeeda2e14931214b6b5bf52479339e801ce748/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5da98cc81873f39fd56882e1569c4677940fbc12bce6213fad1ead784192d7c8", size = 2224298, upload-time = "2025-10-06T21:10:36.233Z" }, - { url = "https://files.pythonhosted.org/packages/7a/ae/dbe51187a7f35fc21b283c5250571a94e36373eb557c1cba9f29a9806dcf/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:209910e88afb01fd0fd403947b809ba8dba0e08a095e1f703294fda0a8fdca51", size = 2351797, upload-time = "2025-10-06T21:10:37.601Z" }, - { url = "https://files.pythonhosted.org/packages/b5/a7/975585147457c2e9fb951c7c8dab56deeb6aa313f3aa72c2fc0df3f74a49/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365109d1165d78d98e33c5bfd815a9b5d7d070f578caefaabcc5771825b4ecb5", size = 2074921, upload-time = "2025-10-06T21:10:38.927Z" }, - { url = "https://files.pythonhosted.org/packages/62/37/ea94d1d0c01dec1b7d236c7cec9103baab0021f42500975de3d42522104b/pydantic_core-2.41.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:706abf21e60a2857acdb09502bc853ee5bce732955e7b723b10311114f033115", size = 2187767, upload-time = "2025-10-06T21:10:40.651Z" }, - { url = "https://files.pythonhosted.org/packages/d3/fe/694cf9fdd3a777a618c3afd210dba7b414cb8a72b1bd29b199c2e5765fee/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bf0bd5417acf7f6a7ec3b53f2109f587be176cb35f9cf016da87e6017437a72d", size = 2136062, upload-time = "2025-10-06T21:10:42.09Z" }, - { url = "https://files.pythonhosted.org/packages/0f/ae/174aeabd89916fbd2988cc37b81a59e1186e952afd2a7ed92018c22f31ca/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:2e71b1c6ceb9c78424ae9f63a07292fb769fb890a4e7efca5554c47f33a60ea5", size = 2317819, upload-time = "2025-10-06T21:10:43.974Z" }, - { url = "https://files.pythonhosted.org/packages/65/e8/e9aecafaebf53fc456314f72886068725d6fba66f11b013532dc21259343/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:80745b9770b4a38c25015b517451c817799bfb9d6499b0d13d8227ec941cb513", size = 2312267, upload-time = "2025-10-06T21:10:45.34Z" }, - { url = "https://files.pythonhosted.org/packages/35/2f/1c2e71d2a052f9bb2f2df5a6a05464a0eb800f9e8d9dd800202fe31219e1/pydantic_core-2.41.1-cp312-cp312-win32.whl", hash = "sha256:83b64d70520e7890453f1aa21d66fda44e7b35f1cfea95adf7b4289a51e2b479", size = 1990927, upload-time = "2025-10-06T21:10:46.738Z" }, - { url = "https://files.pythonhosted.org/packages/b1/78/562998301ff2588b9c6dcc5cb21f52fa919d6e1decc75a35055feb973594/pydantic_core-2.41.1-cp312-cp312-win_amd64.whl", hash = "sha256:377defd66ee2003748ee93c52bcef2d14fde48fe28a0b156f88c3dbf9bc49a50", size = 2034703, upload-time = "2025-10-06T21:10:48.524Z" }, - { url = "https://files.pythonhosted.org/packages/b2/53/d95699ce5a5cdb44bb470bd818b848b9beadf51459fd4ea06667e8ede862/pydantic_core-2.41.1-cp312-cp312-win_arm64.whl", hash = "sha256:c95caff279d49c1d6cdfe2996e6c2ad712571d3b9caaa209a404426c326c4bde", size = 1972719, upload-time = "2025-10-06T21:10:50.256Z" }, - { url = "https://files.pythonhosted.org/packages/2b/3e/a51c5f5d37b9288ba30683d6e96f10fa8f1defad1623ff09f1020973b577/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b04fa9ed049461a7398138c604b00550bc89e3e1151d84b81ad6dc93e39c4c06", size = 2115344, upload-time = "2025-10-07T10:50:02.466Z" }, - { url = "https://files.pythonhosted.org/packages/5a/bd/389504c9e0600ef4502cd5238396b527afe6ef8981a6a15cd1814fc7b434/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:b3b7d9cfbfdc43c80a16638c6dc2768e3956e73031fca64e8e1a3ae744d1faeb", size = 1927994, upload-time = "2025-10-07T10:50:04.379Z" }, - { url = "https://files.pythonhosted.org/packages/ff/9c/5111c6b128861cb792a4c082677e90dac4f2e090bb2e2fe06aa5b2d39027/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:eec83fc6abef04c7f9bec616e2d76ee9a6a4ae2a359b10c21d0f680e24a247ca", size = 1959394, upload-time = "2025-10-07T10:50:06.335Z" }, - { url = "https://files.pythonhosted.org/packages/14/3f/cfec8b9a0c48ce5d64409ec5e1903cb0b7363da38f14b41de2fcb3712700/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6771a2d9f83c4038dfad5970a3eef215940682b2175e32bcc817bdc639019b28", size = 2147365, upload-time = "2025-10-07T10:50:07.978Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/7d/14/12b4a0d2b0b10d8e1d9a24ad94e7bbb43335eaf29c0c4e57860e8a30734a/pydantic_core-2.41.1.tar.gz", hash = "sha256:1ad375859a6d8c356b7704ec0f547a58e82ee80bb41baa811ad710e124bc8f2f", size = 454870 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/bc/5f520319ee1c9e25010412fac4154a72e0a40d0a19eb00281b1f200c0947/pydantic_core-2.41.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:db2f82c0ccbce8f021ad304ce35cbe02aa2f95f215cac388eed542b03b4d5eb4", size = 2099300 }, + { url = "https://files.pythonhosted.org/packages/31/14/010cd64c5c3814fb6064786837ec12604be0dd46df3327cf8474e38abbbd/pydantic_core-2.41.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47694a31c710ced9205d5f1e7e8af3ca57cbb8a503d98cb9e33e27c97a501601", size = 1910179 }, + { url = "https://files.pythonhosted.org/packages/8e/2e/23fc2a8a93efad52df302fdade0a60f471ecc0c7aac889801ac24b4c07d6/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e9decce94daf47baf9e9d392f5f2557e783085f7c5e522011545d9d6858e00", size = 1957225 }, + { url = "https://files.pythonhosted.org/packages/b9/b6/6db08b2725b2432b9390844852e11d320281e5cea8a859c52c68001975fa/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab0adafdf2b89c8b84f847780a119437a0931eca469f7b44d356f2b426dd9741", size = 2053315 }, + { url = 
"https://files.pythonhosted.org/packages/61/d9/4de44600f2d4514b44f3f3aeeda2e14931214b6b5bf52479339e801ce748/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5da98cc81873f39fd56882e1569c4677940fbc12bce6213fad1ead784192d7c8", size = 2224298 }, + { url = "https://files.pythonhosted.org/packages/7a/ae/dbe51187a7f35fc21b283c5250571a94e36373eb557c1cba9f29a9806dcf/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:209910e88afb01fd0fd403947b809ba8dba0e08a095e1f703294fda0a8fdca51", size = 2351797 }, + { url = "https://files.pythonhosted.org/packages/b5/a7/975585147457c2e9fb951c7c8dab56deeb6aa313f3aa72c2fc0df3f74a49/pydantic_core-2.41.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:365109d1165d78d98e33c5bfd815a9b5d7d070f578caefaabcc5771825b4ecb5", size = 2074921 }, + { url = "https://files.pythonhosted.org/packages/62/37/ea94d1d0c01dec1b7d236c7cec9103baab0021f42500975de3d42522104b/pydantic_core-2.41.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:706abf21e60a2857acdb09502bc853ee5bce732955e7b723b10311114f033115", size = 2187767 }, + { url = "https://files.pythonhosted.org/packages/d3/fe/694cf9fdd3a777a618c3afd210dba7b414cb8a72b1bd29b199c2e5765fee/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bf0bd5417acf7f6a7ec3b53f2109f587be176cb35f9cf016da87e6017437a72d", size = 2136062 }, + { url = "https://files.pythonhosted.org/packages/0f/ae/174aeabd89916fbd2988cc37b81a59e1186e952afd2a7ed92018c22f31ca/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:2e71b1c6ceb9c78424ae9f63a07292fb769fb890a4e7efca5554c47f33a60ea5", size = 2317819 }, + { url = "https://files.pythonhosted.org/packages/65/e8/e9aecafaebf53fc456314f72886068725d6fba66f11b013532dc21259343/pydantic_core-2.41.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:80745b9770b4a38c25015b517451c817799bfb9d6499b0d13d8227ec941cb513", 
size = 2312267 }, + { url = "https://files.pythonhosted.org/packages/35/2f/1c2e71d2a052f9bb2f2df5a6a05464a0eb800f9e8d9dd800202fe31219e1/pydantic_core-2.41.1-cp312-cp312-win32.whl", hash = "sha256:83b64d70520e7890453f1aa21d66fda44e7b35f1cfea95adf7b4289a51e2b479", size = 1990927 }, + { url = "https://files.pythonhosted.org/packages/b1/78/562998301ff2588b9c6dcc5cb21f52fa919d6e1decc75a35055feb973594/pydantic_core-2.41.1-cp312-cp312-win_amd64.whl", hash = "sha256:377defd66ee2003748ee93c52bcef2d14fde48fe28a0b156f88c3dbf9bc49a50", size = 2034703 }, + { url = "https://files.pythonhosted.org/packages/b2/53/d95699ce5a5cdb44bb470bd818b848b9beadf51459fd4ea06667e8ede862/pydantic_core-2.41.1-cp312-cp312-win_arm64.whl", hash = "sha256:c95caff279d49c1d6cdfe2996e6c2ad712571d3b9caaa209a404426c326c4bde", size = 1972719 }, + { url = "https://files.pythonhosted.org/packages/2b/3e/a51c5f5d37b9288ba30683d6e96f10fa8f1defad1623ff09f1020973b577/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:b04fa9ed049461a7398138c604b00550bc89e3e1151d84b81ad6dc93e39c4c06", size = 2115344 }, + { url = "https://files.pythonhosted.org/packages/5a/bd/389504c9e0600ef4502cd5238396b527afe6ef8981a6a15cd1814fc7b434/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:b3b7d9cfbfdc43c80a16638c6dc2768e3956e73031fca64e8e1a3ae744d1faeb", size = 1927994 }, + { url = "https://files.pythonhosted.org/packages/ff/9c/5111c6b128861cb792a4c082677e90dac4f2e090bb2e2fe06aa5b2d39027/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eec83fc6abef04c7f9bec616e2d76ee9a6a4ae2a359b10c21d0f680e24a247ca", size = 1959394 }, + { url = "https://files.pythonhosted.org/packages/14/3f/cfec8b9a0c48ce5d64409ec5e1903cb0b7363da38f14b41de2fcb3712700/pydantic_core-2.41.1-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6771a2d9f83c4038dfad5970a3eef215940682b2175e32bcc817bdc639019b28", size = 2147365 }, ] [[package]] @@ -849,9 +1126,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pydantic" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/58/70/c21ed1ce36a947c5a7fee04c5d3926db4907f00bc29b193759d675554329/pydantic_i18n-0.4.5.tar.gz", hash = "sha256:37c3b40df31713dba27c436d15a8d894d6022f3da5b78a40805e6b64edde34a3", size = 78725, upload-time = "2024-09-22T15:29:39.828Z" } +sdist = { url = "https://files.pythonhosted.org/packages/58/70/c21ed1ce36a947c5a7fee04c5d3926db4907f00bc29b193759d675554329/pydantic_i18n-0.4.5.tar.gz", hash = "sha256:37c3b40df31713dba27c436d15a8d894d6022f3da5b78a40805e6b64edde34a3", size = 78725 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7e/3b/4d2630503016cedef1751bc9ddea85b437fbfc9ca65d6af87285d76b7c2c/pydantic_i18n-0.4.5-py3-none-any.whl", hash = "sha256:592ae6b4fee13eb0193dc0c7bdc1e629d2ab1d732d5508368412a338b16cfece", size = 10436, upload-time = "2024-09-22T15:29:38.397Z" }, + { url = "https://files.pythonhosted.org/packages/7e/3b/4d2630503016cedef1751bc9ddea85b437fbfc9ca65d6af87285d76b7c2c/pydantic_i18n-0.4.5-py3-none-any.whl", hash = "sha256:592ae6b4fee13eb0193dc0c7bdc1e629d2ab1d732d5508368412a338b16cfece", size = 10436 }, ] [[package]] @@ -863,9 +1140,9 @@ dependencies = [ { name = "python-dotenv" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184, upload-time = "2025-11-10T14:25:47.013Z" } +sdist = { url = "https://files.pythonhosted.org/packages/43/4b/ac7e0aae12027748076d72a8764ff1c9d82ca75a7a52622e67ed3f765c54/pydantic_settings-2.12.0.tar.gz", hash = "sha256:005538ef951e3c2a68e1c08b292b5f2e71490def8589d4221b95dab00dafcfd0", size = 194184 } 
wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880, upload-time = "2025-11-10T14:25:45.546Z" }, + { url = "https://files.pythonhosted.org/packages/c1/60/5d4751ba3f4a40a6891f24eec885f51afd78d208498268c734e256fb13c4/pydantic_settings-2.12.0-py3-none-any.whl", hash = "sha256:fddb9fd99a5b18da837b29710391e945b1e30c135477f484084ee513adb93809", size = 51880 }, ] [[package]] @@ -875,18 +1152,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/75/c1/1c55272f49d761cec38ddb80be9817935b9c91ebd6a8988e10f532868d56/pydash-8.0.6.tar.gz", hash = "sha256:b2821547e9723f69cf3a986be4db64de41730be149b2641947ecd12e1e11025a", size = 164338, upload-time = "2026-01-17T16:42:56.576Z" } +sdist = { url = "https://files.pythonhosted.org/packages/75/c1/1c55272f49d761cec38ddb80be9817935b9c91ebd6a8988e10f532868d56/pydash-8.0.6.tar.gz", hash = "sha256:b2821547e9723f69cf3a986be4db64de41730be149b2641947ecd12e1e11025a", size = 164338 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a5/b7/cc5e7974699db40014d58c7dd7c4ad4ffc244d36930dc9ec7d06ee67d7a9/pydash-8.0.6-py3-none-any.whl", hash = "sha256:ee70a81a5b292c007f28f03a4ee8e75c1f5d7576df5457b836ec7ab2839cc5d0", size = 101561, upload-time = "2026-01-17T16:42:55.448Z" }, + { url = "https://files.pythonhosted.org/packages/a5/b7/cc5e7974699db40014d58c7dd7c4ad4ffc244d36930dc9ec7d06ee67d7a9/pydash-8.0.6-py3-none-any.whl", hash = "sha256:ee70a81a5b292c007f28f03a4ee8e75c1f5d7576df5457b836ec7ab2839cc5d0", size = 101561 }, ] [[package]] name = "pyjwt" version = "2.10.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785, upload-time = "2024-11-28T03:43:29.933Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/46/bd74733ff231675599650d3e47f361794b22ef3e3770998dda30d3b63726/pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953", size = 87785 } wheels = [ - { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, + { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997 }, ] [package.optional-dependencies] @@ -901,27 +1178,36 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "six" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = 
"sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, ] [[package]] name = "python-dotenv" version = "1.2.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221 } wheels = [ - { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, + { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230 }, ] [[package]] name = "python-multipart" version = "0.0.21" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/78/96/804520d0850c7db98e5ccb70282e29208723f0964e88ffd9d0da2f52ea09/python_multipart-0.0.21.tar.gz", hash = "sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92", size = 37196, upload-time = 
"2025-12-17T09:24:22.446Z" } +sdist = { url = "https://files.pythonhosted.org/packages/78/96/804520d0850c7db98e5ccb70282e29208723f0964e88ffd9d0da2f52ea09/python_multipart-0.0.21.tar.gz", hash = "sha256:7137ebd4d3bbf70ea1622998f902b97a29434a9e8dc40eb203bbcf7c2a2cba92", size = 37196 } wheels = [ - { url = "https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541, upload-time = "2025-12-17T09:24:21.153Z" }, + { url = "https://files.pythonhosted.org/packages/aa/76/03af049af4dcee5d27442f71b6924f01f3efb5d2bd34f23fcd563f2cc5f5/python_multipart-0.0.21-py3-none-any.whl", hash = "sha256:cf7a6713e01c87aa35387f4774e812c4361150938d20d232800f75ffcf266090", size = 24541 }, +] + +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 }, ] [[package]] @@ -929,9 +1215,18 @@ name = "pywin32" version = "311" source = { registry = "https://pypi.org/simple" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, - { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543 }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040 }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = "sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102 }, +] + +[[package]] +name = "redis" +version = "7.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/c8/983d5c6579a411d8a99bc5823cc5712768859b5ce2c8afe1a65b37832c81/redis-7.1.0.tar.gz", hash = "sha256:b1cc3cfa5a2cb9c2ab3ba700864fb0ad75617b41f01352ce5779dabf6d5f9c3c", size = 4796669 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/f0/8956f8a86b20d7bb9d6ac0187cf4cd54d8065bc9a1a09eb8011d4d326596/redis-7.1.0-py3-none-any.whl", hash = "sha256:23c52b208f92b56103e17c5d06bdc1a6c2c0b3106583985a76a18f83b265de2b", size = 354159 }, ] [[package]] @@ -943,33 
+1238,33 @@ dependencies = [ { name = "rpds-py" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036, upload-time = "2025-10-13T15:30:48.871Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/f5/df4e9027acead3ecc63e50fe1e36aca1523e1719559c499951bb4b53188f/referencing-0.37.0.tar.gz", hash = "sha256:44aefc3142c5b842538163acb373e24cce6632bd54bdb01b21ad5863489f50d8", size = 78036 } wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766, upload-time = "2025-10-13T15:30:47.625Z" }, + { url = "https://files.pythonhosted.org/packages/2c/58/ca301544e1fa93ed4f80d724bf5b194f6e4b945841c5bfd555878eea9fcb/referencing-0.37.0-py3-none-any.whl", hash = "sha256:381329a9f99628c9069361716891d34ad94af76e461dcb0335825aecc7692231", size = 26766 }, ] [[package]] name = "regex" version = "2026.1.15" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811, upload-time = "2026-01-14T23:18:02.775Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/86/07d5056945f9ec4590b518171c4254a5925832eb727b56d3c38a7476f316/regex-2026.1.15.tar.gz", hash = "sha256:164759aa25575cbc0651bef59a0b18353e54300d79ace8084c818ad8ac72b7d5", size = 414811 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/92/81/10d8cf43c807d0326efe874c1b79f22bfb0fb226027b0b19ebc26d301408/regex-2026.1.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4c8fcc5793dde01641a35905d6731ee1548f02b956815f8f1cab89e515a5bdf1", size = 489398, upload-time = "2026-01-14T23:14:43.741Z" }, - { url = "https://files.pythonhosted.org/packages/90/b0/7c2a74e74ef2a7c32de724658a69a862880e3e4155cba992ba04d1c70400/regex-2026.1.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bfd876041a956e6a90ad7cdb3f6a630c07d491280bfeed4544053cd434901681", size = 291339, upload-time = "2026-01-14T23:14:45.183Z" }, - { url = "https://files.pythonhosted.org/packages/19/4d/16d0773d0c818417f4cc20aa0da90064b966d22cd62a8c46765b5bd2d643/regex-2026.1.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9250d087bc92b7d4899ccd5539a1b2334e44eee85d848c4c1aef8e221d3f8c8f", size = 289003, upload-time = "2026-01-14T23:14:47.25Z" }, - { url = "https://files.pythonhosted.org/packages/c6/e4/1fc4599450c9f0863d9406e944592d968b8d6dfd0d552a7d569e43bceada/regex-2026.1.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8a154cf6537ebbc110e24dabe53095e714245c272da9c1be05734bdad4a61aa", size = 798656, upload-time = "2026-01-14T23:14:48.77Z" }, - { url = "https://files.pythonhosted.org/packages/b2/e6/59650d73a73fa8a60b3a590545bfcf1172b4384a7df2e7fe7b9aab4e2da9/regex-2026.1.15-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8050ba2e3ea1d8731a549e83c18d2f0999fbc99a5f6bd06b4c91449f55291804", size = 864252, upload-time = "2026-01-14T23:14:50.528Z" }, - { url = "https://files.pythonhosted.org/packages/6e/ab/1d0f4d50a1638849a97d731364c9a80fa304fec46325e48330c170ee8e80/regex-2026.1.15-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf065240704cb8951cc04972cf107063917022511273e0969bdb34fc173456c", size = 912268, upload-time = 
"2026-01-14T23:14:52.952Z" }, - { url = "https://files.pythonhosted.org/packages/dd/df/0d722c030c82faa1d331d1921ee268a4e8fb55ca8b9042c9341c352f17fa/regex-2026.1.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c32bef3e7aeee75746748643667668ef941d28b003bfc89994ecf09a10f7a1b5", size = 803589, upload-time = "2026-01-14T23:14:55.182Z" }, - { url = "https://files.pythonhosted.org/packages/66/23/33289beba7ccb8b805c6610a8913d0131f834928afc555b241caabd422a9/regex-2026.1.15-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d5eaa4a4c5b1906bd0d2508d68927f15b81821f85092e06f1a34a4254b0e1af3", size = 775700, upload-time = "2026-01-14T23:14:56.707Z" }, - { url = "https://files.pythonhosted.org/packages/e7/65/bf3a42fa6897a0d3afa81acb25c42f4b71c274f698ceabd75523259f6688/regex-2026.1.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:86c1077a3cc60d453d4084d5b9649065f3bf1184e22992bd322e1f081d3117fb", size = 787928, upload-time = "2026-01-14T23:14:58.312Z" }, - { url = "https://files.pythonhosted.org/packages/f4/f5/13bf65864fc314f68cdd6d8ca94adcab064d4d39dbd0b10fef29a9da48fc/regex-2026.1.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2b091aefc05c78d286657cd4db95f2e6313375ff65dcf085e42e4c04d9c8d410", size = 858607, upload-time = "2026-01-14T23:15:00.657Z" }, - { url = "https://files.pythonhosted.org/packages/a3/31/040e589834d7a439ee43fb0e1e902bc81bd58a5ba81acffe586bb3321d35/regex-2026.1.15-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:57e7d17f59f9ebfa9667e6e5a1c0127b96b87cb9cede8335482451ed00788ba4", size = 763729, upload-time = "2026-01-14T23:15:02.248Z" }, - { url = "https://files.pythonhosted.org/packages/9b/84/6921e8129687a427edf25a34a5594b588b6d88f491320b9de5b6339a4fcb/regex-2026.1.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c6c4dcdfff2c08509faa15d36ba7e5ef5fcfab25f1e8f85a0c8f45bc3a30725d", size = 850697, upload-time = "2026-01-14T23:15:03.878Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/87/3d06143d4b128f4229158f2de5de6c8f2485170c7221e61bf381313314b2/regex-2026.1.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf8ff04c642716a7f2048713ddc6278c5fd41faa3b9cab12607c7abecd012c22", size = 789849, upload-time = "2026-01-14T23:15:06.102Z" }, - { url = "https://files.pythonhosted.org/packages/77/69/c50a63842b6bd48850ebc7ab22d46e7a2a32d824ad6c605b218441814639/regex-2026.1.15-cp312-cp312-win32.whl", hash = "sha256:82345326b1d8d56afbe41d881fdf62f1926d7264b2fc1537f99ae5da9aad7913", size = 266279, upload-time = "2026-01-14T23:15:07.678Z" }, - { url = "https://files.pythonhosted.org/packages/f2/36/39d0b29d087e2b11fd8191e15e81cce1b635fcc845297c67f11d0d19274d/regex-2026.1.15-cp312-cp312-win_amd64.whl", hash = "sha256:4def140aa6156bc64ee9912383d4038f3fdd18fee03a6f222abd4de6357ce42a", size = 277166, upload-time = "2026-01-14T23:15:09.257Z" }, - { url = "https://files.pythonhosted.org/packages/28/32/5b8e476a12262748851fa8ab1b0be540360692325975b094e594dfebbb52/regex-2026.1.15-cp312-cp312-win_arm64.whl", hash = "sha256:c6c565d9a6e1a8d783c1948937ffc377dd5771e83bd56de8317c450a954d2056", size = 270415, upload-time = "2026-01-14T23:15:10.743Z" }, + { url = "https://files.pythonhosted.org/packages/92/81/10d8cf43c807d0326efe874c1b79f22bfb0fb226027b0b19ebc26d301408/regex-2026.1.15-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:4c8fcc5793dde01641a35905d6731ee1548f02b956815f8f1cab89e515a5bdf1", size = 489398 }, + { url = "https://files.pythonhosted.org/packages/90/b0/7c2a74e74ef2a7c32de724658a69a862880e3e4155cba992ba04d1c70400/regex-2026.1.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bfd876041a956e6a90ad7cdb3f6a630c07d491280bfeed4544053cd434901681", size = 291339 }, + { url = "https://files.pythonhosted.org/packages/19/4d/16d0773d0c818417f4cc20aa0da90064b966d22cd62a8c46765b5bd2d643/regex-2026.1.15-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:9250d087bc92b7d4899ccd5539a1b2334e44eee85d848c4c1aef8e221d3f8c8f", size = 289003 }, + { url = "https://files.pythonhosted.org/packages/c6/e4/1fc4599450c9f0863d9406e944592d968b8d6dfd0d552a7d569e43bceada/regex-2026.1.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c8a154cf6537ebbc110e24dabe53095e714245c272da9c1be05734bdad4a61aa", size = 798656 }, + { url = "https://files.pythonhosted.org/packages/b2/e6/59650d73a73fa8a60b3a590545bfcf1172b4384a7df2e7fe7b9aab4e2da9/regex-2026.1.15-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:8050ba2e3ea1d8731a549e83c18d2f0999fbc99a5f6bd06b4c91449f55291804", size = 864252 }, + { url = "https://files.pythonhosted.org/packages/6e/ab/1d0f4d50a1638849a97d731364c9a80fa304fec46325e48330c170ee8e80/regex-2026.1.15-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0bf065240704cb8951cc04972cf107063917022511273e0969bdb34fc173456c", size = 912268 }, + { url = "https://files.pythonhosted.org/packages/dd/df/0d722c030c82faa1d331d1921ee268a4e8fb55ca8b9042c9341c352f17fa/regex-2026.1.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c32bef3e7aeee75746748643667668ef941d28b003bfc89994ecf09a10f7a1b5", size = 803589 }, + { url = "https://files.pythonhosted.org/packages/66/23/33289beba7ccb8b805c6610a8913d0131f834928afc555b241caabd422a9/regex-2026.1.15-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d5eaa4a4c5b1906bd0d2508d68927f15b81821f85092e06f1a34a4254b0e1af3", size = 775700 }, + { url = "https://files.pythonhosted.org/packages/e7/65/bf3a42fa6897a0d3afa81acb25c42f4b71c274f698ceabd75523259f6688/regex-2026.1.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:86c1077a3cc60d453d4084d5b9649065f3bf1184e22992bd322e1f081d3117fb", size = 787928 }, + { url = 
"https://files.pythonhosted.org/packages/f4/f5/13bf65864fc314f68cdd6d8ca94adcab064d4d39dbd0b10fef29a9da48fc/regex-2026.1.15-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:2b091aefc05c78d286657cd4db95f2e6313375ff65dcf085e42e4c04d9c8d410", size = 858607 }, + { url = "https://files.pythonhosted.org/packages/a3/31/040e589834d7a439ee43fb0e1e902bc81bd58a5ba81acffe586bb3321d35/regex-2026.1.15-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:57e7d17f59f9ebfa9667e6e5a1c0127b96b87cb9cede8335482451ed00788ba4", size = 763729 }, + { url = "https://files.pythonhosted.org/packages/9b/84/6921e8129687a427edf25a34a5594b588b6d88f491320b9de5b6339a4fcb/regex-2026.1.15-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:c6c4dcdfff2c08509faa15d36ba7e5ef5fcfab25f1e8f85a0c8f45bc3a30725d", size = 850697 }, + { url = "https://files.pythonhosted.org/packages/8a/87/3d06143d4b128f4229158f2de5de6c8f2485170c7221e61bf381313314b2/regex-2026.1.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:cf8ff04c642716a7f2048713ddc6278c5fd41faa3b9cab12607c7abecd012c22", size = 789849 }, + { url = "https://files.pythonhosted.org/packages/77/69/c50a63842b6bd48850ebc7ab22d46e7a2a32d824ad6c605b218441814639/regex-2026.1.15-cp312-cp312-win32.whl", hash = "sha256:82345326b1d8d56afbe41d881fdf62f1926d7264b2fc1537f99ae5da9aad7913", size = 266279 }, + { url = "https://files.pythonhosted.org/packages/f2/36/39d0b29d087e2b11fd8191e15e81cce1b635fcc845297c67f11d0d19274d/regex-2026.1.15-cp312-cp312-win_amd64.whl", hash = "sha256:4def140aa6156bc64ee9912383d4038f3fdd18fee03a6f222abd4de6357ce42a", size = 277166 }, + { url = "https://files.pythonhosted.org/packages/28/32/5b8e476a12262748851fa8ab1b0be540360692325975b094e594dfebbb52/regex-2026.1.15-cp312-cp312-win_arm64.whl", hash = "sha256:c6c565d9a6e1a8d783c1948937ffc377dd5771e83bd56de8317c450a954d2056", size = 270415 }, ] [[package]] @@ -982,59 +1277,68 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = 
"https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738 }, ] [[package]] name = "rpds-py" version = "0.30.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469, upload-time = "2025-11-30T20:24:38.837Z" } +sdist = { url = "https://files.pythonhosted.org/packages/20/af/3f2f423103f1113b36230496629986e0ef7e199d2aa8392452b484b38ced/rpds_py-0.30.0.tar.gz", hash = "sha256:dd8ff7cf90014af0c0f787eea34794ebf6415242ee1d6fa91eaba725cc441e84", size = 69469 } wheels = [ - { url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086, 
upload-time = "2025-11-30T20:22:17.93Z" }, - { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053, upload-time = "2025-11-30T20:22:19.297Z" }, - { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763, upload-time = "2025-11-30T20:22:21.661Z" }, - { url = "https://files.pythonhosted.org/packages/d4/36/eb2eb8515e2ad24c0bd43c3ee9cd74c33f7ca6430755ccdb240fd3144c44/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd", size = 408951, upload-time = "2025-11-30T20:22:23.408Z" }, - { url = "https://files.pythonhosted.org/packages/d6/65/ad8dc1784a331fabbd740ef6f71ce2198c7ed0890dab595adb9ea2d775a1/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f", size = 514622, upload-time = "2025-11-30T20:22:25.16Z" }, - { url = "https://files.pythonhosted.org/packages/63/8e/0cfa7ae158e15e143fe03993b5bcd743a59f541f5952e1546b1ac1b5fd45/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1", size = 414492, upload-time = "2025-11-30T20:22:26.505Z" }, - { url = "https://files.pythonhosted.org/packages/60/1b/6f8f29f3f995c7ffdde46a626ddccd7c63aefc0efae881dc13b6e5d5bb16/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23", size = 394080, upload-time = 
"2025-11-30T20:22:27.934Z" }, - { url = "https://files.pythonhosted.org/packages/6d/d5/a266341051a7a3ca2f4b750a3aa4abc986378431fc2da508c5034d081b70/rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6", size = 408680, upload-time = "2025-11-30T20:22:29.341Z" }, - { url = "https://files.pythonhosted.org/packages/10/3b/71b725851df9ab7a7a4e33cf36d241933da66040d195a84781f49c50490c/rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51", size = 423589, upload-time = "2025-11-30T20:22:31.469Z" }, - { url = "https://files.pythonhosted.org/packages/00/2b/e59e58c544dc9bd8bd8384ecdb8ea91f6727f0e37a7131baeff8d6f51661/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5", size = 573289, upload-time = "2025-11-30T20:22:32.997Z" }, - { url = "https://files.pythonhosted.org/packages/da/3e/a18e6f5b460893172a7d6a680e86d3b6bc87a54c1f0b03446a3c8c7b588f/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e", size = 599737, upload-time = "2025-11-30T20:22:34.419Z" }, - { url = "https://files.pythonhosted.org/packages/5c/e2/714694e4b87b85a18e2c243614974413c60aa107fd815b8cbc42b873d1d7/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394", size = 563120, upload-time = "2025-11-30T20:22:35.903Z" }, - { url = "https://files.pythonhosted.org/packages/6f/ab/d5d5e3bcedb0a77f4f613706b750e50a5a3ba1c15ccd3665ecc636c968fd/rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf", size = 223782, upload-time = "2025-11-30T20:22:37.271Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/3b/f786af9957306fdc38a74cef405b7b93180f481fb48453a114bb6465744a/rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b", size = 240463, upload-time = "2025-11-30T20:22:39.021Z" }, - { url = "https://files.pythonhosted.org/packages/f3/d2/b91dc748126c1559042cfe41990deb92c4ee3e2b415f6b5234969ffaf0cc/rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e", size = 230868, upload-time = "2025-11-30T20:22:40.493Z" }, + { url = "https://files.pythonhosted.org/packages/03/e7/98a2f4ac921d82f33e03f3835f5bf3a4a40aa1bfdc57975e74a97b2b4bdd/rpds_py-0.30.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a161f20d9a43006833cd7068375a94d035714d73a172b681d8881820600abfad", size = 375086 }, + { url = "https://files.pythonhosted.org/packages/4d/a1/bca7fd3d452b272e13335db8d6b0b3ecde0f90ad6f16f3328c6fb150c889/rpds_py-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6abc8880d9d036ecaafe709079969f56e876fcf107f7a8e9920ba6d5a3878d05", size = 359053 }, + { url = "https://files.pythonhosted.org/packages/65/1c/ae157e83a6357eceff62ba7e52113e3ec4834a84cfe07fa4b0757a7d105f/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca28829ae5f5d569bb62a79512c842a03a12576375d5ece7d2cadf8abe96ec28", size = 390763 }, + { url = "https://files.pythonhosted.org/packages/d4/36/eb2eb8515e2ad24c0bd43c3ee9cd74c33f7ca6430755ccdb240fd3144c44/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a1010ed9524c73b94d15919ca4d41d8780980e1765babf85f9a2f90d247153dd", size = 408951 }, + { url = "https://files.pythonhosted.org/packages/d6/65/ad8dc1784a331fabbd740ef6f71ce2198c7ed0890dab595adb9ea2d775a1/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8d1736cfb49381ba528cd5baa46f82fdc65c06e843dab24dd70b63d09121b3f", 
size = 514622 }, + { url = "https://files.pythonhosted.org/packages/63/8e/0cfa7ae158e15e143fe03993b5bcd743a59f541f5952e1546b1ac1b5fd45/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d948b135c4693daff7bc2dcfc4ec57237a29bd37e60c2fabf5aff2bbacf3e2f1", size = 414492 }, + { url = "https://files.pythonhosted.org/packages/60/1b/6f8f29f3f995c7ffdde46a626ddccd7c63aefc0efae881dc13b6e5d5bb16/rpds_py-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47f236970bccb2233267d89173d3ad2703cd36a0e2a6e92d0560d333871a3d23", size = 394080 }, + { url = "https://files.pythonhosted.org/packages/6d/d5/a266341051a7a3ca2f4b750a3aa4abc986378431fc2da508c5034d081b70/rpds_py-0.30.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:2e6ecb5a5bcacf59c3f912155044479af1d0b6681280048b338b28e364aca1f6", size = 408680 }, + { url = "https://files.pythonhosted.org/packages/10/3b/71b725851df9ab7a7a4e33cf36d241933da66040d195a84781f49c50490c/rpds_py-0.30.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a8fa71a2e078c527c3e9dc9fc5a98c9db40bcc8a92b4e8858e36d329f8684b51", size = 423589 }, + { url = "https://files.pythonhosted.org/packages/00/2b/e59e58c544dc9bd8bd8384ecdb8ea91f6727f0e37a7131baeff8d6f51661/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73c67f2db7bc334e518d097c6d1e6fed021bbc9b7d678d6cc433478365d1d5f5", size = 573289 }, + { url = "https://files.pythonhosted.org/packages/da/3e/a18e6f5b460893172a7d6a680e86d3b6bc87a54c1f0b03446a3c8c7b588f/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5ba103fb455be00f3b1c2076c9d4264bfcb037c976167a6047ed82f23153f02e", size = 599737 }, + { url = "https://files.pythonhosted.org/packages/5c/e2/714694e4b87b85a18e2c243614974413c60aa107fd815b8cbc42b873d1d7/rpds_py-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7cee9c752c0364588353e627da8a7e808a66873672bcb5f52890c33fd965b394", size = 563120 }, + { url = 
"https://files.pythonhosted.org/packages/6f/ab/d5d5e3bcedb0a77f4f613706b750e50a5a3ba1c15ccd3665ecc636c968fd/rpds_py-0.30.0-cp312-cp312-win32.whl", hash = "sha256:1ab5b83dbcf55acc8b08fc62b796ef672c457b17dbd7820a11d6c52c06839bdf", size = 223782 }, + { url = "https://files.pythonhosted.org/packages/39/3b/f786af9957306fdc38a74cef405b7b93180f481fb48453a114bb6465744a/rpds_py-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:a090322ca841abd453d43456ac34db46e8b05fd9b3b4ac0c78bcde8b089f959b", size = 240463 }, + { url = "https://files.pythonhosted.org/packages/f3/d2/b91dc748126c1559042cfe41990deb92c4ee3e2b415f6b5234969ffaf0cc/rpds_py-0.30.0-cp312-cp312-win_arm64.whl", hash = "sha256:669b1805bd639dd2989b281be2cfd951c6121b65e729d9b843e9639ef1fd555e", size = 230868 }, ] [[package]] name = "six" version = "1.17.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, +] + +[[package]] +name = "slack-sdk" +version = "3.39.0" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b6/dd/645f3eb93fce38eadbb649e85684730b1fc3906c2674ca59bddc2ca2bd2e/slack_sdk-3.39.0.tar.gz", hash = "sha256:6a56be10dc155c436ff658c6b776e1c082e29eae6a771fccf8b0a235822bbcb1", size = 247207 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/1f/32bcf088e535c1870b1a1f2e3b916129c66fdfe565a793316317241d41e5/slack_sdk-3.39.0-py2.py3-none-any.whl", hash = "sha256:b1556b2f5b8b12b94e5ea3f56c4f2c7f04462e4e1013d325c5764ff118044fa8", size = 309850 }, ] [[package]] name = "sniffio" version = "1.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, ] [[package]] name = "sqids" version = "0.5.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/33/5b/98c1b37109210631875092d9e7cb7aef3fda2f03780dd999fe5854afa5f3/sqids-0.5.2.tar.gz", hash 
= "sha256:5ac08f0c5c9b6814bc2e7c79ee5931e0849d25d95c50e415771b022a44f58af9", size = 18213, upload-time = "2025-05-13T16:36:35.644Z" } +sdist = { url = "https://files.pythonhosted.org/packages/33/5b/98c1b37109210631875092d9e7cb7aef3fda2f03780dd999fe5854afa5f3/sqids-0.5.2.tar.gz", hash = "sha256:5ac08f0c5c9b6814bc2e7c79ee5931e0849d25d95c50e415771b022a44f58af9", size = 18213 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/96/178018f3d5b871042e257e9e1db26c6aeb2a704e72cdc884cd2a8918ac2b/sqids-0.5.2-py3-none-any.whl", hash = "sha256:0089ba823e21fd44290c7225f02fb0b5140c36e41959c04d86d3f6f2513799be", size = 8870, upload-time = "2025-05-13T16:36:34.072Z" }, + { url = "https://files.pythonhosted.org/packages/7c/96/178018f3d5b871042e257e9e1db26c6aeb2a704e72cdc884cd2a8918ac2b/sqids-0.5.2-py3-none-any.whl", hash = "sha256:0089ba823e21fd44290c7225f02fb0b5140c36e41959c04d86d3f6f2513799be", size = 8870 }, ] [[package]] @@ -1045,16 +1349,16 @@ dependencies = [ { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/06/aa/9ce0f3e7a9829ead5c8ce549392f33a12c4555a6c0609bb27d882e9c7ddf/sqlalchemy-2.0.46.tar.gz", hash = "sha256:cf36851ee7219c170bb0793dbc3da3e80c582e04a5437bc601bfe8c85c9216d7", size = 9865393, upload-time = "2026-01-21T18:03:45.119Z" } +sdist = { url = "https://files.pythonhosted.org/packages/06/aa/9ce0f3e7a9829ead5c8ce549392f33a12c4555a6c0609bb27d882e9c7ddf/sqlalchemy-2.0.46.tar.gz", hash = "sha256:cf36851ee7219c170bb0793dbc3da3e80c582e04a5437bc601bfe8c85c9216d7", size = 9865393 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/35/d16bfa235c8b7caba3730bba43e20b1e376d2224f407c178fbf59559f23e/sqlalchemy-2.0.46-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:3a9a72b0da8387f15d5810f1facca8f879de9b85af8c645138cba61ea147968c", size = 2153405, upload-time = "2026-01-21T19:05:54.143Z" }, - { url = "https://files.pythonhosted.org/packages/06/6c/3192e24486749862f495ddc6584ed730c0c994a67550ec395d872a2ad650/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2347c3f0efc4de367ba00218e0ae5c4ba2306e47216ef80d6e31761ac97cb0b9", size = 3334702, upload-time = "2026-01-21T18:46:45.384Z" }, - { url = "https://files.pythonhosted.org/packages/ea/a2/b9f33c8d68a3747d972a0bb758c6b63691f8fb8a49014bc3379ba15d4274/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9094c8b3197db12aa6f05c51c05daaad0a92b8c9af5388569847b03b1007fb1b", size = 3347664, upload-time = "2026-01-21T18:40:09.979Z" }, - { url = "https://files.pythonhosted.org/packages/aa/d2/3e59e2a91eaec9db7e8dc6b37b91489b5caeb054f670f32c95bcba98940f/sqlalchemy-2.0.46-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37fee2164cf21417478b6a906adc1a91d69ae9aba8f9533e67ce882f4bb1de53", size = 3277372, upload-time = "2026-01-21T18:46:47.168Z" }, - { url = "https://files.pythonhosted.org/packages/dd/dd/67bc2e368b524e2192c3927b423798deda72c003e73a1e94c21e74b20a85/sqlalchemy-2.0.46-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b1e14b2f6965a685c7128bd315e27387205429c2e339eeec55cb75ca4ab0ea2e", size = 3312425, upload-time = "2026-01-21T18:40:11.548Z" }, - { url = "https://files.pythonhosted.org/packages/43/82/0ecd68e172bfe62247e96cb47867c2d68752566811a4e8c9d8f6e7c38a65/sqlalchemy-2.0.46-cp312-cp312-win32.whl", hash = "sha256:412f26bb4ba942d52016edc8d12fb15d91d3cd46b0047ba46e424213ad407bcb", size = 2113155, upload-time = "2026-01-21T18:42:49.748Z" }, - { url = "https://files.pythonhosted.org/packages/bc/2a/2821a45742073fc0331dc132552b30de68ba9563230853437cac54b2b53e/sqlalchemy-2.0.46-cp312-cp312-win_amd64.whl", hash = 
"sha256:ea3cd46b6713a10216323cda3333514944e510aa691c945334713fca6b5279ff", size = 2140078, upload-time = "2026-01-21T18:42:51.197Z" }, - { url = "https://files.pythonhosted.org/packages/fc/a1/9c4efa03300926601c19c18582531b45aededfb961ab3c3585f1e24f120b/sqlalchemy-2.0.46-py3-none-any.whl", hash = "sha256:f9c11766e7e7c0a2767dda5acb006a118640c9fc0a4104214b96269bfb78399e", size = 1937882, upload-time = "2026-01-21T18:22:10.456Z" }, + { url = "https://files.pythonhosted.org/packages/b6/35/d16bfa235c8b7caba3730bba43e20b1e376d2224f407c178fbf59559f23e/sqlalchemy-2.0.46-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3a9a72b0da8387f15d5810f1facca8f879de9b85af8c645138cba61ea147968c", size = 2153405 }, + { url = "https://files.pythonhosted.org/packages/06/6c/3192e24486749862f495ddc6584ed730c0c994a67550ec395d872a2ad650/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2347c3f0efc4de367ba00218e0ae5c4ba2306e47216ef80d6e31761ac97cb0b9", size = 3334702 }, + { url = "https://files.pythonhosted.org/packages/ea/a2/b9f33c8d68a3747d972a0bb758c6b63691f8fb8a49014bc3379ba15d4274/sqlalchemy-2.0.46-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9094c8b3197db12aa6f05c51c05daaad0a92b8c9af5388569847b03b1007fb1b", size = 3347664 }, + { url = "https://files.pythonhosted.org/packages/aa/d2/3e59e2a91eaec9db7e8dc6b37b91489b5caeb054f670f32c95bcba98940f/sqlalchemy-2.0.46-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37fee2164cf21417478b6a906adc1a91d69ae9aba8f9533e67ce882f4bb1de53", size = 3277372 }, + { url = "https://files.pythonhosted.org/packages/dd/dd/67bc2e368b524e2192c3927b423798deda72c003e73a1e94c21e74b20a85/sqlalchemy-2.0.46-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b1e14b2f6965a685c7128bd315e27387205429c2e339eeec55cb75ca4ab0ea2e", size = 3312425 }, + { url = 
"https://files.pythonhosted.org/packages/43/82/0ecd68e172bfe62247e96cb47867c2d68752566811a4e8c9d8f6e7c38a65/sqlalchemy-2.0.46-cp312-cp312-win32.whl", hash = "sha256:412f26bb4ba942d52016edc8d12fb15d91d3cd46b0047ba46e424213ad407bcb", size = 2113155 }, + { url = "https://files.pythonhosted.org/packages/bc/2a/2821a45742073fc0331dc132552b30de68ba9563230853437cac54b2b53e/sqlalchemy-2.0.46-cp312-cp312-win_amd64.whl", hash = "sha256:ea3cd46b6713a10216323cda3333514944e510aa691c945334713fca6b5279ff", size = 2140078 }, + { url = "https://files.pythonhosted.org/packages/fc/a1/9c4efa03300926601c19c18582531b45aededfb961ab3c3585f1e24f120b/sqlalchemy-2.0.46-py3-none-any.whl", hash = "sha256:f9c11766e7e7c0a2767dda5acb006a118640c9fc0a4104214b96269bfb78399e", size = 1937882 }, ] [[package]] @@ -1064,9 +1368,9 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0f/7d/eb9565b6a49426552a5bf5c57e7c239c506dc0e4e5315aec6d1e8241dc7c/sqlalchemy_utils-0.42.1.tar.gz", hash = "sha256:881f9cd9e5044dc8f827bccb0425ce2e55490ce44fc0bb848c55cc8ee44cc02e", size = 130789, upload-time = "2025-12-13T03:14:13.591Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/7d/eb9565b6a49426552a5bf5c57e7c239c506dc0e4e5315aec6d1e8241dc7c/sqlalchemy_utils-0.42.1.tar.gz", hash = "sha256:881f9cd9e5044dc8f827bccb0425ce2e55490ce44fc0bb848c55cc8ee44cc02e", size = 130789 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7c/25/7400c18c3ee97914cc99c90007795c00a4ec5b60c853b49db7ba24d11179/sqlalchemy_utils-0.42.1-py3-none-any.whl", hash = "sha256:243cfe1b3a1dae3c74118ae633f1d1e0ed8c787387bc33e556e37c990594ac80", size = 91761, upload-time = "2025-12-13T03:14:15.014Z" }, + { url = "https://files.pythonhosted.org/packages/7c/25/7400c18c3ee97914cc99c90007795c00a4ec5b60c853b49db7ba24d11179/sqlalchemy_utils-0.42.1-py3-none-any.whl", hash = 
"sha256:243cfe1b3a1dae3c74118ae633f1d1e0ed8c787387bc33e556e37c990594ac80", size = 91761 }, ] [[package]] @@ -1077,9 +1381,9 @@ dependencies = [ { name = "pydantic" }, { name = "sqlalchemy" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/56/b8/e7cd6def4a773f25d6e29ffce63ccbfd6cf9488b804ab6fb9b80d334b39d/sqlmodel-0.0.31.tar.gz", hash = "sha256:2d41a8a9ee05e40736e2f9db8ea28cbfe9b5d4e5a18dd139e80605025e0c516c", size = 94952, upload-time = "2025-12-28T12:35:01.436Z" } +sdist = { url = "https://files.pythonhosted.org/packages/56/b8/e7cd6def4a773f25d6e29ffce63ccbfd6cf9488b804ab6fb9b80d334b39d/sqlmodel-0.0.31.tar.gz", hash = "sha256:2d41a8a9ee05e40736e2f9db8ea28cbfe9b5d4e5a18dd139e80605025e0c516c", size = 94952 } wheels = [ - { url = "https://files.pythonhosted.org/packages/6c/72/5aa5be921800f6418a949a73c9bb7054890881143e6bc604a93d228a95a3/sqlmodel-0.0.31-py3-none-any.whl", hash = "sha256:6d946d56cac4c2db296ba1541357cee2e795d68174e2043cd138b916794b1513", size = 27093, upload-time = "2025-12-28T12:35:00.108Z" }, + { url = "https://files.pythonhosted.org/packages/6c/72/5aa5be921800f6418a949a73c9bb7054890881143e6bc604a93d228a95a3/sqlmodel-0.0.31-py3-none-any.whl", hash = "sha256:6d946d56cac4c2db296ba1541357cee2e795d68174e2043cd138b916794b1513", size = 27093 }, ] [[package]] @@ -1090,9 +1394,9 @@ dependencies = [ { name = "anyio" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253, upload-time = "2026-01-17T13:11:05.62Z" } +sdist = { url = "https://files.pythonhosted.org/packages/8b/8d/00d280c03ffd39aaee0e86ec81e2d3b9253036a0f93f51d10503adef0e65/sse_starlette-3.2.0.tar.gz", hash = "sha256:8127594edfb51abe44eac9c49e59b0b01f1039d0c7461c6fd91d4e03b70da422", size = 27253 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/96/7f/832f015020844a8b8f7a9cbc103dd76ba8e3875004c41e08440ea3a2b41a/sse_starlette-3.2.0-py3-none-any.whl", hash = "sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf", size = 12763, upload-time = "2026-01-17T13:11:03.775Z" }, + { url = "https://files.pythonhosted.org/packages/96/7f/832f015020844a8b8f7a9cbc103dd76ba8e3875004c41e08440ea3a2b41a/sse_starlette-3.2.0-py3-none-any.whl", hash = "sha256:5876954bd51920fc2cd51baee47a080eb88a37b5b784e615abb0b283f801cdbf", size = 12763 }, ] [[package]] @@ -1103,9 +1407,9 @@ dependencies = [ { name = "anyio" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985, upload-time = "2025-11-01T15:25:27.516Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/b8/73a0e6a6e079a9d9cfa64113d771e421640b6f679a52eeb9b32f72d871a1/starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca", size = 2646985 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033, upload-time = "2025-11-01T15:25:25.461Z" }, + { url = "https://files.pythonhosted.org/packages/d9/52/1064f510b141bd54025f9b55105e26d1fa970b9be67ad766380a3c9b74b0/starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca", size = 74033 }, ] [[package]] @@ -1116,15 +1420,15 @@ dependencies = [ { name = "regex" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c4/4a/abaec53e93e3ef37224a4dd9e2fc6bb871e7a538c2b6b9d2a6397271daf4/tiktoken-0.7.0.tar.gz", 
hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6", size = 33437, upload-time = "2024-05-13T18:03:28.793Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/4a/abaec53e93e3ef37224a4dd9e2fc6bb871e7a538c2b6b9d2a6397271daf4/tiktoken-0.7.0.tar.gz", hash = "sha256:1077266e949c24e0291f6c350433c6f0971365ece2b173a23bc3b9f9defef6b6", size = 33437 } wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/46/4cdda4186ce900608f522da34acf442363346688c71b938a90a52d7b84cc/tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908", size = 960446, upload-time = "2024-05-13T18:02:54.409Z" }, - { url = "https://files.pythonhosted.org/packages/b6/30/09ced367d280072d7a3e21f34263dfbbf6378661e7a0f6414e7c18971083/tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410", size = 906652, upload-time = "2024-05-13T18:02:56.25Z" }, - { url = "https://files.pythonhosted.org/packages/e6/7b/c949e4954441a879a67626963dff69096e3c774758b9f2bb0853f7b4e1e7/tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704", size = 1047904, upload-time = "2024-05-13T18:02:57.707Z" }, - { url = "https://files.pythonhosted.org/packages/50/81/1842a22f15586072280364c2ab1e40835adaf64e42fe80e52aff921ee021/tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350", size = 1079836, upload-time = "2024-05-13T18:02:59.009Z" }, - { url = "https://files.pythonhosted.org/packages/6d/87/51a133a3d5307cf7ae3754249b0faaa91d3414b85c3d36f80b54d6817aa6/tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4", size = 1092472, upload-time = 
"2024-05-13T18:03:00.597Z" }, - { url = "https://files.pythonhosted.org/packages/a5/1f/c93517dc6d3b2c9e988b8e24f87a8b2d4a4ab28920a3a3f3ea338397ae0c/tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97", size = 1141881, upload-time = "2024-05-13T18:03:02.743Z" }, - { url = "https://files.pythonhosted.org/packages/bf/4b/48ca098cb580c099b5058bf62c4cb5e90ca6130fa43ef4df27088536245b/tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f", size = 799281, upload-time = "2024-05-13T18:03:04.036Z" }, + { url = "https://files.pythonhosted.org/packages/1d/46/4cdda4186ce900608f522da34acf442363346688c71b938a90a52d7b84cc/tiktoken-0.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:71c55d066388c55a9c00f61d2c456a6086673ab7dec22dd739c23f77195b1908", size = 960446 }, + { url = "https://files.pythonhosted.org/packages/b6/30/09ced367d280072d7a3e21f34263dfbbf6378661e7a0f6414e7c18971083/tiktoken-0.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09ed925bccaa8043e34c519fbb2f99110bd07c6fd67714793c21ac298e449410", size = 906652 }, + { url = "https://files.pythonhosted.org/packages/e6/7b/c949e4954441a879a67626963dff69096e3c774758b9f2bb0853f7b4e1e7/tiktoken-0.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03c6c40ff1db0f48a7b4d2dafeae73a5607aacb472fa11f125e7baf9dce73704", size = 1047904 }, + { url = "https://files.pythonhosted.org/packages/50/81/1842a22f15586072280364c2ab1e40835adaf64e42fe80e52aff921ee021/tiktoken-0.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20b5c6af30e621b4aca094ee61777a44118f52d886dbe4f02b70dfe05c15350", size = 1079836 }, + { url = "https://files.pythonhosted.org/packages/6d/87/51a133a3d5307cf7ae3754249b0faaa91d3414b85c3d36f80b54d6817aa6/tiktoken-0.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:d427614c3e074004efa2f2411e16c826f9df427d3c70a54725cae860f09e4bf4", size = 1092472 }, + { url = "https://files.pythonhosted.org/packages/a5/1f/c93517dc6d3b2c9e988b8e24f87a8b2d4a4ab28920a3a3f3ea338397ae0c/tiktoken-0.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c46d7af7b8c6987fac9b9f61041b452afe92eb087d29c9ce54951280f899a97", size = 1141881 }, + { url = "https://files.pythonhosted.org/packages/bf/4b/48ca098cb580c099b5058bf62c4cb5e90ca6130fa43ef4df27088536245b/tiktoken-0.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:0bc603c30b9e371e7c4c7935aba02af5994a909fc3c0fe66e7004070858d3f8f", size = 799281 }, ] [[package]] @@ -1134,18 +1438,18 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } wheels = [ - { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, ] [[package]] name = "typing-extensions" version = "4.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = 
"https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 } wheels = [ - { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 }, ] [[package]] @@ -1155,27 +1459,39 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949 } wheels = [ - { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = 
"sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611 }, ] [[package]] name = "tzdata" version = "2025.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772, upload-time = "2025-12-13T17:45:35.667Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/a7/c202b344c5ca7daf398f3b8a477eeb205cf3b6f32e7ec3a6bac0629ca975/tzdata-2025.3.tar.gz", hash = "sha256:de39c2ca5dc7b0344f2eba86f49d614019d29f060fc4ebc8a417896a620b56a7", size = 196772 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521, upload-time = "2025-12-13T17:45:33.889Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b0/003792df09decd6849a5e39c28b513c06e84436a54440380862b5aeff25d/tzdata-2025.3-py2.py3-none-any.whl", hash = "sha256:06a47e5700f3081aab02b2e513160914ff0694bce9947d6b76ebd6bf57cfc5d1", size = 348521 }, +] + +[[package]] +name = "tzlocal" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/8b/2e/c14812d3d4d9cd1773c6be938f89e5735a1f11a9f184ac3639b93cef35d5/tzlocal-5.3.1.tar.gz", hash = 
"sha256:cceffc7edecefea1f595541dbd6e990cb1ea3d19bf01b2809f362a03dd7921fd", size = 30761 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/14/e2a54fabd4f08cd7af1c07030603c3356b74da07f7cc056e600436edfa17/tzlocal-5.3.1-py3-none-any.whl", hash = "sha256:eb1a66c3ef5847adf7a834f1be0800581b683b5608e74f86ecbcef8ab91bb85d", size = 18026 }, ] [[package]] name = "urllib3" version = "2.6.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556 } wheels = [ - { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584 }, ] [[package]] @@ -1186,27 +1502,75 @@ dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761, upload-time = "2025-12-21T14:16:22.45Z" } +sdist = { url = 
"https://files.pythonhosted.org/packages/c3/d1/8f3c683c9561a4e6689dd3b1d345c815f10f86acd044ee1fb9a4dcd0b8c5/uvicorn-0.40.0.tar.gz", hash = "sha256:839676675e87e73694518b5574fd0f24c9d97b46bea16df7b8c05ea1a51071ea", size = 81761 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502 }, +] + +[[package]] +name = "vine" +version = "5.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/e4/d07b5f29d283596b9727dd5275ccbceb63c44a1a82aa9e4bfd20426762ac/vine-5.1.0.tar.gz", hash = "sha256:8b62e981d35c41049211cf62a0a1242d8c1ee9bd15bb196ce38aefd6799e61e0", size = 48980 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/03/ff/7c0c86c43b3cbb927e0ccc0255cb4057ceba4799cd44ae95174ce8e8b5b2/vine-5.1.0-py3-none-any.whl", hash = "sha256:40fdf3c48b2cfe1c38a49e9ae2da6fda88e4794c810050a728bd7413811fb1dc", size = 9636 }, +] + +[[package]] +name = "wcwidth" +version = "0.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c2/62/a7c072fbfefb2980a00f99ca994279cb9ecf310cb2e6b2a4d2a28fe192b3/wcwidth-0.5.3.tar.gz", hash = "sha256:53123b7af053c74e9fe2e92ac810301f6139e64379031f7124574212fb3b4091", size = 157587 } wheels = [ - { url = "https://files.pythonhosted.org/packages/3d/d8/2083a1daa7439a66f3a48589a57d576aa117726762618f6bb09fe3798796/uvicorn-0.40.0-py3-none-any.whl", hash = "sha256:c6c8f55bc8bf13eb6fa9ff87ad62308bbbc33d0b67f84293151efe87e0d5f2ee", size = 68502, upload-time = "2025-12-21T14:16:21.041Z" }, + { url = "https://files.pythonhosted.org/packages/3c/c1/d73f12f8cdb1891334a2ccf7389eed244d3941e74d80dd220badb937f3fb/wcwidth-0.5.3-py3-none-any.whl", hash = "sha256:d584eff31cd4753e1e5ff6c12e1edfdb324c995713f75d26c29807bb84bf649e", size = 92981 }, ] 
[[package]] name = "websockets" version = "15.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, - { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, - { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, - { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, - { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, - { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, - { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, - { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, - { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, - { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, - { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = 
"sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, - { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437 }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096 }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332 }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152 }, + { url = 
"https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096 }, + { url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523 }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790 }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165 }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160 }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395 }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841 }, + { 
url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743 }, +] + +[[package]] +name = "yarl" +version = "1.22.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/57/63/0c6ebca57330cd313f6102b16dd57ffaf3ec4c83403dcb45dbd15c6f3ea1/yarl-1.22.0.tar.gz", hash = "sha256:bebf8557577d4401ba8bd9ff33906f1376c877aa78d1fe216ad01b4d6745af71", size = 187169 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000 }, + { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338 }, + { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909 }, + { url = "https://files.pythonhosted.org/packages/60/41/9a1fe0b73dbcefce72e46cf149b0e0a67612d60bfc90fb59c2b2efdfbd86/yarl-1.22.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e1651bf8e0398574646744c1885a41198eba53dc8a9312b954073f845c90a8df", size = 372940 }, + { url = 
"https://files.pythonhosted.org/packages/17/7a/795cb6dfee561961c30b800f0ed616b923a2ec6258b5def2a00bf8231334/yarl-1.22.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b8a0588521a26bf92a57a1705b77b8b59044cdceccac7151bd8d229e66b8dedb", size = 345825 }, + { url = "https://files.pythonhosted.org/packages/d7/93/a58f4d596d2be2ae7bab1a5846c4d270b894958845753b2c606d666744d3/yarl-1.22.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:42188e6a615c1a75bcaa6e150c3fe8f3e8680471a6b10150c5f7e83f47cc34d2", size = 386705 }, + { url = "https://files.pythonhosted.org/packages/61/92/682279d0e099d0e14d7fd2e176bd04f48de1484f56546a3e1313cd6c8e7c/yarl-1.22.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f6d2cb59377d99718913ad9a151030d6f83ef420a2b8f521d94609ecc106ee82", size = 396518 }, + { url = "https://files.pythonhosted.org/packages/db/0f/0d52c98b8a885aeda831224b78f3be7ec2e1aa4a62091f9f9188c3c65b56/yarl-1.22.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50678a3b71c751d58d7908edc96d332af328839eea883bb554a43f539101277a", size = 377267 }, + { url = "https://files.pythonhosted.org/packages/22/42/d2685e35908cbeaa6532c1fc73e89e7f2efb5d8a7df3959ea8e37177c5a3/yarl-1.22.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e8fbaa7cec507aa24ea27a01456e8dd4b6fab829059b69844bd348f2d467124", size = 365797 }, + { url = "https://files.pythonhosted.org/packages/a2/83/cf8c7bcc6355631762f7d8bdab920ad09b82efa6b722999dfb05afa6cfac/yarl-1.22.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:433885ab5431bc3d3d4f2f9bd15bfa1614c522b0f1405d62c4f926ccd69d04fa", size = 365535 }, + { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = 
"sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324 }, + { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803 }, + { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220 }, + { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589 }, + { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213 }, + { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330 }, + { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814 }, ]