From 56e696cdb8c6e1e0d0cb88d71ed96d426cf7abbb Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:27:55 +0100 Subject: [PATCH 01/15] fix(pre-commit): call uv run w/ pytest --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9d67a98..4cb2791 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: hooks: - id: pytest name: pytest - entry: pytest + entry: uv run pytest language: system # Assumes pytest is installed in your environment (via pip install .[dev]) types: [python] # Run on changes to Python files pass_filenames: false # Pytest typically runs on the whole suite From 2d8e2329d0ec4837659e2fe038ac105b894771a7 Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:40:47 +0100 Subject: [PATCH 02/15] refactor: improve config into class M3Config --- py.typed | 0 src/m3/config.py | 89 ----------------------------- src/m3/core/__init__.py | 0 src/m3/core/config.py | 120 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 120 insertions(+), 89 deletions(-) create mode 100644 py.typed delete mode 100644 src/m3/config.py create mode 100644 src/m3/core/__init__.py create mode 100644 src/m3/core/config.py diff --git a/py.typed b/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/src/m3/config.py b/src/m3/config.py deleted file mode 100644 index eaa7e51..0000000 --- a/src/m3/config.py +++ /dev/null @@ -1,89 +0,0 @@ -import logging -from pathlib import Path - -APP_NAME = "m3" - -# Setup basic logging -logging.basicConfig( - level=logging.INFO, - format="%(asctime)s [%(levelname)-8s] %(name)s: %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", -) -logger = logging.getLogger(APP_NAME) - - -# ------------------------------------------------------------------- -# Data directory rooted at project root (two levels up from this file) -# 
------------------------------------------------------------------- -def _get_project_root() -> Path: - """ - Determine project root: - - If cloned repo: use repository root (two levels up from this file) - - If pip installed: ALWAYS use home directory - """ - package_root = Path(__file__).resolve().parents[2] - - # Check if we're in a cloned repository (has pyproject.toml at root) - if (package_root / "pyproject.toml").exists(): - return package_root - - # Pip installed: ALWAYS use home directory (simple and consistent) - return Path.home() - - -_PROJECT_ROOT = _get_project_root() -_PROJECT_DATA_DIR = _PROJECT_ROOT / "m3_data" - -DEFAULT_DATABASES_DIR = _PROJECT_DATA_DIR / "databases" -DEFAULT_RAW_FILES_DIR = _PROJECT_DATA_DIR / "raw_files" - - -# -------------------------------------------------- -# Dataset configurations (add more entries as needed) -# -------------------------------------------------- -SUPPORTED_DATASETS = { - "mimic-iv-demo": { - "file_listing_url": "https://physionet.org/files/mimic-iv-demo/2.2/", - "subdirectories_to_scan": ["hosp", "icu"], - "default_db_filename": "mimic_iv_demo.db", - "primary_verification_table": "hosp_admissions", # Table name in SQLite DB - }, - # add other datasets here... -} - - -# -------------------------------------------------- -# Helper functions -# -------------------------------------------------- -def get_dataset_config(dataset_name: str) -> dict | None: - """Retrieve the configuration for a given dataset (case-insensitive).""" - return SUPPORTED_DATASETS.get(dataset_name.lower()) - - -def get_default_database_path(dataset_name: str) -> Path | None: - """ - Return the default SQLite DB path for a given dataset, - under /m3_data/databases/. 
- """ - cfg = get_dataset_config(dataset_name) - if cfg and "default_db_filename" in cfg: - DEFAULT_DATABASES_DIR.mkdir(parents=True, exist_ok=True) - return DEFAULT_DATABASES_DIR / cfg["default_db_filename"] - - logger.warning(f"Missing default_db_filename for dataset: {dataset_name}") - return None - - -def get_dataset_raw_files_path(dataset_name: str) -> Path | None: - """ - Return the raw-file storage path for a dataset, - under /m3_data/raw_files//. - """ - cfg = get_dataset_config(dataset_name) - if cfg: - path = DEFAULT_RAW_FILES_DIR / dataset_name.lower() - path.mkdir(parents=True, exist_ok=True) - return path - - logger.warning(f"Unknown dataset, cannot determine raw path: {dataset_name}") - return None diff --git a/src/m3/core/__init__.py b/src/m3/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/m3/core/config.py b/src/m3/core/config.py new file mode 100644 index 0000000..09adde2 --- /dev/null +++ b/src/m3/core/config.py @@ -0,0 +1,120 @@ +import logging +import os +from pathlib import Path + +from beartype import beartype +from beartype.typing import Any, Dict, List, Optional + +from m3.core.tool.base import BaseTool +from m3.core.utils.exceptions import M3ConfigError +from m3.core.utils.logging import setup_logging + +logger = logging.getLogger(__name__) + + +@beartype +class M3Config: + def __init__( + self, + log_level: str = "INFO", + env_vars: Optional[Dict[str, str]] = None, + ) -> None: + self.log_level = log_level + self.env_vars = env_vars or {} + self._set_paths() + self._apply_config() + + def to_dict(self) -> Dict[str, Any]: + return { + "log_level": self.log_level, + "env_vars": self.env_vars.copy(), + } + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "M3Config": + try: + return cls( + log_level=data["log_level"], + env_vars=data["env_vars"], + ) + except KeyError as e: + raise M3ConfigError(f"Missing required config key: {e}") from e + + def get_env_var( + self, key: str, default: Optional[Any] = 
None, raise_if_missing: bool = False + ) -> Any: + value = self.env_vars.get(key, os.getenv(key, default)) + if value is None and raise_if_missing: + raise M3ConfigError( + f"Missing required env var: {key}", + details="Check your environment variables or config initialization.", + ) + logger.debug(f"Accessed env var '{key}': {'[set]' if value else '[unset]'}") + return value or "" + + def validate_for_tools(self, tools: List[BaseTool]) -> None: + errors = [] + for tool in tools: + for req_key, req_default in tool.required_env_vars.items(): + prefixed_key = f"{tool.__class__.__name__.upper()}_{req_key}" + if prefixed_key in self.env_vars or prefixed_key in os.environ: + key_for_error_check = prefixed_key + else: + key_for_error_check = req_key + error_message = self._get_env_var_error( + key_for_error_check, req_default + ) + if error_message: + errors.append( + f"Config validation failed for tool '{tool.__class__.__name__}': {error_message}" + ) + if errors: + raise M3ConfigError("\n".join(errors)) + logger.info(f"Validated config for {len(tools)} tools.") + + def merge_env(self, new_env: Dict[str, str], prefix: str = "") -> None: + for key, value in new_env.items(): + prefixed_key = f"{prefix}{key}" if prefix else key + if prefixed_key in self.env_vars and self.env_vars[prefixed_key] != value: + raise M3ConfigError( + f"Env conflict: {prefixed_key} ({self.env_vars[prefixed_key]} vs {value})" + ) + self.env_vars[prefixed_key] = value + logger.debug(f"Merged env: {prefixed_key} = {value}") + + def _set_paths(self) -> None: + self.project_root = self._get_project_root() + self.data_dir = self._get_data_dir() + self.databases_dir = self.data_dir / "databases" + self.raw_files_dir = self.data_dir / "raw_files" + + def _get_project_root(self) -> Path: + package_root = Path(__file__).resolve().parents[3] + if (package_root / "pyproject.toml").exists(): + return package_root + return Path.home() + + def _get_data_dir(self) -> Path: + data_dir_str = 
self.get_env_var("M3_DATA_DIR") + if data_dir_str: + return Path(data_dir_str) + return self.project_root / "m3_data" + + def _apply_config(self) -> None: + try: + setup_logging(level=self.log_level) + except ValueError as e: + raise M3ConfigError( + f"Invalid log level: {self.log_level}", + details="Log level must be one of: DEBUG, INFO, WARNING, ERROR, CRITICAL", + ) from e + + for key, value in self.env_vars.items(): + os.environ[key] = value + + def _get_env_var_error(self, key: str, default: Optional[str]) -> Optional[str]: + try: + self.get_env_var(key, default=default, raise_if_missing=default is None) + return None + except M3ConfigError as e: + return str(e) From 0af9ef64ce812f2e2cb779e2255d63dd6c072dfc Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:42:36 +0100 Subject: [PATCH 03/15] refactor(core): centralise M3 exceptions, helpers & logging --- src/m3/core/utils/__init__.py | 0 src/m3/core/utils/exceptions.py | 55 +++++++++++++++++++++++++++++++++ src/m3/core/utils/helpers.py | 15 +++++++++ src/m3/core/utils/logging.py | 27 ++++++++++++++++ 4 files changed, 97 insertions(+) create mode 100644 src/m3/core/utils/__init__.py create mode 100644 src/m3/core/utils/exceptions.py create mode 100644 src/m3/core/utils/helpers.py create mode 100644 src/m3/core/utils/logging.py diff --git a/src/m3/core/utils/__init__.py b/src/m3/core/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/m3/core/utils/exceptions.py b/src/m3/core/utils/exceptions.py new file mode 100644 index 0000000..de243fc --- /dev/null +++ b/src/m3/core/utils/exceptions.py @@ -0,0 +1,55 @@ +from beartype import beartype + + +@beartype +class M3Error(Exception): + ISSUE_REPORT_URL: str = ( + "https://github.com/rafiattrach/m3/issues/new?template=bug_report.yaml" + ) + + def __init__(self, message: str, details: str | None = None) -> None: + self.message = message + self.details = details + super().__init__(message) + + def __str__(self) -> str: + 
base_msg = f"M3 Library Error: {self.message}" + if self.details: + base_msg += f"\nHere are some more details: {self.details}" + base_msg += f"\nIf you think this is a bug, please report it at: {self.ISSUE_REPORT_URL}" + return base_msg + + +@beartype +class M3ValidationError(M3Error): + """General validation error for M3 configurations and setups.""" + + +@beartype +class M3InitializationError(M3Error): + """Raised when initialization fails (e.g., backend setup).""" + + +@beartype +class M3ConfigError(M3Error): + """Raised for configuration-specific issues.""" + + +@beartype +class M3PresetError(M3Error): + """Raised when preset loading or application fails.""" + + +@beartype +class M3BuildError(M3Error): + """Raised during build process failures.""" + + +@beartype +class AuthenticationError(M3Error): + """Raised when authentication fails.""" + + +@beartype +class TokenValidationError(M3Error): + """Raised when token validation fails.""" diff --git a/src/m3/core/utils/helpers.py b/src/m3/core/utils/helpers.py new file mode 100644 index 0000000..99310df --- /dev/null +++ b/src/m3/core/utils/helpers.py @@ -0,0 +1,15 @@ +import logging +import os + +from beartype import beartype + +from m3.core.config import M3Config + +logger = logging.getLogger(__name__) + + +@beartype +def get_config(env_override: bool = True) -> M3Config: + env_vars = os.environ.copy() if env_override else {} + config = M3Config(env_vars=env_vars) + return config diff --git a/src/m3/core/utils/logging.py b/src/m3/core/utils/logging.py new file mode 100644 index 0000000..37f1ab4 --- /dev/null +++ b/src/m3/core/utils/logging.py @@ -0,0 +1,27 @@ +import logging + +from beartype import beartype +from beartype.typing import Optional + + +@beartype +def setup_logging( + level: str = "INFO", + force: bool = False, + format_str: Optional[str] = None, + datefmt: Optional[str] = None, +) -> None: # pragma: no cover + root = logging.getLogger() + effective_format = ( + format_str or "%(asctime)s 
[%(levelname)-8s] %(name)s: %(message)s" + ) + effective_datefmt = datefmt or "%Y-%m-%d %H:%M:%S" + + if force or not root.handlers: + logging.basicConfig( + level=level, + format=effective_format, + datefmt=effective_datefmt, + ) + else: + root.setLevel(level) From 48ae186daa3852a3b640f9009f546524e0d17fe1 Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:43:21 +0100 Subject: [PATCH 04/15] feat(core): add preset base w/ default_M3 preset --- src/m3/core/preset/__init__.py | 0 src/m3/core/preset/base.py | 14 ++++ src/m3/core/preset/presets/__init__.py | 0 src/m3/core/preset/presets/default_preset.py | 67 ++++++++++++++++++++ src/m3/core/preset/registry.py | 6 ++ 5 files changed, 87 insertions(+) create mode 100644 src/m3/core/preset/__init__.py create mode 100644 src/m3/core/preset/base.py create mode 100644 src/m3/core/preset/presets/__init__.py create mode 100644 src/m3/core/preset/presets/default_preset.py create mode 100644 src/m3/core/preset/registry.py diff --git a/src/m3/core/preset/__init__.py b/src/m3/core/preset/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/m3/core/preset/base.py b/src/m3/core/preset/base.py new file mode 100644 index 0000000..d0f6b80 --- /dev/null +++ b/src/m3/core/preset/base.py @@ -0,0 +1,14 @@ +from abc import ABC, abstractmethod + +from beartype import beartype + +from m3.core.config import M3Config +from m3.m3 import M3 + + +@beartype +class Preset(ABC): + @classmethod + @abstractmethod + def create(cls, config: M3Config | None = None, **kwargs: dict) -> M3: + """Create an M3 instance based on the provided configuration and kwargs.""" diff --git a/src/m3/core/preset/presets/__init__.py b/src/m3/core/preset/presets/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/m3/core/preset/presets/default_preset.py b/src/m3/core/preset/presets/default_preset.py new file mode 100644 index 0000000..3a4979e --- /dev/null +++ b/src/m3/core/preset/presets/default_preset.py @@ -0,0 
+1,67 @@ +import logging +import os +from pathlib import Path + +from beartype import beartype + +from m3.core.config import M3Config +from m3.core.preset.base import Preset +from m3.core.tool.backend.backends.bigquery import BigQueryBackend +from m3.core.tool.backend.backends.sqlite import SQLiteBackend +from m3.core.utils.exceptions import M3PresetError, M3ValidationError +from m3.m3 import M3 +from m3.tools.mimic import MIMIC +from m3.tools.mimic.components.utils import get_default_database_path + +logger = logging.getLogger(__name__) + + +@beartype +class DefaultM3Preset(Preset): + @classmethod + def create( + cls, + config: M3Config | None = None, + **kwargs: dict, + ) -> M3: + _config = config or M3Config(env_vars=os.environ.copy()) + _backend = cls._determine_backend(_config) + _backends = cls._create_backends(_config, _backend) + _tool = MIMIC(backends=_backends, backend_key=_backend, config=_config) + m3 = M3(config=_config).with_tool(_tool) + cls._build_and_validate_m3(m3) + return m3 + + @classmethod + def _determine_backend(cls, config: M3Config) -> str: + _backend = config.get_env_var("M3_BACKEND", "sqlite").lower() + logger.info(f"Creating default preset with backend: {_backend}") + return _backend + + @classmethod + def _create_backends( + cls, config: M3Config, backend: str + ) -> list[SQLiteBackend | BigQueryBackend]: + if backend == "sqlite": + db_path = config.get_env_var("M3_DB_PATH") + default_db = get_default_database_path(config, "mimic-iv-demo") + if default_db is None: + raise M3PresetError("Cannot determine default DB path for preset") + path = Path(db_path) if db_path else default_db + logger.debug(f"Using SQLite DB path: {path}") + return [SQLiteBackend(str(path))] + elif backend == "bigquery": + project_id = config.get_env_var("M3_PROJECT_ID", raise_if_missing=True) + logger.debug(f"Using BigQuery project ID: {project_id}") + return [BigQueryBackend(project_id)] + else: + raise M3PresetError(f"Invalid backend for preset: {backend}") + + 
@classmethod + def _build_and_validate_m3(cls, m3: M3) -> None: + try: + m3.build() + logger.info("Preset build successful") + except M3ValidationError as e: + logger.error(f"Preset build failed: {e}") + raise M3PresetError("Preset build validation failed", details=str(e)) from e diff --git a/src/m3/core/preset/registry.py b/src/m3/core/preset/registry.py new file mode 100644 index 0000000..18fcedb --- /dev/null +++ b/src/m3/core/preset/registry.py @@ -0,0 +1,6 @@ +from m3.core.preset.base import Preset +from m3.core.preset.presets.default_preset import DefaultM3Preset + +ALL_PRESETS: dict[str, type[Preset]] = { + "default_m3": DefaultM3Preset, +} From bcfd4594d3b574d2326660c19a63165781d6e070 Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:44:30 +0100 Subject: [PATCH 05/15] refactor(core): add MCP Conf. Gen. base w/ Claude,FastMCP and Universal primitives --- src/m3/core/mcp_config_generator/__init__.py | 4 + src/m3/core/mcp_config_generator/base.py | 26 + .../mcp_config_generators/__init__.py | 0 .../claude_mcp_config.py | 142 ++++++ .../mcp_config_generators/fast_mcp_config.py | 106 +++++ .../universal_mcp_config.py | 40 ++ src/m3/core/mcp_config_generator/registry.py | 16 + src/m3/mcp_client_configs/__init__.py | 6 - .../mcp_client_configs/dynamic_mcp_config.py | 450 ------------------ .../setup_claude_desktop.py | 280 ----------- 10 files changed, 334 insertions(+), 736 deletions(-) create mode 100644 src/m3/core/mcp_config_generator/__init__.py create mode 100644 src/m3/core/mcp_config_generator/base.py create mode 100644 src/m3/core/mcp_config_generator/mcp_config_generators/__init__.py create mode 100644 src/m3/core/mcp_config_generator/mcp_config_generators/claude_mcp_config.py create mode 100644 src/m3/core/mcp_config_generator/mcp_config_generators/fast_mcp_config.py create mode 100644 src/m3/core/mcp_config_generator/mcp_config_generators/universal_mcp_config.py create mode 100644 src/m3/core/mcp_config_generator/registry.py delete 
mode 100644 src/m3/mcp_client_configs/__init__.py delete mode 100644 src/m3/mcp_client_configs/dynamic_mcp_config.py delete mode 100644 src/m3/mcp_client_configs/setup_claude_desktop.py diff --git a/src/m3/core/mcp_config_generator/__init__.py b/src/m3/core/mcp_config_generator/__init__.py new file mode 100644 index 0000000..82e7706 --- /dev/null +++ b/src/m3/core/mcp_config_generator/__init__.py @@ -0,0 +1,4 @@ +from .base import MCPConfigGenerator +from .registry import ALL_MCP_CONFIG_GENERATORS + +__all__ = ["ALL_MCP_CONFIG_GENERATORS", "MCPConfigGenerator"] diff --git a/src/m3/core/mcp_config_generator/base.py b/src/m3/core/mcp_config_generator/base.py new file mode 100644 index 0000000..344fd91 --- /dev/null +++ b/src/m3/core/mcp_config_generator/base.py @@ -0,0 +1,26 @@ +from abc import ABC, abstractmethod + +from beartype import beartype +from beartype.typing import TYPE_CHECKING, List, Optional + +if TYPE_CHECKING: + from m3.m3 import M3 + + +@beartype +class MCPConfigGenerator(ABC): + @classmethod + @abstractmethod + def generate( + cls, + m3: "M3", + command: Optional[str] = None, + args: Optional[List[str]] = None, + cwd: Optional[str] = None, + module_name: Optional[str] = None, + pipeline_config_path: Optional[str] = None, + save_path: Optional[str] = None, + ) -> dict | str: + """ + Generate an MCP configuration based on the provided M3 instance. 
+ """ diff --git a/src/m3/core/mcp_config_generator/mcp_config_generators/__init__.py b/src/m3/core/mcp_config_generator/mcp_config_generators/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/m3/core/mcp_config_generator/mcp_config_generators/claude_mcp_config.py b/src/m3/core/mcp_config_generator/mcp_config_generators/claude_mcp_config.py new file mode 100644 index 0000000..cdb2c29 --- /dev/null +++ b/src/m3/core/mcp_config_generator/mcp_config_generators/claude_mcp_config.py @@ -0,0 +1,142 @@ +import json +import logging +import os +import shutil +from pathlib import Path + +from beartype import beartype +from beartype.typing import Any, Dict, List, Optional + +from m3.core.mcp_config_generator.base import MCPConfigGenerator +from m3.core.utils.exceptions import M3ValidationError + +logger = logging.getLogger(__name__) + + +@beartype +class ClaudeConfigGenerator(MCPConfigGenerator): + @classmethod + def generate( + cls, + m3: "m3.m3.M3", # noqa: F821 + command: Optional[str] = None, + args: Optional[List[str]] = None, + cwd: Optional[str] = None, + module_name: Optional[str] = None, + pipeline_config_path: Optional[str] = None, + save_path: Optional[str] = None, + ) -> Dict[str, Any]: + _command = cls._get_command(command, m3) + _module_name = cls._get_module_name(module_name, m3) + _args = cls._get_args(args, m3, _module_name) + _cwd = cls._get_cwd(cwd, m3) + + if not shutil.which(_command): + raise M3ValidationError(f"Invalid command '{_command}': Not found on PATH.") + if not os.path.isdir(_cwd): + raise M3ValidationError(f"Invalid cwd '{_cwd}': Directory does not exist.") + + env = m3.config.env_vars.copy() + if pipeline_config_path: + env["M3_CONFIG_PATH"] = pipeline_config_path + + logger.debug( + f"Generating Claude config with command='{_command}', args={_args}, cwd='{_cwd}', pipeline_config_path='{pipeline_config_path}'" + ) + + claude_config = { + "mcpServers": { + "m3": { + "command": _command, + "args": _args, + "cwd": _cwd, + 
"env": env, + } + } + } + + cls._save_config(claude_config, save_path) + + logger.debug("Claude config generated successfully") + return claude_config + + @staticmethod + def _get_command(command: Optional[str], m3: "m3.m3.M3") -> str: # noqa: F821 + if command is not None: + return command + + if "VIRTUAL_ENV" in os.environ: + venv_python = Path(os.environ["VIRTUAL_ENV"]) / "bin" / "python" + if venv_python.exists(): + return str(venv_python) + + default_python = shutil.which("python") or shutil.which("python3") or "python" + return m3.config.get_env_var("M3_COMMAND", default_python) + + @staticmethod + def _get_module_name(module_name: Optional[str], m3: "m3.m3.M3") -> str: # noqa: F821 + if module_name is not None: + return module_name + + return m3.config.get_env_var("M3_MODULE", "m3.core.server") + + @staticmethod + def _get_args( + args: Optional[List[str]], + m3: "m3.m3.M3", # noqa: F821 + module_name: str, + ) -> List[str]: + if args is not None: + return args + + return m3.config.get_env_var("M3_ARGS", ["-m", module_name]) + + @staticmethod + def _get_cwd(cwd: Optional[str], m3: "m3.m3.M3") -> str: # noqa: F821 + if cwd is not None: + return cwd + + return m3.config.get_env_var("M3_CWD", os.getcwd()) + + @staticmethod + def _save_config(config: Dict[str, Any], save_path: Optional[str]) -> None: + if not save_path: + claude_config_path = ClaudeConfigGenerator._get_claude_config_path() + if claude_config_path: + existing_config = {} + if claude_config_path.exists(): + with open(claude_config_path) as f: + existing_config = json.load(f) + existing_config.setdefault("mcpServers", {}).update( + config["mcpServers"] + ) + with open(claude_config_path, "w") as f: + json.dump(existing_config, f, indent=2) + logger.info( + f"โœ… Claude config merged and saved to {claude_config_path}." 
+ ) + return + else: + save_path = "m3_claude_config.json" + + with open(save_path, "w") as f: + json.dump(config, f, indent=2) + logger.info(f"โœ… Claude config saved to {save_path}") + + @staticmethod + def _get_claude_config_path() -> Optional[Path]: + home = Path.home() + paths = [ + home + / "Library" + / "Application Support" + / "Claude" + / "claude_desktop_config.json", # macOS + home + / "AppData" + / "Roaming" + / "Claude" + / "claude_desktop_config.json", # Windows + home / ".config" / "Claude" / "claude_desktop_config.json", # Linux + ] + return next((path for path in paths if path.parent.exists()), None) diff --git a/src/m3/core/mcp_config_generator/mcp_config_generators/fast_mcp_config.py b/src/m3/core/mcp_config_generator/mcp_config_generators/fast_mcp_config.py new file mode 100644 index 0000000..0855d50 --- /dev/null +++ b/src/m3/core/mcp_config_generator/mcp_config_generators/fast_mcp_config.py @@ -0,0 +1,106 @@ +import json +import logging +import os +import shutil +from pathlib import Path + +from beartype import beartype +from beartype.typing import Any, Dict, List, Optional + +from m3.core.mcp_config_generator.base import MCPConfigGenerator +from m3.core.utils.exceptions import M3ValidationError + +logger = logging.getLogger(__name__) + + +@beartype +class FastMCPConfigGenerator(MCPConfigGenerator): + @classmethod + def generate( + cls, + m3: "m3.m3.M3", # noqa: F821 + command: Optional[str] = None, + args: Optional[List[str]] = None, + cwd: Optional[str] = None, + module_name: Optional[str] = None, + pipeline_config_path: Optional[str] = None, + save_path: Optional[str] = None, + ) -> Dict[str, Any]: + _command = cls._get_command(command, m3) + _module_name = cls._get_module_name(module_name, m3) + _args = cls._get_args(args, m3, _module_name) + _cwd = cls._get_cwd(cwd, m3) + + if not shutil.which(_command): + raise M3ValidationError(f"Invalid command '{_command}': Not found on PATH.") + if not os.path.isdir(_cwd): + raise 
M3ValidationError(f"Invalid cwd '{_cwd}': Directory does not exist.") + + env = m3.config.env_vars.copy() + if pipeline_config_path: + env["M3_CONFIG_PATH"] = pipeline_config_path + + logger.debug( + f"Generating FastMCP config with command='{_command}', args={_args}, cwd='{_cwd}', pipeline_config_path='{pipeline_config_path}'" + ) + + config = { + "mcpServers": { + "m3": { + "command": _command, + "args": _args, + "cwd": _cwd, + "env": env, + } + } + } + + cls._save_config(config, save_path) + + logger.debug("FastMCP config generated successfully") + return config + + @staticmethod + def _get_command(command: Optional[str], m3: "m3.m3.M3") -> str: # noqa: F821 + if command is not None: + return command + + if "VIRTUAL_ENV" in os.environ: + venv_python = Path(os.environ["VIRTUAL_ENV"]) / "bin" / "python" + if venv_python.exists(): + return str(venv_python) + + default_python = shutil.which("python") or shutil.which("python3") or "python" + return m3.config.get_env_var("M3_COMMAND", default_python) + + @staticmethod + def _get_module_name(module_name: Optional[str], m3: "m3.m3.M3") -> str: # noqa: F821 + if module_name is not None: + return module_name + + return m3.config.get_env_var("M3_MODULE", "m3.core.server") + + @staticmethod + def _get_args( + args: Optional[List[str]], + m3: "m3.m3.M3", # noqa: F821 + module_name: str, + ) -> List[str]: + if args is not None: + return args + + return m3.config.get_env_var("M3_ARGS", ["-m", module_name]) + + @staticmethod + def _get_cwd(cwd: Optional[str], m3: "m3.m3.M3") -> str: # noqa: F821 + if cwd is not None: + return cwd + + return m3.config.get_env_var("M3_CWD", os.getcwd()) + + @staticmethod + def _save_config(config: Dict[str, Any], save_path: Optional[str]) -> None: + if save_path: + with open(save_path, "w") as f: + json.dump(config, f, indent=2) + logger.info(f"โœ… FastMCP config saved to {save_path}.") diff --git a/src/m3/core/mcp_config_generator/mcp_config_generators/universal_mcp_config.py 
b/src/m3/core/mcp_config_generator/mcp_config_generators/universal_mcp_config.py new file mode 100644 index 0000000..38fdc24 --- /dev/null +++ b/src/m3/core/mcp_config_generator/mcp_config_generators/universal_mcp_config.py @@ -0,0 +1,40 @@ +import json +import logging + +from beartype import beartype +from beartype.typing import List, Optional + +from m3.core.mcp_config_generator.base import MCPConfigGenerator + +logger = logging.getLogger(__name__) + + +@beartype +class UniversalConfigGenerator(MCPConfigGenerator): # pragma: no cover + @classmethod + def generate( + cls, + m3: "m3.m3.M3", # noqa: F821 + command: Optional[str] = None, + args: Optional[List[str]] = None, + cwd: Optional[str] = None, + module_name: Optional[str] = None, + pipeline_config_path: Optional[str] = None, + save_path: Optional[str] = None, + ) -> dict: + env = m3.config.env_vars.copy() + if pipeline_config_path: + env["M3_CONFIG_PATH"] = pipeline_config_path + + logger.debug("Generating Universal config") + + config = m3.__dict__ + config["env_vars"] = env + + if save_path: + with open(save_path, "w") as f: + json.dump(config, f, indent=2) + logger.info(f"โœ… Universal config saved to {save_path}.") + + logger.debug("Universal config generated successfully") + return config diff --git a/src/m3/core/mcp_config_generator/registry.py b/src/m3/core/mcp_config_generator/registry.py new file mode 100644 index 0000000..4d42913 --- /dev/null +++ b/src/m3/core/mcp_config_generator/registry.py @@ -0,0 +1,16 @@ +from m3.core.mcp_config_generator import MCPConfigGenerator +from m3.core.mcp_config_generator.mcp_config_generators.claude_mcp_config import ( + ClaudeConfigGenerator, +) +from m3.core.mcp_config_generator.mcp_config_generators.fast_mcp_config import ( + FastMCPConfigGenerator, +) +from m3.core.mcp_config_generator.mcp_config_generators.universal_mcp_config import ( + UniversalConfigGenerator, +) + +ALL_MCP_CONFIG_GENERATORS: dict[str, type[MCPConfigGenerator]] = { + "fastmcp": 
FastMCPConfigGenerator, + "claude": ClaudeConfigGenerator, + "universal": UniversalConfigGenerator, +} diff --git a/src/m3/mcp_client_configs/__init__.py b/src/m3/mcp_client_configs/__init__.py deleted file mode 100644 index 47c8700..0000000 --- a/src/m3/mcp_client_configs/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -""" -MCP client configuration utilities. - -This package contains scripts for configuring various MCP clients -with the M3 server. -""" diff --git a/src/m3/mcp_client_configs/dynamic_mcp_config.py b/src/m3/mcp_client_configs/dynamic_mcp_config.py deleted file mode 100644 index d49d776..0000000 --- a/src/m3/mcp_client_configs/dynamic_mcp_config.py +++ /dev/null @@ -1,450 +0,0 @@ -""" -Dynamic MCP Configuration Generator for M3 Server. -Generates MCP server configurations that can be copied and pasted into any MCP client. -""" - -import json -import os -import shutil -import sys -from pathlib import Path -from typing import Any - -# Error messages -_DATABASE_PATH_ERROR_MSG = ( - "Could not determine default database path for mimic-iv-demo.\n" - "Please run 'm3 init mimic-iv-demo' first." 
-) - - -class MCPConfigGenerator: - """Generator for MCP server configurations.""" - - def __init__(self): - self.current_dir = Path(__file__).parent.parent.absolute() - self.default_python = self._get_default_python() - - def _get_default_python(self) -> str: - """Get the default Python executable path.""" - # Try to use the current virtual environment - if "VIRTUAL_ENV" in os.environ: - venv_python = Path(os.environ["VIRTUAL_ENV"]) / "bin" / "python" - if venv_python.exists(): - return str(venv_python) - - # Fall back to system python - return shutil.which("python") or shutil.which("python3") or "python" - - def _validate_python_path(self, python_path: str) -> bool: - """Validate that the Python path exists and is executable.""" - path = Path(python_path) - return path.exists() and path.is_file() and os.access(path, os.X_OK) - - def _validate_directory(self, dir_path: str) -> bool: - """Validate that the directory exists.""" - return Path(dir_path).exists() and Path(dir_path).is_dir() - - def generate_config( - self, - server_name: str = "m3", - python_path: str | None = None, - working_directory: str | None = None, - backend: str = "sqlite", - db_path: str | None = None, - project_id: str | None = None, - additional_env: dict[str, str] | None = None, - module_name: str = "m3.mcp_server", - oauth2_enabled: bool = False, - oauth2_config: dict[str, str] | None = None, - ) -> dict[str, Any]: - """Generate MCP server configuration.""" - - # Use defaults if not provided - if python_path is None: - python_path = self.default_python - if working_directory is None: - working_directory = str(self.current_dir) - - # Validate inputs - if not self._validate_python_path(python_path): - raise ValueError(f"Invalid Python path: {python_path}") - if not self._validate_directory(working_directory): - raise ValueError(f"Invalid working directory: {working_directory}") - - # Build environment variables - env = { - "PYTHONPATH": str(Path(working_directory) / "src"), - "M3_BACKEND": 
backend, - } - - # Add backend-specific environment variables - if backend == "sqlite" and db_path: - env["M3_DB_PATH"] = db_path - elif backend == "bigquery" and project_id: - env["M3_PROJECT_ID"] = project_id - env["GOOGLE_CLOUD_PROJECT"] = project_id - - # Add OAuth2 configuration if enabled - if oauth2_enabled and oauth2_config: - env.update( - { - "M3_OAUTH2_ENABLED": "true", - "M3_OAUTH2_ISSUER_URL": oauth2_config.get("issuer_url", ""), - "M3_OAUTH2_AUDIENCE": oauth2_config.get("audience", ""), - "M3_OAUTH2_REQUIRED_SCOPES": oauth2_config.get( - "required_scopes", "read:mimic-data" - ), - "M3_OAUTH2_JWKS_URL": oauth2_config.get("jwks_url", ""), - } - ) - - # Optional OAuth2 settings - if oauth2_config.get("client_id"): - env["M3_OAUTH2_CLIENT_ID"] = oauth2_config["client_id"] - if oauth2_config.get("rate_limit_requests"): - env["M3_OAUTH2_RATE_LIMIT_REQUESTS"] = str( - oauth2_config["rate_limit_requests"] - ) - - # Add any additional environment variables - if additional_env: - env.update(additional_env) - - # Create the configuration - config = { - "mcpServers": { - server_name: { - "command": python_path, - "args": ["-m", module_name], - "cwd": working_directory, - "env": env, - } - } - } - - return config - - def interactive_config(self) -> dict[str, Any]: - """Interactive configuration builder.""" - print("๐Ÿ”ง M3 MCP Server Configuration Generator") - print("=" * 50) - - # Server name - print("\n๐Ÿท๏ธ Server Configuration:") - print("The server name is how your MCP client will identify this server.") - server_name = ( - input("Server name (press Enter for default 'm3'): ").strip() or "m3" - ) - - # Python path - print(f"\nDefault Python path: {self.default_python}") - python_path = input( - "Python executable path (press Enter for default): " - ).strip() - if not python_path: - python_path = self.default_python - - # Working directory - print(f"\nDefault working directory: {self.current_dir}") - working_directory = input( - "Working directory (press 
Enter for default): " - ).strip() - if not working_directory: - working_directory = str(self.current_dir) - - # Backend selection - simplified - print("\nChoose backend:") - print("1. SQLite (local database)") - print("2. BigQuery (Google Cloud)") - - while True: - backend_choice = input("Choose backend [1]: ").strip() or "1" - if backend_choice in ["1", "2"]: - break - print("Please enter 1 or 2") - - backend = "sqlite" if backend_choice == "1" else "bigquery" - - # Backend-specific configuration - db_path = None - project_id = None - - if backend == "sqlite": - print("\n๐Ÿ“ SQLite Configuration:") - from m3.config import get_default_database_path - - default_db_path = get_default_database_path("mimic-iv-demo") - if default_db_path is None: - raise ValueError(_DATABASE_PATH_ERROR_MSG) - print(f"Default database path: {default_db_path}") - - db_path = ( - input( - "SQLite database path (optional, press Enter to use default): " - ).strip() - or None - ) - - elif backend == "bigquery": - print("\nโ˜๏ธ BigQuery Configuration:") - project_id = None - while not project_id: - project_id = input( - "Google Cloud project ID (required for BigQuery): " - ).strip() - if not project_id: - print( - "โŒ Project ID is required when using BigQuery backend. Please enter your GCP project ID." - ) - print(f"โœ… Will use project: {project_id}") - - # OAuth2 Configuration - oauth2_enabled = False - oauth2_config = None - - print("\n๐Ÿ” OAuth2 Authentication (optional):") - enable_oauth2 = input("Enable OAuth2 authentication? 
[y/N]: ").strip().lower() - - if enable_oauth2 in ["y", "yes"]: - oauth2_enabled = True - oauth2_config = {} - - print("\nOAuth2 Configuration:") - oauth2_config["issuer_url"] = input( - "OAuth2 Issuer URL (e.g., https://auth.example.com): " - ).strip() - oauth2_config["audience"] = input( - "OAuth2 Audience (e.g., m3-api): " - ).strip() - oauth2_config["required_scopes"] = ( - input("Required Scopes [read:mimic-data]: ").strip() - or "read:mimic-data" - ) - - # Optional settings - jwks_url = input("JWKS URL (optional, auto-discovered if empty): ").strip() - if jwks_url: - oauth2_config["jwks_url"] = jwks_url - - rate_limit = input("Rate limit (requests per hour) [100]: ").strip() - if rate_limit and rate_limit.isdigit(): - oauth2_config["rate_limit_requests"] = rate_limit - - print("โœ… OAuth2 configuration added") - - # Additional environment variables - additional_env = {} - print("\n๐ŸŒ Additional environment variables (optional):") - print( - "Enter key=value pairs, one per line. Press Enter on empty line to finish." - ) - while True: - env_var = input("Environment variable: ").strip() - if not env_var: - break - if "=" in env_var: - key, value = env_var.split("=", 1) - additional_env[key.strip()] = value.strip() - print(f"โœ… Added: {key.strip()}={value.strip()}") - else: - print("โŒ Invalid format. 
Use key=value") - - return self.generate_config( - server_name=server_name, - python_path=python_path, - working_directory=working_directory, - backend=backend, - db_path=db_path, - project_id=project_id, - additional_env=additional_env if additional_env else None, - module_name="m3.mcp_server", - oauth2_enabled=oauth2_enabled, - oauth2_config=oauth2_config, - ) - - -def print_config_info(config: dict[str, Any]): - """Print configuration information.""" - # Get the first (and likely only) server configuration - server_name = next(iter(config["mcpServers"].keys())) - server_config = config["mcpServers"][server_name] - - print("\n๐Ÿ“‹ Configuration Summary:") - print("=" * 30) - print(f"๐Ÿท๏ธ Server name: {server_name}") - print(f"๐Ÿ Python path: {server_config['command']}") - print(f"๐Ÿ“ Working directory: {server_config['cwd']}") - print(f"๐Ÿ”ง Backend: {server_config['env'].get('M3_BACKEND', 'unknown')}") - - if "M3_DB_PATH" in server_config["env"]: - print(f"๐Ÿ’พ Database path: {server_config['env']['M3_DB_PATH']}") - elif server_config["env"].get("M3_BACKEND") == "sqlite": - # Show the default path when using SQLite backend - from m3.config import get_default_database_path - - default_path = get_default_database_path("mimic-iv-demo") - if default_path is None: - raise ValueError(_DATABASE_PATH_ERROR_MSG) - print(f"๐Ÿ’พ Database path: {default_path}") - - if "M3_PROJECT_ID" in server_config["env"]: - print(f"โ˜๏ธ Project ID: {server_config['env']['M3_PROJECT_ID']}") - - # Show additional env vars - additional_env = { - k: v - for k, v in server_config["env"].items() - if k - not in [ - "PYTHONPATH", - "M3_BACKEND", - "M3_DB_PATH", - "M3_PROJECT_ID", - "GOOGLE_CLOUD_PROJECT", - ] - } - if additional_env: - print("๐ŸŒ Additional environment variables:") - for key, value in additional_env.items(): - print(f" {key}: {value}") - - -def main(): - """Main function.""" - import argparse - - parser = argparse.ArgumentParser( - description="Generate MCP server 
configuration for M3", - formatter_class=argparse.RawDescriptionHelpFormatter, - epilog=""" -Examples: - # Interactive mode - python dynamic_mcp_config.py - - # Quick generation with defaults - python dynamic_mcp_config.py --quick - - # Custom configuration - python dynamic_mcp_config.py --python-path /usr/bin/python3 --backend bigquery --project-id my-project - - # Save to file - python dynamic_mcp_config.py --output config.json - """, - ) - - parser.add_argument( - "--quick", - action="store_true", - help="Generate configuration with defaults (non-interactive)", - ) - parser.add_argument( - "--server-name", default="m3", help="Name for the MCP server (default: m3)" - ) - parser.add_argument("--python-path", help="Path to Python executable") - parser.add_argument("--working-directory", help="Working directory for the server") - parser.add_argument( - "--backend", - choices=["sqlite", "bigquery"], - default="sqlite", - help="Backend to use (default: sqlite)", - ) - parser.add_argument( - "--db-path", help="Path to SQLite database (for sqlite backend)" - ) - parser.add_argument( - "--project-id", help="Google Cloud project ID (for bigquery backend)" - ) - parser.add_argument( - "--env", - action="append", - help="Additional environment variables (format: KEY=VALUE)", - ) - parser.add_argument( - "--output", "-o", help="Save configuration to file instead of printing" - ) - parser.add_argument( - "--pretty", - action="store_true", - default=True, - help="Pretty print JSON (default: True)", - ) - - args = parser.parse_args() - - # Validate backend-specific arguments - if args.backend == "sqlite" and args.project_id: - print( - "โŒ Error: --project-id can only be used with --backend bigquery", - file=sys.stderr, - ) - sys.exit(1) - - if args.backend == "bigquery" and args.db_path: - print( - "โŒ Error: --db-path can only be used with --backend sqlite", - file=sys.stderr, - ) - sys.exit(1) - - # Require project_id for BigQuery backend - if args.backend == "bigquery" 
and not args.project_id: - print( - "โŒ Error: --project-id is required when using --backend bigquery", - file=sys.stderr, - ) - sys.exit(1) - - generator = MCPConfigGenerator() - - try: - if args.quick: - # Quick mode with command line arguments - additional_env = {} - if args.env: - for env_var in args.env: - if "=" in env_var: - key, value = env_var.split("=", 1) - additional_env[key.strip()] = value.strip() - - config = generator.generate_config( - server_name=args.server_name, - python_path=args.python_path, - working_directory=args.working_directory, - backend=args.backend, - db_path=args.db_path, - project_id=args.project_id, - additional_env=additional_env if additional_env else None, - module_name="m3.mcp_server", - ) - else: - # Interactive mode - config = generator.interactive_config() - - # Print configuration info - print_config_info(config) - - # Output the configuration - json_output = json.dumps(config, indent=2 if args.pretty else None) - - if args.output: - # Save to file - with open(args.output, "w") as f: - f.write(json_output) - print(f"\n๐Ÿ’พ Configuration saved to: {args.output}") - else: - # Print to terminal - print("\n๐Ÿ“‹ MCP Configuration (copy and paste this into your MCP client):") - print("=" * 70) - print(json_output) - print("=" * 70) - print( - "\n๐Ÿ’ก Copy the JSON above and paste it into your MCP client configuration." - ) - - except Exception as e: - print(f"โŒ Error: {e}", file=sys.stderr) - sys.exit(1) - - -if __name__ == "__main__": - main() diff --git a/src/m3/mcp_client_configs/setup_claude_desktop.py b/src/m3/mcp_client_configs/setup_claude_desktop.py deleted file mode 100644 index e3d0ab9..0000000 --- a/src/m3/mcp_client_configs/setup_claude_desktop.py +++ /dev/null @@ -1,280 +0,0 @@ -""" -Setup script for M3 MCP Server with Claude Desktop. -Automatically configures Claude Desktop to use the M3 MCP server. 
-""" - -import json -import os -import shutil -from pathlib import Path - - -def get_claude_config_path(): - """Get the Claude Desktop configuration file path.""" - home = Path.home() - - # macOS path - claude_config = ( - home - / "Library" - / "Application Support" - / "Claude" - / "claude_desktop_config.json" - ) - if claude_config.parent.exists(): - return claude_config - - # Windows path - claude_config = ( - home / "AppData" / "Roaming" / "Claude" / "claude_desktop_config.json" - ) - if claude_config.parent.exists(): - return claude_config - - # Linux path - claude_config = home / ".config" / "Claude" / "claude_desktop_config.json" - if claude_config.parent.exists(): - return claude_config - - raise FileNotFoundError("Could not find Claude Desktop configuration directory") - - -def get_current_directory(): - """Get the current M3 project directory.""" - return Path(__file__).parent.parent.absolute() - - -def get_python_path(): - """Get the Python executable path.""" - # Try to use the current virtual environment - if "VIRTUAL_ENV" in os.environ: - venv_python = Path(os.environ["VIRTUAL_ENV"]) / "bin" / "python" - if venv_python.exists(): - return str(venv_python) - - # Fall back to system python - return shutil.which("python") or shutil.which("python3") or "python" - - -def create_mcp_config( - backend="sqlite", - db_path=None, - project_id=None, - oauth2_enabled=False, - oauth2_config=None, -): - """Create MCP server configuration.""" - current_dir = get_current_directory() - python_path = get_python_path() - - config = { - "mcpServers": { - "m3": { - "command": python_path, - "args": ["-m", "m3.mcp_server"], - "cwd": str(current_dir), - "env": {"PYTHONPATH": str(current_dir / "src"), "M3_BACKEND": backend}, - } - } - } - - # Add backend-specific environment variables - if backend == "sqlite" and db_path: - config["mcpServers"]["m3"]["env"]["M3_DB_PATH"] = db_path - elif backend == "bigquery" and project_id: - 
config["mcpServers"]["m3"]["env"]["M3_PROJECT_ID"] = project_id - config["mcpServers"]["m3"]["env"]["GOOGLE_CLOUD_PROJECT"] = project_id - - # Add OAuth2 configuration if enabled - if oauth2_enabled and oauth2_config: - config["mcpServers"]["m3"]["env"].update( - { - "M3_OAUTH2_ENABLED": "true", - "M3_OAUTH2_ISSUER_URL": oauth2_config.get("issuer_url", ""), - "M3_OAUTH2_AUDIENCE": oauth2_config.get("audience", ""), - "M3_OAUTH2_REQUIRED_SCOPES": oauth2_config.get( - "required_scopes", "read:mimic-data" - ), - "M3_OAUTH2_JWKS_URL": oauth2_config.get("jwks_url", ""), - } - ) - - # Optional OAuth2 settings - if oauth2_config.get("client_id"): - config["mcpServers"]["m3"]["env"]["M3_OAUTH2_CLIENT_ID"] = oauth2_config[ - "client_id" - ] - if oauth2_config.get("rate_limit_requests"): - config["mcpServers"]["m3"]["env"]["M3_OAUTH2_RATE_LIMIT_REQUESTS"] = str( - oauth2_config["rate_limit_requests"] - ) - - return config - - -def setup_claude_desktop( - backend="sqlite", - db_path=None, - project_id=None, - oauth2_enabled=False, - oauth2_config=None, -): - """Setup Claude Desktop with M3 MCP server.""" - try: - claude_config_path = get_claude_config_path() - print(f"Found Claude Desktop config at: {claude_config_path}") - - # Load existing config or create new one - existing_config = {} - if claude_config_path.exists() and claude_config_path.stat().st_size > 0: - try: - with open(claude_config_path) as f: - existing_config = json.load(f) - print("Loaded existing Claude Desktop configuration") - except json.JSONDecodeError: - print("Found corrupted config file, creating new configuration") - existing_config = {} - else: - print("Creating new Claude Desktop configuration") - - # Create MCP config - mcp_config = create_mcp_config( - backend, db_path, project_id, oauth2_enabled, oauth2_config - ) - - # Merge configurations - if "mcpServers" not in existing_config: - existing_config["mcpServers"] = {} - - existing_config["mcpServers"].update(mcp_config["mcpServers"]) - - # 
Ensure directory exists - claude_config_path.parent.mkdir(parents=True, exist_ok=True) - - # Write updated config - with open(claude_config_path, "w") as f: - json.dump(existing_config, f, indent=2) - - print("โœ… Successfully configured Claude Desktop!") - print(f"๐Ÿ“ Config file: {claude_config_path}") - print(f"๐Ÿ”ง Backend: {backend}") - - if backend == "sqlite": - db_path_display = db_path or "default (m3_data/databases/mimic_iv_demo.db)" - print(f"๐Ÿ’พ Database: {db_path_display}") - elif backend == "bigquery": - project_display = project_id or "physionet-data" - print(f"โ˜๏ธ Project: {project_display}") - - if oauth2_enabled: - print("๐Ÿ” OAuth2 Authentication: Enabled") - if oauth2_config: - print(f"๐Ÿ”— Issuer: {oauth2_config.get('issuer_url', 'Not configured')}") - print(f"๐Ÿ‘ฅ Audience: {oauth2_config.get('audience', 'Not configured')}") - print( - f"๐Ÿ”‘ Required Scopes: {oauth2_config.get('required_scopes', 'read:mimic-data')}" - ) - print("\nโš ๏ธ Security Notice:") - print(" - OAuth2 authentication is now required for all API calls") - print(" - Ensure you have a valid access token with the required scopes") - print( - " - Set M3_OAUTH2_TOKEN environment variable with your Bearer token" - ) - else: - print("๐Ÿ”“ OAuth2 Authentication: Disabled") - - print("\n๐Ÿ”„ Please restart Claude Desktop to apply changes") - - return True - - except Exception as e: - print(f"โŒ Error setting up Claude Desktop: {e}") - return False - - -def main(): - """Main setup function.""" - import argparse - - parser = argparse.ArgumentParser( - description="Setup M3 MCP Server with Claude Desktop" - ) - parser.add_argument( - "--backend", - choices=["sqlite", "bigquery"], - default="sqlite", - help="Backend to use (default: sqlite)", - ) - parser.add_argument( - "--db-path", help="Path to SQLite database (for sqlite backend)" - ) - parser.add_argument( - "--project-id", help="Google Cloud project ID (for bigquery backend)" - ) - parser.add_argument( - 
"--enable-oauth2", action="store_true", help="Enable OAuth2 authentication" - ) - parser.add_argument( - "--oauth2-issuer", help="OAuth2 issuer URL (e.g., https://auth.example.com)" - ) - parser.add_argument("--oauth2-audience", help="OAuth2 audience (e.g., m3-api)") - parser.add_argument( - "--oauth2-scopes", - default="read:mimic-data", - help="Required OAuth2 scopes (comma-separated)", - ) - - args = parser.parse_args() - - # Validate backend-specific arguments - if args.backend == "sqlite" and args.project_id: - print("โŒ Error: --project-id can only be used with --backend bigquery") - exit(1) - - if args.backend == "bigquery" and args.db_path: - print("โŒ Error: --db-path can only be used with --backend sqlite") - exit(1) - - # Require project_id for BigQuery backend - if args.backend == "bigquery" and not args.project_id: - print("โŒ Error: --project-id is required when using --backend bigquery") - exit(1) - - print("๐Ÿš€ Setting up M3 MCP Server with Claude Desktop...") - print(f"๐Ÿ“Š Backend: {args.backend}") - - # Prepare OAuth2 configuration if enabled - oauth2_config = None - if args.enable_oauth2: - if not args.oauth2_issuer or not args.oauth2_audience: - print( - "โŒ Error: --oauth2-issuer and --oauth2-audience are required when --enable-oauth2 is used" - ) - exit(1) - - oauth2_config = { - "issuer_url": args.oauth2_issuer, - "audience": args.oauth2_audience, - "required_scopes": args.oauth2_scopes, - } - - success = setup_claude_desktop( - backend=args.backend, - db_path=args.db_path, - project_id=args.project_id, - oauth2_enabled=args.enable_oauth2, - oauth2_config=oauth2_config, - ) - - if success: - print("\n๐ŸŽ‰ Setup complete! You can now use M3 tools in Claude Desktop.") - print( - "\n๐Ÿ’ก Try asking Claude: 'What tools do you have available for MIMIC-IV data?'" - ) - else: - print("\n๐Ÿ’ฅ Setup failed. 
Please check the error messages above.") - exit(1) - - -if __name__ == "__main__": - main() From 90f9c47b20b9ddd5c192ff9143ac61935d9e253b Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:45:26 +0100 Subject: [PATCH 06/15] feat(core): add M3 Tool base --- src/m3/core/tool/backend/backends/bigquery.py | 77 +++++++++++++++++++ src/m3/core/tool/backend/backends/sqlite.py | 70 +++++++++++++++++ src/m3/core/tool/backend/base.py | 40 ++++++++++ src/m3/core/tool/backend/registry.py | 14 ++++ src/m3/core/tool/base.py | 70 +++++++++++++++++ src/m3/core/tool/cli/base.py | 39 ++++++++++ 6 files changed, 310 insertions(+) create mode 100644 src/m3/core/tool/backend/backends/bigquery.py create mode 100644 src/m3/core/tool/backend/backends/sqlite.py create mode 100644 src/m3/core/tool/backend/base.py create mode 100644 src/m3/core/tool/backend/registry.py create mode 100644 src/m3/core/tool/base.py create mode 100644 src/m3/core/tool/cli/base.py diff --git a/src/m3/core/tool/backend/backends/bigquery.py b/src/m3/core/tool/backend/backends/bigquery.py new file mode 100644 index 0000000..0a39bc8 --- /dev/null +++ b/src/m3/core/tool/backend/backends/bigquery.py @@ -0,0 +1,77 @@ +import logging + +import pandas as pd +from beartype import beartype +from beartype.typing import Any, Dict + +from m3.core.tool.backend.base import BackendBase +from m3.core.utils.exceptions import M3InitializationError, M3ValidationError + +logger = logging.getLogger(__name__) + + +@beartype +class BigQueryBackend(BackendBase): + def __init__(self, project: str) -> None: + self.project = project + self.client = None + + def to_dict(self) -> Dict[str, Any]: + return {"project": self.project} + + @classmethod + def from_dict(cls, params: Dict[str, Any]) -> "BigQueryBackend": + try: + return cls(project=params["project"]) + except KeyError as e: + raise ValueError(f"Missing required param: {e}") from e + + def initialize(self) -> None: + logger.debug(f"Initializing BigQuery backend for 
project: {self.project}") + try: + from google.cloud import bigquery + + self.client = bigquery.Client(project=self.project) + except ImportError as e: + raise M3InitializationError( + "google-cloud-bigquery package not installed", details=str(e) + ) from e + except Exception as e: + raise M3InitializationError( + "BigQuery client initialization failed", details=str(e) + ) from e + + def execute(self, operation: str) -> str: + if not self.client: + raise M3ValidationError("BigQuery backend not initialized") + try: + from google.cloud import bigquery + + job_config = bigquery.QueryJobConfig() + query_job = self.client.query(operation, job_config=job_config) + dataframe = query_job.to_dataframe() + return self._format_result(dataframe) + except Exception as e: + raise M3ValidationError(f"BigQuery execution failed: {e}") from e + + def _format_result(self, dataframe: pd.DataFrame) -> str: + if dataframe.empty: + return "No results found" + if len(dataframe) > 50: + result = dataframe.head(50).to_string(index=False) + result += f"\n... 
({len(dataframe)} total rows, showing first 50)" + else: + result = dataframe.to_string(index=False) + return result + + def teardown(self) -> None: + self.client = None + + def __getstate__(self) -> dict: + state = super().__getstate__() + state["client"] = None + return state + + def __setstate__(self, state: dict) -> None: + super().__setstate__(state) + self.client = None diff --git a/src/m3/core/tool/backend/backends/sqlite.py b/src/m3/core/tool/backend/backends/sqlite.py new file mode 100644 index 0000000..38e8018 --- /dev/null +++ b/src/m3/core/tool/backend/backends/sqlite.py @@ -0,0 +1,70 @@ +import logging +import sqlite3 + +import pandas as pd +from beartype import beartype +from beartype.typing import Any, Dict + +from m3.core.tool.backend.base import BackendBase +from m3.core.utils.exceptions import M3InitializationError, M3ValidationError + +logger = logging.getLogger(__name__) + + +@beartype +class SQLiteBackend(BackendBase): + def __init__(self, path: str) -> None: + self.path = path + self.connection: sqlite3.Connection | None = None + + def to_dict(self) -> Dict[str, Any]: + return {"path": self.path} + + @classmethod + def from_dict(cls, params: Dict[str, Any]) -> "SQLiteBackend": + try: + return cls(path=params["path"]) + except KeyError as e: + raise ValueError(f"Missing required param: {e}") from e + + def initialize(self) -> None: + logger.debug(f"Initializing SQLite backend at path: {self.path}") + try: + self.connection = sqlite3.connect(self.path) + except sqlite3.Error as e: + raise M3InitializationError( + f"SQLite connection failed for path {self.path}", details=str(e) + ) from e + + def execute(self, operation: str) -> str: + if not self.connection: + raise M3ValidationError("SQLite backend not initialized") + try: + dataframe = pd.read_sql_query(operation, self.connection) + return self._format_result(dataframe) + except sqlite3.Error as e: + raise M3ValidationError(f"SQLite execution failed: {e}") from e + + def _format_result(self, 
dataframe: pd.DataFrame) -> str: + if dataframe.empty: + return "No results found" + if len(dataframe) > 50: + result = dataframe.head(50).to_string(index=False) + result += f"\n... ({len(dataframe)} total rows, showing first 50)" + else: + result = dataframe.to_string(index=False) + return result + + def teardown(self) -> None: + if self.connection: + self.connection.close() + self.connection = None + + def __getstate__(self) -> dict: + state = super().__getstate__() + state["connection"] = None + return state + + def __setstate__(self, state: dict) -> None: + super().__setstate__(state) + self.connection = None diff --git a/src/m3/core/tool/backend/base.py b/src/m3/core/tool/backend/base.py new file mode 100644 index 0000000..e4914db --- /dev/null +++ b/src/m3/core/tool/backend/base.py @@ -0,0 +1,40 @@ +import logging +from abc import ABC, abstractmethod + +from beartype import beartype +from beartype.typing import Any, Dict + +logger = logging.getLogger(__name__) + + +@beartype +class BackendBase(ABC): + """Base abstract class for M3 tool various backends needs.""" + + @abstractmethod + def execute(self, query: str) -> str: + raise NotImplementedError("Subclasses must implement 'execute' method.") + + @abstractmethod + def initialize(self) -> None: + raise NotImplementedError("Subclasses must implement 'initialize' method.") + + @abstractmethod + def teardown(self) -> None: + raise NotImplementedError("Subclasses must implement 'teardown' method.") + + @abstractmethod + def to_dict(self) -> Dict[str, Any]: + raise NotImplementedError("Subclasses must implement 'to_dict' method.") + + @classmethod + @abstractmethod + def from_dict(cls, params: Dict[str, Any]) -> "BackendBase": + raise NotImplementedError("Subclasses must implement 'from_dict' method.") + + def __getstate__(self) -> dict: + state = self.__dict__.copy() + return state + + def __setstate__(self, state: dict) -> None: + self.__dict__.update(state) diff --git a/src/m3/core/tool/backend/registry.py 
b/src/m3/core/tool/backend/registry.py new file mode 100644 index 0000000..0b80a78 --- /dev/null +++ b/src/m3/core/tool/backend/registry.py @@ -0,0 +1,14 @@ +import logging + +from beartype.typing import Dict, Type + +from m3.core.tool.backend.backends.bigquery import BigQueryBackend +from m3.core.tool.backend.backends.sqlite import SQLiteBackend +from m3.core.tool.backend.base import BackendBase + +logger = logging.getLogger(__name__) + +BACKEND_REGISTRY: Dict[str, Type[BackendBase]] = { + "sqlite": SQLiteBackend, + "bigquery": BigQueryBackend, +} diff --git a/src/m3/core/tool/base.py b/src/m3/core/tool/base.py new file mode 100644 index 0000000..ce8c776 --- /dev/null +++ b/src/m3/core/tool/base.py @@ -0,0 +1,70 @@ +import logging +from abc import ABC, abstractmethod +from collections.abc import Callable + +from beartype import beartype +from beartype.typing import Any, Dict, Optional + +from m3.core.tool.backend.base import BackendBase +from m3.core.utils.exceptions import M3InitializationError + +logger = logging.getLogger(__name__) + + +@beartype +class BaseTool(ABC): + """Base class for M3 tools, providing shared structure for initialization and lifecycle.""" + + def __init__(self) -> None: + self.required_env_vars: Dict[str, Optional[str]] = {} + self.backends: Dict[str, BackendBase] = {} + + @abstractmethod + def actions(self) -> list[Callable]: + raise NotImplementedError( + "Subclasses must implement 'actions' method to return list of callable tools/functions." 
+ ) + + def initialize(self) -> None: + try: + for backend in self.backends.values(): + backend.initialize() + except Exception as e: + raise M3InitializationError( + "Tool initialization failed during backend setup", details=str(e) + ) from e + self._initialize() + + def teardown(self) -> None: + self._teardown() + try: + for backend in self.backends.values(): + backend.teardown() + except Exception as e: + logger.error(f"Teardown error: {e}", exc_info=True) + + def post_load(self) -> None: + self._post_load() + self.initialize() + + def _initialize(self) -> None: # noqa: B027 + pass + + def _teardown(self) -> None: # noqa: B027 + pass + + def _post_load(self) -> None: # noqa: B027 + pass + + @abstractmethod + def to_dict(self) -> Dict[str, Any]: + raise NotImplementedError( + "Subclasses must implement 'to_dict' method to serialize initialization parameters." + ) + + @classmethod + @abstractmethod + def from_dict(cls, params: Dict[str, Any]) -> "BaseTool": + raise NotImplementedError( + "Subclasses must implement 'from_dict' method to reconstruct from serialized parameters." + ) diff --git a/src/m3/core/tool/cli/base.py b/src/m3/core/tool/cli/base.py new file mode 100644 index 0000000..1ed2912 --- /dev/null +++ b/src/m3/core/tool/cli/base.py @@ -0,0 +1,39 @@ +from abc import ABC, abstractmethod + +import typer +from beartype import beartype +from beartype.typing import Any, Dict, TypedDict + + +class ToolConfig(TypedDict): + env_vars: Dict[str, str] + tool_params: Dict[str, Any] + + +@beartype +class BaseToolCLI(ABC): + """Base class for M3 tool CLI implementations, defining tool-based command structure.""" + + @classmethod + @abstractmethod + def get_app(cls) -> typer.Typer: + raise NotImplementedError("Subclasses must provide Typer app.") + + @classmethod + @abstractmethod + def init(cls, *args: Any, **kwargs: Any) -> None: + raise NotImplementedError( + "Subclasses must implement init as tool starting block." 
+ ) + + @classmethod + @abstractmethod + def configure(cls) -> ToolConfig: + raise NotImplementedError( + "Subclasses must implement configure method to return ToolConfig." + ) + + @classmethod + @abstractmethod + def status(cls, *args: Any, **kwargs: Any) -> None: + raise NotImplementedError("Subclasses must implement status.") From 6208ace83e4addca023de8a8c36c7b21b4756bfa Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:46:13 +0100 Subject: [PATCH 07/15] feat(core): add M3 starting MCP server script --- src/m3/core/server.py | 45 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 src/m3/core/server.py diff --git a/src/m3/core/server.py b/src/m3/core/server.py new file mode 100644 index 0000000..c7ccc78 --- /dev/null +++ b/src/m3/core/server.py @@ -0,0 +1,45 @@ +import logging +import os + +from beartype import beartype + +from m3.core.utils.exceptions import M3ValidationError +from m3.core.utils.logging import setup_logging +from m3.m3 import M3 + +logger = logging.getLogger(__name__) + + +@beartype +def main() -> None: + setup_logging(level=os.getenv("M3_LOG_LEVEL", "INFO")) + logger.info("Starting M3 MCP server...") + + config_path = os.getenv("M3_CONFIG_PATH") + if not config_path: + raise M3ValidationError( + "M3_CONFIG_PATH env var not set. Generate a config via CLI (e.g., 'm3 build --config-path m3_pipeline.json') and set it." + ) + + try: + m3 = M3.load(config_path) + logger.info(f"Loaded pipeline from config: {config_path}") + except (FileNotFoundError, M3ValidationError) as e: + logger.error(f"Failed to load config: {e}") + raise + + m3.build() + m3.run() + + +if __name__ == "__main__": + try: + main() + except M3ValidationError as e: + logger.error( + f"Validation failed: {e}. Generate and set M3_CONFIG_PATH via CLI." 
+ ) + raise + except Exception as e: + logger.error(f"Failed to start M3 server: {e}") + raise From 094867fb11bbea326417493e8cfbe8434e98f3fb Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:48:01 +0100 Subject: [PATCH 08/15] refactor(tools): add MIMIC M3 Tool w/ auto-tools-registry --- src/m3/auth.py | 392 ---------- src/m3/data_io.py | 334 --------- src/m3/mcp_server.py | 690 ------------------ src/m3/tools/__init__.py | 0 src/m3/tools/mimic/__init__.py | 4 + src/m3/tools/mimic/cli.py | 283 +++++++ src/m3/tools/mimic/components/__init__.py | 4 + src/m3/tools/mimic/components/auth.py | 224 ++++++ src/m3/tools/mimic/components/data_io.py | 225 ++++++ src/m3/tools/mimic/components/utils.py | 88 +++ .../tools/mimic/configurations/datasets.yaml | 5 + .../tools/mimic/configurations/env_vars.yaml | 87 +++ .../tools/mimic/configurations/security.yaml | 40 + src/m3/tools/mimic/mimic.py | 512 +++++++++++++ src/m3/tools/registry.py | 75 ++ 15 files changed, 1547 insertions(+), 1416 deletions(-) delete mode 100644 src/m3/auth.py delete mode 100644 src/m3/data_io.py delete mode 100644 src/m3/mcp_server.py create mode 100644 src/m3/tools/__init__.py create mode 100644 src/m3/tools/mimic/__init__.py create mode 100644 src/m3/tools/mimic/cli.py create mode 100644 src/m3/tools/mimic/components/__init__.py create mode 100644 src/m3/tools/mimic/components/auth.py create mode 100644 src/m3/tools/mimic/components/data_io.py create mode 100644 src/m3/tools/mimic/components/utils.py create mode 100644 src/m3/tools/mimic/configurations/datasets.yaml create mode 100644 src/m3/tools/mimic/configurations/env_vars.yaml create mode 100644 src/m3/tools/mimic/configurations/security.yaml create mode 100644 src/m3/tools/mimic/mimic.py create mode 100644 src/m3/tools/registry.py diff --git a/src/m3/auth.py b/src/m3/auth.py deleted file mode 100644 index 4a1cab5..0000000 --- a/src/m3/auth.py +++ /dev/null @@ -1,392 +0,0 @@ -""" -OAuth2 Authentication Module for M3 MCP Server 
-Provides secure authentication using OAuth2 with JWT tokens. -""" - -import os -import time -from datetime import datetime, timedelta, timezone -from functools import wraps -from typing import Any -from urllib.parse import urljoin - -import httpx -import jwt -from cryptography.hazmat.primitives import serialization -from cryptography.hazmat.primitives.asymmetric import rsa - -from m3.config import logger - - -class AuthenticationError(Exception): - """Raised when authentication fails.""" - - pass - - -class TokenValidationError(Exception): - """Raised when token validation fails.""" - - pass - - -class OAuth2Config: - """OAuth2 configuration management.""" - - def __init__(self): - self.enabled = os.getenv("M3_OAUTH2_ENABLED", "false").lower() == "true" - - # OAuth2 Provider Configuration - self.issuer_url = os.getenv("M3_OAUTH2_ISSUER_URL", "") - self.client_id = os.getenv("M3_OAUTH2_CLIENT_ID", "") - self.client_secret = os.getenv("M3_OAUTH2_CLIENT_SECRET", "") - self.audience = os.getenv("M3_OAUTH2_AUDIENCE", "") - - # Scopes required for access - self.required_scopes = self._parse_scopes( - os.getenv("M3_OAUTH2_REQUIRED_SCOPES", "read:mimic-data") - ) - - # Token validation settings - self.validate_exp = ( - os.getenv("M3_OAUTH2_VALIDATE_EXP", "true").lower() == "true" - ) - self.validate_aud = ( - os.getenv("M3_OAUTH2_VALIDATE_AUD", "true").lower() == "true" - ) - self.validate_iss = ( - os.getenv("M3_OAUTH2_VALIDATE_ISS", "true").lower() == "true" - ) - - # JWKS settings - self.jwks_url = os.getenv("M3_OAUTH2_JWKS_URL", "") - self.jwks_cache_ttl = int( - os.getenv("M3_OAUTH2_JWKS_CACHE_TTL", "3600") - ) # 1 hour - - # Rate limiting - self.rate_limit_enabled = ( - os.getenv("M3_OAUTH2_RATE_LIMIT_ENABLED", "true").lower() == "true" - ) - self.rate_limit_requests = int( - os.getenv("M3_OAUTH2_RATE_LIMIT_REQUESTS", "100") - ) - self.rate_limit_window = int( - os.getenv("M3_OAUTH2_RATE_LIMIT_WINDOW", "3600") - ) # 1 hour - - # Cache for JWKS and validation - 
self._jwks_cache = {} - self._jwks_cache_time = 0 - self._rate_limit_cache = {} - - if self.enabled: - self._validate_config() - - def _parse_scopes(self, scopes_str: str) -> set[str]: - """Parse comma-separated scopes string.""" - return set(scope.strip() for scope in scopes_str.split(",") if scope.strip()) - - def _validate_config(self): - """Validate OAuth2 configuration.""" - if not self.issuer_url: - raise ValueError("M3_OAUTH2_ISSUER_URL is required when OAuth2 is enabled") - - if not self.audience: - raise ValueError("M3_OAUTH2_AUDIENCE is required when OAuth2 is enabled") - - if not self.jwks_url: - # Auto-discover JWKS URL from issuer - self.jwks_url = urljoin( - self.issuer_url.rstrip("/"), "/.well-known/jwks.json" - ) - - logger.info(f"OAuth2 authentication enabled with issuer: {self.issuer_url}") - - -class OAuth2Validator: - """OAuth2 token validator.""" - - def __init__(self, config: OAuth2Config): - self.config = config - self.http_client = httpx.Client(timeout=30.0) - - async def validate_token(self, token: str) -> dict[str, Any]: - """ - Validate an OAuth2 access token. 
- - Args: - token: The access token to validate - - Returns: - Decoded token claims - - Raises: - TokenValidationError: If token is invalid - """ - try: - # Get JWKS for token validation - jwks = await self._get_jwks() - - # Decode token header to get key ID - unverified_header = jwt.get_unverified_header(token) - kid = unverified_header.get("kid") - - if not kid: - raise TokenValidationError("Token missing key ID (kid)") - - # Find the appropriate key - key = self._find_key(jwks, kid) - if not key: - raise TokenValidationError(f"No key found for kid: {kid}") - - # Convert JWK to PEM format for verification - public_key = self._jwk_to_pem(key) - - # Validate token - payload = jwt.decode( - token, - public_key, - algorithms=["RS256", "ES256"], - audience=self.config.audience if self.config.validate_aud else None, - issuer=self.config.issuer_url if self.config.validate_iss else None, - options={ - "verify_exp": self.config.validate_exp, - "verify_aud": self.config.validate_aud, - "verify_iss": self.config.validate_iss, - }, - ) - - # Validate scopes - self._validate_scopes(payload) - - # Check rate limits - if self.config.rate_limit_enabled: - self._check_rate_limit(payload) - - return payload - - except jwt.ExpiredSignatureError: - raise TokenValidationError("Token has expired") - except jwt.InvalidAudienceError: - raise TokenValidationError("Invalid token audience") - except jwt.InvalidIssuerError: - raise TokenValidationError("Invalid token issuer") - except jwt.InvalidTokenError as e: - raise TokenValidationError(f"Invalid token: {e}") - except Exception as e: - raise TokenValidationError(f"Token validation failed: {e}") - - async def _get_jwks(self) -> dict[str, Any]: - """Get JWKS (JSON Web Key Set) from the OAuth2 provider.""" - current_time = time.time() - - # Check cache - if ( - self._jwks_cache - and current_time - self.config._jwks_cache_time < self.config.jwks_cache_ttl - ): - return self.config._jwks_cache - - # Fetch JWKS - try: - response = 
self.http_client.get(self.config.jwks_url) - response.raise_for_status() - jwks = response.json() - - # Cache the result - self.config._jwks_cache = jwks - self.config._jwks_cache_time = current_time - - return jwks - - except Exception as e: - raise TokenValidationError(f"Failed to fetch JWKS: {e}") - - def _find_key(self, jwks: dict[str, Any], kid: str) -> dict[str, Any] | None: - """Find a key in JWKS by key ID.""" - keys = jwks.get("keys", []) - for key in keys: - if key.get("kid") == kid: - return key - return None - - def _jwk_to_pem(self, jwk: dict[str, Any]) -> bytes: - """Convert JWK to PEM format.""" - try: - # Use python-jose for JWK to PEM conversion - from jose.utils import base64url_decode - - if jwk.get("kty") == "RSA": - # RSA key - n = base64url_decode(jwk["n"]) - e = base64url_decode(jwk["e"]) - - # Create RSA public key - public_numbers = rsa.RSAPublicNumbers( - int.from_bytes(e, byteorder="big"), - int.from_bytes(n, byteorder="big"), - ) - public_key = public_numbers.public_key() - - # Convert to PEM - pem = public_key.public_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PublicFormat.SubjectPublicKeyInfo, - ) - return pem - else: - raise TokenValidationError(f"Unsupported key type: {jwk.get('kty')}") - - except Exception as e: - raise TokenValidationError(f"Failed to convert JWK to PEM: {e}") - - def _validate_scopes(self, payload: dict[str, Any]): - """Validate that token has required scopes.""" - if not self.config.required_scopes: - return - - token_scopes = set() - - # Check different possible scope claims - scope_claim = payload.get("scope", "") - if isinstance(scope_claim, str): - token_scopes = set(scope_claim.split()) - elif isinstance(scope_claim, list): - token_scopes = set(scope_claim) - - # Also check 'scp' claim (some providers use this) - scp_claim = payload.get("scp", []) - if isinstance(scp_claim, list): - token_scopes.update(scp_claim) - - # Check if required scopes are present - missing_scopes = 
self.config.required_scopes - token_scopes - if missing_scopes: - raise TokenValidationError(f"Missing required scopes: {missing_scopes}") - - def _check_rate_limit(self, payload: dict[str, Any]): - """Check rate limits for the user.""" - user_id = payload.get("sub", "unknown") - current_time = time.time() - window_start = current_time - self.config.rate_limit_window - - # Clean old entries - user_requests = self.config._rate_limit_cache.get(user_id, []) - user_requests = [ - req_time for req_time in user_requests if req_time > window_start - ] - - # Check if limit exceeded - if len(user_requests) >= self.config.rate_limit_requests: - raise TokenValidationError("Rate limit exceeded") - - # Add current request - user_requests.append(current_time) - self.config._rate_limit_cache[user_id] = user_requests - - -# Global instances -_oauth2_config = None -_oauth2_validator = None - - -def init_oauth2(): - """Initialize OAuth2 authentication.""" - global _oauth2_config, _oauth2_validator - - _oauth2_config = OAuth2Config() - if _oauth2_config.enabled: - _oauth2_validator = OAuth2Validator(_oauth2_config) - logger.info("OAuth2 authentication initialized") - else: - logger.info("OAuth2 authentication disabled") - - -def require_oauth2(func): - """Decorator to require OAuth2 authentication for MCP tools.""" - - @wraps(func) - def wrapper(*args, **kwargs): - if not _oauth2_config or not _oauth2_config.enabled: - # If OAuth2 is disabled, allow access - return func(*args, **kwargs) - - # Extract token from environment (in real implementation, this would come from request headers) - token = os.getenv("M3_OAUTH2_TOKEN", "") - if not token: - return "Error: Missing OAuth2 access token" - - # Remove "Bearer " prefix if present - if token.startswith("Bearer "): - token = token[7:] - - try: - # For synchronous compatibility, we'll do a simple validation - # In a real async environment, this would be await _oauth2_validator.validate_token(token) - - # Basic token structure check (JWT 
has 3 parts separated by dots) - if not token or len(token.split(".")) != 3: - return "Error: Invalid token format" - - # In production, you would validate the token here - # For now, we'll do a basic check and assume the token is valid if OAuth2 is properly configured - - return func(*args, **kwargs) - - except Exception as e: - logger.error(f"OAuth2 authentication error: {e}") - return "Error: Authentication system error" - - return wrapper - - -def get_oauth2_config() -> OAuth2Config | None: - """Get the current OAuth2 configuration.""" - return _oauth2_config - - -def is_oauth2_enabled() -> bool: - """Check if OAuth2 authentication is enabled.""" - return _oauth2_config is not None and _oauth2_config.enabled - - -def generate_test_token( - issuer: str = "https://test-issuer.example.com", - audience: str = "m3-api", - subject: str = "test-user", - scopes: list[str] | None = None, - expires_in: int = 3600, -) -> str: - """ - Generate a test JWT token for development/testing. - - WARNING: This should only be used for testing! 
- """ - if scopes is None: - scopes = ["read:mimic-data"] - - now = datetime.now(timezone.utc) - claims = { - "iss": issuer, - "aud": audience, - "sub": subject, - "iat": int(now.timestamp()), - "exp": int((now + timedelta(seconds=expires_in)).timestamp()), - "scope": " ".join(scopes), - "email": f"{subject}@example.com", - } - - # Generate a test key (DO NOT use in production) - private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) - - private_pem = private_key.private_bytes( - encoding=serialization.Encoding.PEM, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption(), - ) - - # Sign the token - token = jwt.encode(claims, private_pem, algorithm="RS256") - - return token diff --git a/src/m3/data_io.py b/src/m3/data_io.py deleted file mode 100644 index dffb973..0000000 --- a/src/m3/data_io.py +++ /dev/null @@ -1,334 +0,0 @@ -from pathlib import Path -from urllib.parse import urljoin, urlparse - -import polars as pl -import requests -import typer -from bs4 import BeautifulSoup - -from m3.config import get_dataset_config, get_dataset_raw_files_path, logger - -COMMON_USER_AGENT = ( - "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 " - "(KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36" -) - - -def _download_single_file( - url: str, target_filepath: Path, session: requests.Session -) -> bool: - """Downloads a single file with progress tracking.""" - logger.debug(f"Attempting to download {url} to {target_filepath}...") - try: - response = session.get(url, stream=True, timeout=60) - response.raise_for_status() - total_size = int(response.headers.get("content-length", 0)) - file_display_name = target_filepath.name - - target_filepath.parent.mkdir(parents=True, exist_ok=True) - with ( - open(target_filepath, "wb") as f, - typer.progressbar( - length=total_size, label=f"Downloading {file_display_name}" - ) as progress, - ): - for chunk in response.iter_content(chunk_size=8192): # Standard 
chunk size - if chunk: - f.write(chunk) - progress.update(len(chunk)) - logger.info(f"Successfully downloaded: {file_display_name}") - return True - except requests.exceptions.HTTPError as e: - status = e.response.status_code - if status == 404: - logger.error(f"Download failed (404 Not Found): {url}.") - else: - logger.error(f"HTTP error {status} downloading {url}: {e.response.reason}") - except requests.exceptions.Timeout: - logger.error(f"Timeout occurred while downloading {url}.") - except requests.exceptions.RequestException as e: - logger.error(f"A network or request error occurred downloading {url}: {e}") - except OSError as e: - logger.error(f"File system error writing {target_filepath}: {e}") - - # If download failed, attempt to remove partially downloaded file - if target_filepath.exists(): - try: - target_filepath.unlink() - except OSError as e: - logger.error(f"Could not remove incomplete file {target_filepath}: {e}") - return False - - -def _scrape_urls_from_html_page( - page_url: str, session: requests.Session, file_suffix: str = ".csv.gz" -) -> list[str]: - """Scrapes a webpage for links ending with a specific suffix.""" - found_urls = [] - logger.debug(f"Scraping for '{file_suffix}' links on page: {page_url}") - try: - page_response = session.get(page_url, timeout=30) - page_response.raise_for_status() - soup = BeautifulSoup(page_response.content, "html.parser") - for link_tag in soup.find_all("a", href=True): - href_path = link_tag["href"] - # Basic validation of the link - if ( - href_path.endswith(file_suffix) - and not href_path.startswith(("?", "#")) - and ".." 
not in href_path - ): - absolute_url = urljoin(page_url, href_path) - found_urls.append(absolute_url) - except requests.exceptions.RequestException as e: - logger.error(f"Could not access or parse page {page_url} for scraping: {e}") - return found_urls - - -def _download_dataset_files( - dataset_name: str, dataset_config: dict, raw_files_root_dir: Path -) -> bool: - """Downloads all relevant files for a dataset based on its configuration.""" - base_listing_url = dataset_config["file_listing_url"] - subdirs_to_scan = dataset_config.get("subdirectories_to_scan", []) - - logger.info( - f"Preparing to download {dataset_name} files from base URL: {base_listing_url}" - ) - session = requests.Session() - session.headers.update({"User-Agent": COMMON_USER_AGENT}) - - all_files_to_process = [] # List of (url, local_target_path) - - for subdir_name in subdirs_to_scan: - subdir_listing_url = urljoin(base_listing_url, f"{subdir_name}/") - logger.info(f"Scanning subdirectory for CSVs: {subdir_listing_url}") - csv_urls_in_subdir = _scrape_urls_from_html_page(subdir_listing_url, session) - - if not csv_urls_in_subdir: - logger.warning( - f"No .csv.gz files found in subdirectory: {subdir_listing_url}" - ) - continue - - for file_url in csv_urls_in_subdir: - url_path_obj = Path(urlparse(file_url).path) - base_listing_url_path_obj = Path(urlparse(base_listing_url).path) - relative_file_path: Path - - try: - # Attempt to make file path relative to base URL's path part - if url_path_obj.as_posix().startswith( - base_listing_url_path_obj.as_posix() - ): - relative_file_path = url_path_obj.relative_to( - base_listing_url_path_obj - ) - else: - # Fallback if URL structure is unexpected - # (e.g., flat list of files not matching base structure) - logger.warning( - f"Path calculation fallback for {url_path_obj} vs " - f"{base_listing_url_path_obj}. 
" - f"Using {Path(subdir_name) / url_path_obj.name}" - ) - relative_file_path = Path(subdir_name) / url_path_obj.name - except ( - ValueError - ) as e_rel: # Handles cases where relative_to is not possible - logger.error( - f"Path relative_to error for {url_path_obj} from " - f"{base_listing_url_path_obj}: {e_rel}. " - f"Defaulting to {Path(subdir_name) / url_path_obj.name}" - ) - relative_file_path = Path(subdir_name) / url_path_obj.name - - local_target_path = raw_files_root_dir / relative_file_path - all_files_to_process.append((file_url, local_target_path)) - - if not all_files_to_process: - logger.error( - f"No '.csv.gz' download links found after scanning {base_listing_url} " - f"and its subdirectories {subdirs_to_scan} for dataset '{dataset_name}'." - ) - return False - - # Deduplicate and sort for consistent processing order - unique_files_to_process = sorted( - list(set(all_files_to_process)), key=lambda x: x[1] - ) - logger.info( - f"Found {len(unique_files_to_process)} unique '.csv.gz' files to download " - f"for {dataset_name}." - ) - - downloaded_count = 0 - for file_url, target_filepath in unique_files_to_process: - if not _download_single_file(file_url, target_filepath, session): - logger.error( - f"Critical download failed for '{target_filepath.name}'. " - "Aborting dataset download." - ) - return False # Stop if any single download fails - downloaded_count += 1 - - # Success only if all identified files were downloaded - return downloaded_count == len(unique_files_to_process) - - -def _load_csv_with_robust_parsing(csv_file_path: Path, table_name: str) -> pl.DataFrame: - """ - Load a CSV file with proper type inference by scanning the entire file. 
- """ - df = pl.read_csv( - source=csv_file_path, - infer_schema_length=None, # Scan entire file for proper type inference - try_parse_dates=True, - ignore_errors=False, - null_values=["", "NULL", "null", "\\N", "NA"], - ) - - # Log empty columns (this is normal, not an error) - if df.height > 0: - empty_columns = [col for col in df.columns if df[col].is_null().all()] - if empty_columns: - logger.info( - f" Table '{table_name}': Found {len(empty_columns)} empty column(s): " - f"{', '.join(empty_columns[:5])}" - + ( - f" (and {len(empty_columns) - 5} more)" - if len(empty_columns) > 5 - else "" - ) - ) - - return df - - -def _etl_csv_collection_to_sqlite(csv_source_dir: Path, db_target_path: Path) -> bool: - """Loads all .csv.gz files from a directory structure into an SQLite database.""" - db_target_path.parent.mkdir(parents=True, exist_ok=True) - # Polars uses this format for SQLite connections - db_connection_uri = f"sqlite:///{db_target_path.resolve()}" - logger.info( - f"Starting ETL: loading CSVs from '{csv_source_dir}' to SQLite DB " - f"at '{db_target_path}'" - ) - - csv_file_paths = list(csv_source_dir.rglob("*.csv.gz")) - if not csv_file_paths: - logger.error( - "ETL Error: No .csv.gz files found (recursively) in source directory: " - f"{csv_source_dir}" - ) - return False - - successfully_loaded_count = 0 - files_with_errors = [] - logger.info(f"Found {len(csv_file_paths)} .csv.gz files for ETL process.") - - for i, csv_file_path in enumerate(csv_file_paths): - # Generate table name from file path relative to the source directory - # e.g., source_dir/hosp/admissions.csv.gz -> hosp_admissions - relative_path = csv_file_path.relative_to(csv_source_dir) - table_name_parts = [part.lower() for part in relative_path.parts] - table_name = ( - "_".join(table_name_parts) - .replace(".csv.gz", "") - .replace("-", "_") - .replace(".", "_") - ) - - logger.info( - f"[{i + 1}/{len(csv_file_paths)}] ETL: Processing '{relative_path}' " - f"into SQLite table 
'{table_name}'..." - ) - - try: - # Use the robust parsing function - df = _load_csv_with_robust_parsing(csv_file_path, table_name) - - df.write_database( - table_name=table_name, - connection=db_connection_uri, - if_table_exists="replace", # Overwrite table if it exists - engine="sqlalchemy", # Recommended engine for Polars with SQLite - ) - logger.info( - f" Successfully loaded '{relative_path}' into table '{table_name}' " - f"({df.height} rows, {df.width} columns)." - ) - successfully_loaded_count += 1 - - except Exception as e: - err_msg = ( - f"Unexpected error during ETL for '{relative_path}' " - f"(target table '{table_name}'): {e}" - ) - logger.error(err_msg, exc_info=True) - files_with_errors.append(f"{relative_path}: {e!s}") - # Continue to process other files even if one fails - - if files_with_errors: - logger.warning( - "ETL completed with errors during processing for " - f"{len(files_with_errors)} file(s):" - ) - for detail in files_with_errors: - logger.warning(f" - {detail}") - - # Strict success: all found files must be loaded without Polars/DB errors. - if successfully_loaded_count == len(csv_file_paths): - logger.info( - f"All {len(csv_file_paths)} CSV files successfully processed & loaded into " - f"{db_target_path}." - ) - return True - elif successfully_loaded_count > 0: - logger.warning( - f"Partially completed ETL: Loaded {successfully_loaded_count} out of " - f"{len(csv_file_paths)} files. Some files encountered errors during " - "their individual processing and were not loaded." - ) - return False - else: # No files were successfully loaded - logger.error( - "ETL process failed: No CSV files were successfully loaded into the " - f"database from {csv_source_dir}." 
- ) - return False - - -def initialize_dataset(dataset_name: str, db_target_path: Path) -> bool: - """Initializes a dataset: downloads files and loads them into a database.""" - dataset_config = get_dataset_config(dataset_name) - if not dataset_config: - logger.error(f"Configuration for dataset '{dataset_name}' not found.") - return False - - raw_files_root_dir = get_dataset_raw_files_path(dataset_name) - raw_files_root_dir.mkdir(parents=True, exist_ok=True) - - logger.info(f"Starting initialization for dataset: {dataset_name}") - download_ok = _download_dataset_files( - dataset_name, dataset_config, raw_files_root_dir - ) - - if not download_ok: - logger.error( - f"Download phase failed for dataset '{dataset_name}'. ETL skipped." - ) - return False - - logger.info(f"Download phase complete for '{dataset_name}'. Starting ETL phase.") - etl_ok = _etl_csv_collection_to_sqlite(raw_files_root_dir, db_target_path) - - if not etl_ok: - logger.error(f"ETL phase failed for dataset '{dataset_name}'.") - return False - - logger.info( - f"Dataset '{dataset_name}' successfully initialized. " - f"Database at: {db_target_path}" - ) - return True diff --git a/src/m3/mcp_server.py b/src/m3/mcp_server.py deleted file mode 100644 index cc8bd91..0000000 --- a/src/m3/mcp_server.py +++ /dev/null @@ -1,690 +0,0 @@ -""" -M3 MCP Server - MIMIC-IV + MCP + Models -Provides MCP tools for querying MIMIC-IV data via SQLite or BigQuery. 
-""" - -import os -import sqlite3 -from pathlib import Path - -import pandas as pd -import sqlparse -from fastmcp import FastMCP - -from m3.auth import init_oauth2, require_oauth2 -from m3.config import get_default_database_path - -# Create FastMCP server instance -mcp = FastMCP("m3") - -# Global variables for backend configuration -_backend = None -_db_path = None -_bq_client = None -_project_id = None - - -def _validate_limit(limit: int) -> bool: - """Validate limit parameter to prevent resource exhaustion.""" - return isinstance(limit, int) and 0 < limit <= 1000 - - -def _is_safe_query(sql_query: str, internal_tool: bool = False) -> tuple[bool, str]: - """Secure SQL validation - blocks injection attacks, allows legitimate queries.""" - try: - if not sql_query or not sql_query.strip(): - return False, "Empty query" - - # Parse SQL to validate structure - parsed = sqlparse.parse(sql_query.strip()) - if not parsed: - return False, "Invalid SQL syntax" - - # Block multiple statements (main injection vector) - if len(parsed) > 1: - return False, "Multiple statements not allowed" - - statement = parsed[0] - statement_type = statement.get_type() - - # Allow SELECT and PRAGMA (PRAGMA is needed for schema exploration) - if statement_type not in ( - "SELECT", - "UNKNOWN", - ): # PRAGMA shows as UNKNOWN in sqlparse - return False, "Only SELECT and PRAGMA queries allowed" - - # Check if it's a PRAGMA statement (these are safe for schema exploration) - sql_upper = sql_query.strip().upper() - if sql_upper.startswith("PRAGMA"): - return True, "Safe PRAGMA statement" - - # For SELECT statements, block dangerous injection patterns - if statement_type == "SELECT": - # Block dangerous write operations within SELECT - dangerous_keywords = { - "INSERT", - "UPDATE", - "DELETE", - "DROP", - "CREATE", - "ALTER", - "TRUNCATE", - "REPLACE", - "MERGE", - "EXEC", - "EXECUTE", - } - - for keyword in dangerous_keywords: - if f" {keyword} " in f" {sql_upper} ": - return False, f"Write 
operation not allowed: {keyword}" - - # Block common injection patterns that are rarely used in legitimate analytics - injection_patterns = [ - # Classic SQL injection patterns - ("1=1", "Classic injection pattern"), - ("OR 1=1", "Boolean injection pattern"), - ("AND 1=1", "Boolean injection pattern"), - ("OR '1'='1'", "String injection pattern"), - ("AND '1'='1'", "String injection pattern"), - ("WAITFOR", "Time-based injection"), - ("SLEEP(", "Time-based injection"), - ("BENCHMARK(", "Time-based injection"), - ("LOAD_FILE(", "File access injection"), - ("INTO OUTFILE", "File write injection"), - ("INTO DUMPFILE", "File write injection"), - ] - - for pattern, description in injection_patterns: - if pattern in sql_upper: - return False, f"Injection pattern detected: {description}" - - # Context-aware protection: Block suspicious table/column names not in medical databases - suspicious_names = [ - "PASSWORD", - "ADMIN", - "USER", - "LOGIN", - "AUTH", - "TOKEN", - "CREDENTIAL", - "SECRET", - "KEY", - "HASH", - "SALT", - "SESSION", - "COOKIE", - ] - - for name in suspicious_names: - if name in sql_upper: - return ( - False, - f"Suspicious identifier detected: {name} (not medical data)", - ) - - return True, "Safe" - - except Exception as e: - return False, f"Validation error: {e}" - - -def _init_backend(): - """Initialize the backend based on environment variables.""" - global _backend, _db_path, _bq_client, _project_id - - # Initialize OAuth2 authentication - init_oauth2() - - _backend = os.getenv("M3_BACKEND", "sqlite") - - if _backend == "sqlite": - _db_path = os.getenv("M3_DB_PATH") - if not _db_path: - # Use default database path - _db_path = get_default_database_path("mimic-iv-demo") - - # Ensure the database exists - if not Path(_db_path).exists(): - raise FileNotFoundError(f"SQLite database not found: {_db_path}") - - elif _backend == "bigquery": - try: - from google.cloud import bigquery - except ImportError: - raise ImportError( - "BigQuery dependencies not 
found. Install with: pip install google-cloud-bigquery" - ) - - # User's GCP project ID for authentication and billing - # MIMIC-IV data resides in the public 'physionet-data' project - _project_id = os.getenv("M3_PROJECT_ID", "physionet-data") - try: - _bq_client = bigquery.Client(project=_project_id) - except Exception as e: - raise RuntimeError(f"Failed to initialize BigQuery client: {e}") - - else: - raise ValueError(f"Unsupported backend: {_backend}") - - -# Initialize backend when module is imported -_init_backend() - - -def _get_backend_info() -> str: - """Get current backend information for display in responses.""" - if _backend == "sqlite": - return f"๐Ÿ”ง **Current Backend:** SQLite (local database)\n๐Ÿ“ **Database Path:** {_db_path}\n" - else: - return f"๐Ÿ”ง **Current Backend:** BigQuery (cloud database)\nโ˜๏ธ **Project ID:** {_project_id}\n" - - -# ========================================== -# INTERNAL QUERY EXECUTION FUNCTIONS -# ========================================== -# These functions perform the actual database operations -# and are called by the MCP tools. This prevents MCP tools -# from calling other MCP tools, which violates the MCP protocol. - - -def _execute_sqlite_query(sql_query: str) -> str: - """Execute SQLite query - internal function.""" - try: - conn = sqlite3.connect(_db_path) - try: - df = pd.read_sql_query(sql_query, conn) - - if df.empty: - return "No results found" - - # Limit output size - if len(df) > 50: - result = df.head(50).to_string(index=False) - result += f"\n... 
({len(df)} total rows, showing first 50)" - else: - result = df.to_string(index=False) - - return result - finally: - conn.close() - except Exception as e: - # Re-raise the exception so the calling function can handle it with enhanced guidance - raise e - - -def _execute_bigquery_query(sql_query: str) -> str: - """Execute BigQuery query - internal function.""" - try: - from google.cloud import bigquery - - job_config = bigquery.QueryJobConfig() - query_job = _bq_client.query(sql_query, job_config=job_config) - df = query_job.to_dataframe() - - if df.empty: - return "No results found" - - # Limit output size - if len(df) > 50: - result = df.head(50).to_string(index=False) - result += f"\n... ({len(df)} total rows, showing first 50)" - else: - result = df.to_string(index=False) - - return result - - except Exception as e: - # Re-raise the exception so the calling function can handle it with enhanced guidance - raise e - - -def _execute_query_internal(sql_query: str) -> str: - """Internal query execution function that handles backend routing.""" - # Security check - is_safe, message = _is_safe_query(sql_query) - if not is_safe: - if "describe" in sql_query.lower() or "show" in sql_query.lower(): - return f"""โŒ **Security Error:** {message} - - ๐Ÿ” **For table structure:** Use `get_table_info('table_name')` instead of DESCRIBE - ๐Ÿ“‹ **Why this is better:** Shows columns, types, AND sample data to understand the actual data - - ๐Ÿ’ก **Recommended workflow:** - 1. `get_database_schema()` โ† See available tables - 2. `get_table_info('table_name')` โ† Explore structure - 3. `execute_mimic_query('SELECT ...')` โ† Run your analysis""" - - return f"โŒ **Security Error:** {message}\n\n๐Ÿ’ก **Tip:** Only SELECT statements are allowed for data analysis." 
- - try: - if _backend == "sqlite": - return _execute_sqlite_query(sql_query) - else: # bigquery - return _execute_bigquery_query(sql_query) - except Exception as e: - error_msg = str(e).lower() - - # Provide specific, actionable error guidance - suggestions = [] - - if "no such table" in error_msg or "table not found" in error_msg: - suggestions.append( - "๐Ÿ” **Table name issue:** Use `get_database_schema()` to see exact table names" - ) - suggestions.append( - f"๐Ÿ“‹ **Backend-specific naming:** {_backend} has specific table naming conventions" - ) - suggestions.append( - "๐Ÿ’ก **Quick fix:** Check if the table name matches exactly (case-sensitive)" - ) - - if "no such column" in error_msg or "column not found" in error_msg: - suggestions.append( - "๐Ÿ” **Column name issue:** Use `get_table_info('table_name')` to see available columns" - ) - suggestions.append( - "๐Ÿ“ **Common issue:** Column might be named differently (e.g., 'anchor_age' not 'age')" - ) - suggestions.append( - "๐Ÿ‘€ **Check sample data:** `get_table_info()` shows actual column names and sample values" - ) - - if "syntax error" in error_msg: - suggestions.append( - "๐Ÿ“ **SQL syntax issue:** Check quotes, commas, and parentheses" - ) - suggestions.append( - f"๐ŸŽฏ **Backend syntax:** Verify your SQL works with {_backend}" - ) - suggestions.append( - "๐Ÿ’ญ **Try simpler:** Start with `SELECT * FROM table_name LIMIT 5`" - ) - - if "describe" in error_msg.lower() or "show" in error_msg.lower(): - suggestions.append( - "๐Ÿ” **Schema exploration:** Use `get_table_info('table_name')` instead of DESCRIBE" - ) - suggestions.append( - "๐Ÿ“‹ **Better approach:** `get_table_info()` shows columns AND sample data" - ) - - if not suggestions: - suggestions.append( - "๐Ÿ” **Start exploration:** Use `get_database_schema()` to see available tables" - ) - suggestions.append( - "๐Ÿ“‹ **Check structure:** Use `get_table_info('table_name')` to understand the data" - ) - - suggestion_text = "\n".join(f" {s}" 
for s in suggestions) - - return f"""โŒ **Query Failed:** {e} - -๐Ÿ› ๏ธ **How to fix this:** -{suggestion_text} - -๐ŸŽฏ **Quick Recovery Steps:** -1. `get_database_schema()` โ† See what tables exist -2. `get_table_info('your_table')` โ† Check exact column names -3. Retry your query with correct names - -๐Ÿ“š **Current Backend:** {_backend} - table names and syntax are backend-specific""" - - -# ========================================== -# MCP TOOLS - PUBLIC API -# ========================================== -# These are the tools exposed via MCP protocol. -# They should NEVER call other MCP tools - only internal functions. - - -@mcp.tool() -@require_oauth2 -def get_database_schema() -> str: - """๐Ÿ” Discover what data is available in the MIMIC-IV database. - - **When to use:** Start here when you need to understand what tables exist, or when someone asks about data that might be in multiple tables. - - **What this does:** Shows all available tables so you can identify which ones contain the data you need. 
- - **Next steps after using this:** - - If you see relevant tables, use `get_table_info(table_name)` to explore their structure - - Common tables: `patients` (demographics), `admissions` (hospital stays), `icustays` (ICU data), `labevents` (lab results) - - Returns: - List of all available tables in the database with current backend info - """ - if _backend == "sqlite": - query = "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name" - result = _execute_query_internal(query) - return f"{_get_backend_info()}\n๐Ÿ“‹ **Available Tables:**\n{result}" - else: # bigquery - # Show fully qualified table names that are ready to copy-paste into queries - query = """ - SELECT CONCAT('`physionet-data.mimiciv_3_1_hosp.', table_name, '`') as query_ready_table_name - FROM `physionet-data.mimiciv_3_1_hosp.INFORMATION_SCHEMA.TABLES` - UNION ALL - SELECT CONCAT('`physionet-data.mimiciv_3_1_icu.', table_name, '`') as query_ready_table_name - FROM `physionet-data.mimiciv_3_1_icu.INFORMATION_SCHEMA.TABLES` - ORDER BY query_ready_table_name - """ - result = _execute_query_internal(query) - return f"{_get_backend_info()}\n๐Ÿ“‹ **Available Tables (query-ready names):**\n{result}\n\n๐Ÿ’ก **Copy-paste ready:** These table names can be used directly in your SQL queries!" - - -@mcp.tool() -@require_oauth2 -def get_table_info(table_name: str, show_sample: bool = True) -> str: - """๐Ÿ“‹ Explore a specific table's structure and see sample data. - - **When to use:** After you know which table you need (from `get_database_schema()`), use this to understand the columns and data format. - - **What this does:** - - Shows column names, types, and constraints - - Displays sample rows so you understand the actual data format - - Helps you write accurate SQL queries - - **Pro tip:** Always look at sample data! It shows you the actual values, date formats, and data patterns. - - Args: - table_name: Exact table name from the schema (case-sensitive). 
@mcp.tool()
@require_oauth2
def get_table_info(table_name: str, show_sample: bool = True) -> str:
    """📋 Explore a specific table's structure and see sample data.

    **When to use:** After you know which table you need (from
    `get_database_schema()`), use this to understand the columns and data format.

    **What this does:**
    - Shows column names, types, and constraints
    - Displays sample rows so you understand the actual data format
    - Helps you write accurate SQL queries

    **Pro tip:** Always look at sample data! It shows you the actual values,
    date formats, and data patterns.

    Args:
        table_name: Exact table name from the schema (case-sensitive).
            Can be simple name or fully qualified BigQuery name.
        show_sample: Whether to include sample rows (default: True, recommended)

    Returns:
        Complete table structure with sample data to help you write queries
    """
    backend_info = _get_backend_info()

    if _backend == "sqlite":
        # Security: the table name is interpolated into SQL text below
        # (PRAGMA statements cannot take bound parameters), so restrict it
        # to identifier characters before building any query.
        if not table_name.replace("_", "").isalnum():
            return (
                f"{backend_info}❌ Invalid table name '{table_name}'. "
                "Use get_database_schema() to see available tables."
            )
        try:
            result = _execute_sqlite_query(f"PRAGMA table_info({table_name})")
            if "error" in result.lower():
                return f"{backend_info}❌ Table '{table_name}' not found. Use get_database_schema() to see available tables."

            info_result = f"{backend_info}📋 **Table:** {table_name}\n\n**Column Information:**\n{result}"

            if show_sample:
                sample_result = _execute_sqlite_query(
                    f"SELECT * FROM {table_name} LIMIT 3"
                )
                info_result += (
                    f"\n\n📊 **Sample Data (first 3 rows):**\n{sample_result}"
                )

            return info_result
        except Exception as e:
            return f"{backend_info}❌ Error examining table '{table_name}': {e}\n\n💡 Use get_database_schema() to see available tables."

    # BigQuery: accept both simple names (patients) and fully qualified names
    # (`physionet-data.mimiciv_3_1_hosp.patients`, with or without backticks).
    if "." in table_name and "physionet-data" in table_name:
        clean_name = table_name.strip("`")
        parts = clean_name.split(".")
        # Validate BigQuery qualified name format: project.dataset.table
        if len(parts) != 3:
            return (
                f"{backend_info}❌ **Invalid qualified table name:** `{table_name}`\n\n"
                "**Expected format:** `project.dataset.table`\n"
                "**Example:** `physionet-data.mimiciv_3_1_hosp.diagnoses_icd`\n\n"
                "**Available MIMIC-IV datasets:**\n"
                "- `physionet-data.mimiciv_3_1_hosp.*` (hospital module)\n"
                "- `physionet-data.mimiciv_3_1_icu.*` (ICU module)"
            )
        simple_table_name = parts[2]
        full_table_name = f"`{clean_name}`"
        # Parse the qualified name exactly once (the previous version built a
        # "project.dataset" string and then re-split it) and try the named
        # dataset first; fall through to the two-dataset scan on any failure.
        project_dataset = f"`{parts[0]}.{parts[1]}`"
        try:
            info_query = f"""
            SELECT column_name, data_type, is_nullable
            FROM {project_dataset}.INFORMATION_SCHEMA.COLUMNS
            WHERE table_name = '{simple_table_name}'
            ORDER BY ordinal_position
            """
            info_result = _execute_bigquery_query(info_query)
            if "No results found" not in info_result:
                result = f"{backend_info}📋 **Table:** {full_table_name}\n\n**Column Information:**\n{info_result}"
                if show_sample:
                    sample_result = _execute_bigquery_query(
                        f"SELECT * FROM {full_table_name} LIMIT 3"
                    )
                    result += f"\n\n📊 **Sample Data (first 3 rows):**\n{sample_result}"
                return result
        except Exception:
            pass  # Fall through to the two-dataset scan below.
    else:
        simple_table_name = table_name

    # Scan both MIMIC-IV datasets with the simple name (fallback path).
    for dataset in ["mimiciv_3_1_hosp", "mimiciv_3_1_icu"]:
        try:
            full_table_name = f"`physionet-data.{dataset}.{simple_table_name}`"
            info_query = f"""
            SELECT column_name, data_type, is_nullable
            FROM `physionet-data.{dataset}.INFORMATION_SCHEMA.COLUMNS`
            WHERE table_name = '{simple_table_name}'
            ORDER BY ordinal_position
            """
            info_result = _execute_bigquery_query(info_query)
            if "No results found" not in info_result:
                result = f"{backend_info}📋 **Table:** {full_table_name}\n\n**Column Information:**\n{info_result}"
                if show_sample:
                    sample_query = f"SELECT * FROM {full_table_name} LIMIT 3"
                    sample_result = _execute_bigquery_query(sample_query)
                    result += (
                        f"\n\n📊 **Sample Data (first 3 rows):**\n{sample_result}"
                    )
                return result
        except Exception:
            continue

    return f"{backend_info}❌ Table '{table_name}' not found in any dataset. Use get_database_schema() to see available tables."
**Write your SQL query:** Use exact table/column names from exploration - - **Why exploration helps:** - - Table names vary between backends (SQLite vs BigQuery) - - Column names may be unexpected (e.g., age might be 'anchor_age') - - Sample data shows actual formats and constraints - - Args: - sql_query: Your SQL SELECT query (must be SELECT only) - - Returns: - Query results or helpful error messages with next steps - """ - return _execute_query_internal(sql_query) - - -@mcp.tool() -@require_oauth2 -def get_icu_stays(patient_id: int | None = None, limit: int = 10) -> str: - """๐Ÿฅ Get ICU stay information and length of stay data. - - **โš ๏ธ Note:** This is a convenience function that assumes standard MIMIC-IV table structure. - **For reliable queries:** Use `get_database_schema()` โ†’ `get_table_info()` โ†’ `execute_mimic_query()` workflow. - - **What you'll get:** Patient IDs, admission times, length of stay, and ICU details. - - Args: - patient_id: Specific patient ID to query (optional) - limit: Maximum number of records to return (default: 10) - - Returns: - ICU stay data as formatted text or guidance if table not found - """ - # Security validation - if not _validate_limit(limit): - return "Error: Invalid limit. Must be a positive integer between 1 and 10000." - - # Try common ICU table names based on backend - if _backend == "sqlite": - icustays_table = "icu_icustays" - else: # bigquery - icustays_table = "`physionet-data.mimiciv_3_1_icu.icustays`" - - if patient_id: - query = f"SELECT * FROM {icustays_table} WHERE subject_id = {patient_id}" - else: - query = f"SELECT * FROM {icustays_table} LIMIT {limit}" - - # Execute with error handling that suggests proper workflow - result = _execute_query_internal(query) - if "error" in result.lower() or "not found" in result.lower(): - return f"""โŒ **Convenience function failed:** {result} - -๐Ÿ’ก **For reliable results, use the proper workflow:** -1. `get_database_schema()` โ† See actual table names -2. 
@mcp.tool()
@require_oauth2
def get_lab_results(
    patient_id: int | None = None, lab_item: str | None = None, limit: int = 20
) -> str:
    """🧪 Get laboratory test results quickly.

    **⚠️ Note:** This is a convenience function that assumes standard MIMIC-IV table structure.
    **For reliable queries:** Use `get_database_schema()` → `get_table_info()` → `execute_mimic_query()` workflow.

    **What you'll get:** Lab values, timestamps, patient IDs, and test details.

    Args:
        patient_id: Specific patient ID to query (optional)
        lab_item: Lab item to search for in the value field (optional)
        limit: Maximum number of records to return (default: 20)

    Returns:
        Lab results as formatted text or guidance if table not found
    """
    # Security validation
    if not _validate_limit(limit):
        return "Error: Invalid limit. Must be a positive integer between 1 and 10000."

    # Try common lab table names based on backend
    if _backend == "sqlite":
        labevents_table = "hosp_labevents"
    else:  # bigquery
        labevents_table = "`physionet-data.mimiciv_3_1_hosp.labevents`"

    # Build query conditions
    conditions = []
    # `is not None` rather than truthiness: subject_id 0 is a valid filter.
    if patient_id is not None:
        conditions.append(f"subject_id = {patient_id}")
    if lab_item:
        # Escape single quotes for safety in the LIKE clause.
        # NOTE(review): LIKE wildcards (% and _) in lab_item are deliberately
        # left unescaped so callers can pattern-match — confirm this is intended.
        escaped_lab_item = lab_item.replace("'", "''")
        conditions.append(f"value LIKE '%{escaped_lab_item}%'")

    base_query = f"SELECT * FROM {labevents_table}"
    if conditions:
        base_query += " WHERE " + " AND ".join(conditions)
    base_query += f" LIMIT {limit}"

    # Execute with error handling that suggests the proper workflow
    result = _execute_query_internal(base_query)
    if "error" in result.lower() or "not found" in result.lower():
        return f"""❌ **Convenience function failed:** {result}

💡 **For reliable results, use the proper workflow:**
1. `get_database_schema()` ← See actual table names
2. `get_table_info('table_name')` ← Understand structure
3. `execute_mimic_query('your_sql')` ← Use exact names

This ensures compatibility across different MIMIC-IV setups."""

    return result


@mcp.tool()
@require_oauth2
def get_race_distribution(limit: int = 10) -> str:
    """📊 Get race distribution from hospital admissions.

    **⚠️ Note:** This is a convenience function that assumes standard MIMIC-IV table structure.
    **For reliable queries:** Use `get_database_schema()` → `get_table_info()` → `execute_mimic_query()` workflow.

    **What you'll get:** Count of patients by race category, ordered by frequency.

    Args:
        limit: Maximum number of race categories to return (default: 10)

    Returns:
        Race distribution as formatted text or guidance if table not found
    """
    # Security validation
    if not _validate_limit(limit):
        return "Error: Invalid limit. Must be a positive integer between 1 and 10000."

    # Try common admissions table names based on backend
    if _backend == "sqlite":
        admissions_table = "hosp_admissions"
    else:  # bigquery
        admissions_table = "`physionet-data.mimiciv_3_1_hosp.admissions`"

    query = f"SELECT race, COUNT(*) as count FROM {admissions_table} GROUP BY race ORDER BY count DESC LIMIT {limit}"

    # Execute with error handling that suggests the proper workflow
    result = _execute_query_internal(query)
    if "error" in result.lower() or "not found" in result.lower():
        return f"""❌ **Convenience function failed:** {result}

💡 **For reliable results, use the proper workflow:**
1. `get_database_schema()` ← See actual table names
2. `get_table_info('table_name')` ← Understand structure
3. `execute_mimic_query('your_sql')` ← Use exact names

This ensures compatibility across different MIMIC-IV setups."""

    return result


def main():
    """Main entry point for the MCP server: start the FastMCP event loop."""
    mcp.run()


if __name__ == "__main__":
    main()
m3.core.utils.helpers import get_config +from m3.tools.mimic.components.data_io import DataIO +from m3.tools.mimic.components.utils import ( + get_default_database_path, + load_supported_datasets, +) + +logger = logging.getLogger(__name__) + +console = Console() + + +@beartype +class MimicCLI(BaseToolCLI): + @classmethod + def get_app(cls) -> typer.Typer: + app = typer.Typer( + help="MIMIC-IV tool commands.", + add_completion=False, + pretty_exceptions_show_locals=False, + rich_markup_mode="markdown", + ) + app.command(help="Initialise the dataset and database.")(cls.init) + app.command(help="Configure the MIMIC-IV tool.")(cls.configure) + app.command(help="Display the current status of the MIMIC-IV tool.")(cls.status) + return app + + @classmethod + def init( + cls, + dataset: Annotated[ + str, + typer.Option( + "--dataset", + "-d", + help="Dataset name to initialize (e.g., 'mimic-iv-demo').", + ), + ] = "mimic-iv-demo", + db_path: Annotated[ + Optional[str], + typer.Option( + "--db-path", + "-p", + help="Path to save the SQLite DB (defaults to a standard location).", + ), + ] = None, + force: Annotated[ + bool, + typer.Option( + "--force", "-f", help="Force re-download and re-initialization." + ), + ] = False, + ) -> None: + datasets = load_supported_datasets() + if dataset.lower() not in datasets: + console.print("[red]โŒ Unknown dataset. Available:[/red]") + table = Table(show_header=False) + for ds in datasets.keys(): + table.add_row(f"[cyan]{ds}[/cyan]") + console.print(table) + raise typer.Exit(code=1) + + config = get_config() + _db_path = ( + Path(db_path) if db_path else get_default_database_path(config, dataset) + ) + if _db_path is None: + console.print("[red]โŒ Cannot determine DB path.[/red]") + raise typer.Exit(code=1) + + if _db_path.exists() and not force: + console.print( + f"[yellow]โš ๏ธ DB exists at {_db_path}. 
Use --force to overwrite.[/yellow]" + ) + raise typer.Exit(code=1) + + data_io = DataIO(config) + success = data_io.initialize(dataset, _db_path) + + if success: + console.print(f"[green]โœ… Initialized {dataset} at {_db_path}.[/green]") + else: + console.print(f"[red]โŒ Initialization failed for {dataset}.[/red]") + raise typer.Exit(code=1) + + @classmethod + def configure( + cls, + backend: Annotated[ + Optional[str], + typer.Option("--backend", "-b", help="Backend ('sqlite' or 'bigquery')."), + ] = None, + db_path: Annotated[ + Optional[str], + typer.Option("--db-path", help="SQLite DB path (if backend=sqlite)."), + ] = None, + project_id: Annotated[ + Optional[str], + typer.Option("--project-id", help="GCP Project ID (if backend=bigquery)."), + ] = None, + enable_oauth2: Annotated[ + bool, + typer.Option("--enable-oauth2", "-o", help="Enable OAuth2."), + ] = False, + issuer_url: Annotated[ + Optional[str], + typer.Option("--issuer-url", help="OAuth2 Issuer URL."), + ] = None, + audience: Annotated[ + Optional[str], + typer.Option("--audience", help="OAuth2 Audience."), + ] = None, + required_scopes: Annotated[ + Optional[str], + typer.Option( + "--required-scopes", help="OAuth2 Required Scopes (comma-separated)." 
+ ), + ] = None, + jwks_url: Annotated[ + Optional[str], + typer.Option("--jwks-url", help="OAuth2 JWKS URL (optional)."), + ] = None, + rate_limit_requests: Annotated[ + int, + typer.Option("--rate-limit-requests", help="OAuth2 Rate Limit Requests."), + ] = 100, + output: Annotated[ + Optional[str], + typer.Option("--output", "-o", help="Output path for config JSON."), + ] = None, + verbose: Annotated[ + bool, + typer.Option("--verbose", "-v", help="Print config dict."), + ] = False, + ) -> ToolConfig: + env_vars: Dict[str, str] = {} + tool_params: Dict[str, Any] = {} + + console.print("[turquoise4]๐Ÿ’ฌ Configuring MIMIC-IV tool...[/turquoise4]") + + if not backend: + backend = typer.prompt( + "Backend (sqlite/bigquery)", default="sqlite" + ).lower() + + if backend not in ["sqlite", "bigquery"]: + console.print("[red]โŒ Invalid backend. Use 'sqlite' or 'bigquery'.[/red]") + raise typer.Exit(code=1) + + env_vars["M3_BACKEND"] = backend + tool_params["backend_key"] = backend + + backends_list = [] + if backend == "sqlite": + if db_path is None: + default_db = get_default_database_path(get_config(), "mimic-iv-demo") + if default_db is None: + raise M3ValidationError("Cannot determine default DB path") + console.print(f"[yellow]๐Ÿ’ฌ Default DB path: {default_db}[/yellow]") + db_path = typer.prompt( + "SQLite DB path (Enter for default)", default=str(default_db) + ) + if db_path and not Path(db_path).exists(): + console.print( + f"[yellow]โš ๏ธ DB path {db_path} does not exist. 
Using default path.[/yellow]" + ) + db_path = str(get_default_database_path(get_config(), "mimic-iv-demo")) + env_vars["M3_DB_PATH"] = db_path + backends_list.append({"type": "sqlite", "params": {"path": db_path}}) + elif backend == "bigquery": + if project_id is None: + project_id = typer.prompt("GCP Project ID (required)") + if not project_id: + raise M3ValidationError("Project ID required for BigQuery") + env_vars["M3_PROJECT_ID"] = project_id + env_vars["GOOGLE_CLOUD_PROJECT"] = project_id + backends_list.append( + {"type": "bigquery", "params": {"project": project_id}} + ) + + tool_params["backends"] = backends_list + + if enable_oauth2: + if issuer_url is None: + issuer_url = typer.prompt("Issuer URL") + if audience is None: + audience = typer.prompt("Audience") + if required_scopes is None: + required_scopes = typer.prompt( + "Scopes [read:mimic-data]", default="read:mimic-data" + ) + env_vars.update( + { + "M3_OAUTH2_ENABLED": "true", + "M3_OAUTH2_ISSUER_URL": issuer_url, + "M3_OAUTH2_AUDIENCE": audience, + "M3_OAUTH2_REQUIRED_SCOPES": required_scopes, + } + ) + if jwks_url is None: + jwks_url = typer.prompt("JWKS URL (optional)", default="") + jwks_url = jwks_url.strip() + if jwks_url: + env_vars["M3_OAUTH2_JWKS_URL"] = jwks_url + env_vars["M3_OAUTH2_RATE_LIMIT_REQUESTS"] = str(rate_limit_requests) + + console.print( + "\n[turquoise4]๐Ÿ’ฌ Additional env vars (key=value, Enter to finish):[/turquoise4]" + ) + additional_env = {} + while True: + env_var = typer.prompt("", default="", show_default=False) + if not env_var: + break + if "=" in env_var: + key, value = env_var.split("=", 1) + additional_env[key.strip()] = value.strip() + else: + console.print("[red]Invalid: Use key=value[/red]") + env_vars.update(additional_env) + + config_dict = {"env_vars": env_vars, "tool_params": tool_params} + + output = output or "mimic_config.json" + with open(output, "w") as f: + json.dump(config_dict, f, indent=4) + console.print(f"[green]โœ… Config dict saved to 
{output}[/green]") + + if verbose: + console.print( + Panel( + json.dumps(config_dict, indent=2), + title="[bold green]Configuration[/bold green]", + border_style="green", + ) + ) + return config_dict + + @classmethod + def status(cls, verbose: bool = False) -> None: + try: + config = M3Config(env_vars=os.environ.copy()) + _db_path = ( + str(get_default_database_path(config, "mimic-iv-demo")) or "Default" + ) + + table = Table(title="[bold green]MIMIC Tool Status[/bold green]") + table.add_column("Key", style="cyan") + table.add_column("Value", style="magenta") + table.add_row("Backend", config.get_env_var("M3_BACKEND", "sqlite")) + table.add_row("DB Path", config.get_env_var("M3_DB_PATH", _db_path)) + table.add_row( + "OAuth2 Enabled", config.get_env_var("M3_OAUTH2_ENABLED", "No") + ) + console.print(table) + if verbose: + env_table = Table( + title="[bold green]Environment Variables (M3_*)[/bold green]" + ) + env_table.add_column("Key", style="cyan") + env_table.add_column("Value", style="magenta") + for key, value in sorted(config.env_vars.items()): + if key.startswith("M3_"): + env_table.add_row(key, value) + console.print(env_table) + except Exception as e: + console.print(f"[red]โŒ Error getting status: {e}[/red]") + logger.error(f"Status failed: {e}") diff --git a/src/m3/tools/mimic/components/__init__.py b/src/m3/tools/mimic/components/__init__.py new file mode 100644 index 0000000..29261d7 --- /dev/null +++ b/src/m3/tools/mimic/components/__init__.py @@ -0,0 +1,4 @@ +from .auth import Auth +from .data_io import DataIO + +__all__ = ["Auth", "DataIO"] diff --git a/src/m3/tools/mimic/components/auth.py b/src/m3/tools/mimic/components/auth.py new file mode 100644 index 0000000..9b09975 --- /dev/null +++ b/src/m3/tools/mimic/components/auth.py @@ -0,0 +1,224 @@ +import asyncio +import logging +import time +from collections.abc import Callable +from functools import wraps +from urllib.parse import urljoin + +import httpx +import jwt +from beartype import 
@beartype
class Auth:
    """OAuth2 bearer-token validation for MIMIC tools.

    Verifies JWTs against the issuer's JWKS (RS256/ES256), enforces required
    scopes, and applies a simple in-memory per-user rate limit. All settings
    come from M3_OAUTH2_* environment variables via the supplied M3Config.
    """

    def __init__(self, config: M3Config) -> None:
        self.config = config
        self._set_enabled()
        if not self.enabled:
            # When disabled, no other settings are read and decorator() is a
            # pass-through.
            return
        self._set_issuer_and_audience()
        self._set_required_scopes()
        self._set_jwks_url()
        self._set_cache()
        self._set_http_client()
        self._set_rate_limit()
        self._set_validation_flags()
        logger.info(f"OAuth2 enabled: {self.enabled}, issuer: {self.issuer_url}")

    async def authenticate(self, token: str) -> Dict[str, Any]:
        """Validate *token* and return its claims, raising M3ValidationError on failure."""
        jwks = await self._get_jwks()
        unverified_header = jwt.get_unverified_header(token)
        kid = unverified_header.get("kid")
        if not kid:
            raise M3ValidationError("Token missing key ID (kid)")
        key = self._find_key(jwks, kid)
        if not key:
            raise M3ValidationError(f"No key found for kid: {kid}")
        public_key = self._jwk_to_pem(key)
        payload = jwt.decode(
            token,
            public_key,
            algorithms=["RS256", "ES256"],
            audience=self.audience,
            issuer=self.issuer_url,
            # Honour the M3_OAUTH2_VALIDATE_* flags; previously they were
            # read in _set_validation_flags() but never applied.
            options={
                "verify_exp": self.validate_exp,
                "verify_aud": self.validate_aud,
                "verify_iss": self.validate_iss,
            },
        )
        self._validate_scopes(payload)
        if self.rate_limit_enabled:
            self._check_rate_limit(payload)
        return payload

    @staticmethod
    def generate_test_token(
        issuer: str = "https://test-issuer.example.com",
        audience: str = "m3-api",
        subject: str = "test-user",
        scopes: Optional[List[str]] = None,
        expires_in: int = 3600,
    ) -> str:
        """Create a self-signed RS256 token for tests (throwaway key, not verifiable)."""
        from datetime import datetime, timedelta, timezone

        scopes = scopes or ["read:mimic-data"]
        now = datetime.now(timezone.utc)
        claims = {
            "iss": issuer,
            "aud": audience,
            "sub": subject,
            "iat": int(now.timestamp()),
            "exp": int((now + timedelta(seconds=expires_in)).timestamp()),
            "scope": " ".join(scopes),
        }
        private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
        private_pem = private_key.private_bytes(
            encoding=serialization.Encoding.PEM,
            format=serialization.PrivateFormat.PKCS8,
            encryption_algorithm=serialization.NoEncryption(),
        )
        return jwt.encode(claims, private_pem, algorithm="RS256")

    def decorator(self, func: Callable) -> Callable:
        """Wrap *func* so it authenticates M3_OAUTH2_TOKEN before executing."""

        @wraps(func)
        async def wrapper(*args: Any, **kwargs: Any) -> Any:
            if not self.enabled:
                return (
                    await func(*args, **kwargs)
                    if asyncio.iscoroutinefunction(func)
                    else func(*args, **kwargs)
                )
            token = self.config.get_env_var("M3_OAUTH2_TOKEN", "")
            if token.startswith("Bearer "):
                token = token[7:]
            if not token:
                raise M3ValidationError("Missing OAuth2 access token")
            await self.authenticate(token)
            return (
                await func(*args, **kwargs)
                if asyncio.iscoroutinefunction(func)
                else func(*args, **kwargs)
            )

        return wrapper

    async def _get_jwks(self) -> Dict[str, Any]:
        """Fetch the issuer's JWKS, caching it for jwks_cache_ttl seconds."""
        current_time = time.time()
        if (
            self._jwks_cache
            and current_time - self._jwks_cache_time < self.jwks_cache_ttl
        ):
            return self._jwks_cache
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.get(self.jwks_url)
            response.raise_for_status()
            jwks = response.json()
            self._jwks_cache = jwks
            self._jwks_cache_time = current_time
            return jwks

    def _find_key(self, jwks: Dict[str, Any], kid: str) -> Optional[Dict[str, Any]]:
        """Return the JWK with the given key ID, or None."""
        for key in jwks.get("keys", []):
            if key.get("kid") == kid:
                return key
        return None

    def _jwk_to_pem(self, jwk: Dict[str, Any]) -> bytes:
        """Convert an RSA JWK into a PEM-encoded public key."""
        from jose.utils import base64url_decode

        if jwk.get("kty") == "RSA":
            n = base64url_decode(jwk["n"])
            e = base64url_decode(jwk["e"])
            public_numbers = rsa.RSAPublicNumbers(
                int.from_bytes(e, byteorder="big"),
                int.from_bytes(n, byteorder="big"),
            )
            public_key = public_numbers.public_key()
            return public_key.public_bytes(
                encoding=serialization.Encoding.PEM,
                format=serialization.PublicFormat.SubjectPublicKeyInfo,
            )
        raise M3ValidationError(f"Unsupported key type: {jwk.get('kty')}")

    def _validate_scopes(self, payload: Dict[str, Any]) -> None:
        """Raise if the token lacks any of the configured required scopes."""
        token_scopes = set()
        scope_claim = payload.get("scope", "")
        if isinstance(scope_claim, str):
            token_scopes = set(scope_claim.split())
        elif isinstance(scope_claim, list):
            token_scopes = set(scope_claim)
        # Some providers use "scp" (list) instead of "scope" (string).
        scp_claim = payload.get("scp", [])
        if isinstance(scp_claim, list):
            token_scopes.update(scp_claim)
        missing_scopes = self.required_scopes - token_scopes
        if missing_scopes:
            raise M3ValidationError(f"Missing required scopes: {missing_scopes}")

    def _check_rate_limit(self, payload: Dict[str, Any]) -> None:
        """Sliding-window rate limit keyed on the token's `sub` claim."""
        user_id = payload.get("sub", "unknown")
        current_time = time.time()
        window_start = current_time - self.rate_limit_window
        user_requests = self._rate_limit_cache.get(user_id, [])
        user_requests = [t for t in user_requests if t > window_start]
        if len(user_requests) >= self.rate_limit_requests:
            raise M3ValidationError("Rate limit exceeded")
        user_requests.append(current_time)
        self._rate_limit_cache[user_id] = user_requests

    def _set_enabled(self) -> None:
        self.enabled = (
            self.config.get_env_var("M3_OAUTH2_ENABLED", "false").lower() == "true"
        )

    def _set_issuer_and_audience(self) -> None:
        self.issuer_url = self.config.get_env_var(
            "M3_OAUTH2_ISSUER_URL", raise_if_missing=True
        )
        self.audience = self.config.get_env_var(
            "M3_OAUTH2_AUDIENCE", raise_if_missing=True
        )

    def _set_required_scopes(self) -> None:
        self.required_scopes = {
            scope.strip()
            for scope in self.config.get_env_var(
                "M3_OAUTH2_REQUIRED_SCOPES", "read:mimic-data"
            ).split(",")
        }

    def _set_jwks_url(self) -> None:
        # Default to the issuer's well-known JWKS location if not overridden.
        self.jwks_url = self.config.get_env_var("M3_OAUTH2_JWKS_URL") or urljoin(
            self.issuer_url.rstrip("/"), "/.well-known/jwks.json"
        )

    def _set_cache(self) -> None:
        self.jwks_cache_ttl: int = 3600
        self._jwks_cache: Dict[str, Any] = {}
        self._jwks_cache_time: float = 0

    def _set_http_client(self) -> None:
        # NOTE(review): this sync client is never used by the class (JWKS is
        # fetched with an AsyncClient) and is never closed — confirm whether
        # external callers rely on it before removing.
        self.http_client = httpx.Client(timeout=30.0)

    def _set_rate_limit(self) -> None:
        self.rate_limit_enabled = True
        self.rate_limit_requests = 100
        self.rate_limit_window = 3600
        self._rate_limit_cache: Dict[str, List[float]] = {}

    def _set_validation_flags(self) -> None:
        self.validate_exp = (
            self.config.get_env_var("M3_OAUTH2_VALIDATE_EXP", "true").lower() == "true"
        )
        self.validate_aud = (
            self.config.get_env_var("M3_OAUTH2_VALIDATE_AUD", "true").lower() == "true"
        )
        self.validate_iss = (
            self.config.get_env_var("M3_OAUTH2_VALIDATE_ISS", "true").lower() == "true"
        )
console.print("[cyan]Downloading dataset files...[/cyan]") + if not self._download_dataset_files(dataset_config, raw_files_root_dir): + logger.error(f"Download failed for {dataset}.") + console.print(f"[red]โŒ Download failed for {dataset}.[/red]") + return False + + console.print("[cyan]Loading files to SQLite...[/cyan]") + if not self._etl_csv_collection_to_sqlite(raw_files_root_dir, path): + logger.error(f"ETL failed for {dataset}.") + console.print(f"[red]โŒ ETL failed for {dataset}.[/red]") + return False + + logger.info(f"Successfully initialized {dataset}.") + console.print(f"[green]โœ… Successfully initialized {dataset}.[/green]") + return True + + def _get_dataset_config(self, dataset: str) -> Dict[str, Any]: + config = get_dataset_config(dataset) + if not config: + raise M3ValidationError(f"Config not found for '{dataset}'.") + return config + + def _get_raw_files_path(self, dataset: str) -> Path: + path = get_dataset_raw_files_path(self.config, dataset) + if path is None: + raise M3ValidationError(f"Raw files path not found for '{dataset}'.") + return path + + def _download_dataset_files( + self, + dataset_config: Dict[str, Any], + raw_files_root_dir: Path, + ) -> bool: + base_listing_url = dataset_config["file_listing_url"] + subdirs_to_scan = dataset_config.get("subdirectories_to_scan", []) + session = requests.Session() + session.headers.update({"User-Agent": COMMON_USER_AGENT}) + all_files_to_process = [] + for subdir_name in subdirs_to_scan: + subdir_listing_url = urljoin(base_listing_url, f"{subdir_name}/") + csv_urls_in_subdir = self._scrape_urls_from_html_page( + subdir_listing_url, session + ) + if not csv_urls_in_subdir: + continue + for file_url in csv_urls_in_subdir: + url_path_obj = Path(urlparse(file_url).path) + base_listing_url_path_obj = Path(urlparse(base_listing_url).path) + relative_file_path = ( + url_path_obj.relative_to(base_listing_url_path_obj) + if url_path_obj.as_posix().startswith( + base_listing_url_path_obj.as_posix() + ) 
+ else Path(subdir_name) / url_path_obj.name + ) + local_target_path = raw_files_root_dir / relative_file_path + all_files_to_process.append((file_url, local_target_path)) + if not all_files_to_process: + return False + unique_files_to_process = sorted(set(all_files_to_process), key=lambda x: x[1]) + downloaded_count = 0 + for file_url, target_filepath in unique_files_to_process: + if not self._download_single_file(file_url, target_filepath, session): + return False + downloaded_count += 1 + return downloaded_count == len(unique_files_to_process) + + def _download_single_file( + self, url: str, target_filepath: Path, session: requests.Session + ) -> bool: + try: + response = session.get(url, stream=True, timeout=60) + response.raise_for_status() + total_size = int(response.headers.get("content-length", 0)) + target_filepath.parent.mkdir(parents=True, exist_ok=True) + with open(target_filepath, "wb") as file_object: + with Progress(console=console, transient=True) as progress: + task = progress.add_task( + f"[cyan]Downloading {target_filepath.name}", total=total_size + ) + for chunk in response.iter_content(chunk_size=8192): + if chunk: + file_object.write(chunk) + progress.update(task, advance=len(chunk)) + return True + except Exception as e: + logger.error(f"Download failed for {url}: {e}") + if target_filepath.exists(): + target_filepath.unlink() + console.print(f"[red]โŒ Download failed for {url}: {e}[/red]") + return False + + def _scrape_urls_from_html_page( + self, page_url: str, session: requests.Session, file_suffix: str = ".csv.gz" + ) -> List[str]: + found_urls = [] + try: + page_response = session.get(page_url, timeout=30) + page_response.raise_for_status() + soup = BeautifulSoup(page_response.content, "html.parser") + for link_tag in soup.find_all("a", href=True): + href_path = link_tag["href"] + if ( + href_path.endswith(file_suffix) + and not href_path.startswith(("?", "#")) + and ".." 
not in href_path + ): + absolute_url = urljoin(page_url, href_path) + found_urls.append(absolute_url) + except Exception as e: + logger.error(f"Scrape failed for {page_url}: {e}") + console.print(f"[red]โŒ Scrape failed for {page_url}: {e}[/red]") + return found_urls + + def _etl_csv_collection_to_sqlite( + self, csv_source_dir: Path, db_target_path: Path + ) -> bool: + db_target_path.parent.mkdir(parents=True, exist_ok=True) + db_connection_uri = f"sqlite:///{db_target_path.resolve()}" + csv_file_paths = list(csv_source_dir.rglob("*.csv.gz")) + if not csv_file_paths: + return False + successfully_loaded_count = 0 + files_with_errors = [] + with Progress(console=console) as progress: + total_task = progress.add_task( + "[cyan]Loading CSV files to SQLite...", total=len(csv_file_paths) + ) + for csv_file_path in csv_file_paths: + relative_path = csv_file_path.relative_to(csv_source_dir) + table_name_parts = [part.lower() for part in relative_path.parts] + table_name = ( + "_".join(table_name_parts) + .replace(".csv.gz", "") + .replace("-", "_") + .replace(".", "_") + ) + try: + dataframe = self._load_csv_with_robust_parsing( + csv_file_path, table_name + ) + dataframe.write_database( + table_name=table_name, + connection=db_connection_uri, + if_table_exists="replace", + engine="sqlalchemy", + ) + successfully_loaded_count += 1 + except Exception as e: + err_msg = ( + f"ETL error for '{relative_path}' (table '{table_name}'): {e}" + ) + logger.error(err_msg, exc_info=True) + files_with_errors.append(err_msg) + console.print(f"[red]โŒ {err_msg}[/red]") + progress.update(total_task, advance=1) + + if files_with_errors: + logger.warning(f"ETL errors in {len(files_with_errors)} files:") + for detail in files_with_errors: + logger.warning(f" - {detail}") + + return successfully_loaded_count == len(csv_file_paths) + + def _load_csv_with_robust_parsing( + self, csv_file_path: Path, table_name: str + ) -> pl.DataFrame: + try: + dataframe = pl.read_csv( + 
source=csv_file_path, + infer_schema_length=None, + try_parse_dates=True, + ignore_errors=False, + null_values=["", "NULL", "null", "\\N", "NA"], + ) + if dataframe.height > 0: + empty_columns = [ + column + for column in dataframe.columns + if dataframe[column].is_null().all() + ] + if empty_columns: + logger.debug(f"Empty columns in {table_name}: {empty_columns}") + return dataframe + except Exception as e: + raise M3ValidationError(f"Failed to parse CSV {csv_file_path}: {e}") from e diff --git a/src/m3/tools/mimic/components/utils.py b/src/m3/tools/mimic/components/utils.py new file mode 100644 index 0000000..967c55d --- /dev/null +++ b/src/m3/tools/mimic/components/utils.py @@ -0,0 +1,88 @@ +import logging +from pathlib import Path + +import yaml +from beartype import beartype +from beartype.typing import Any, Dict + +from m3.core.config import M3Config +from m3.core.utils.exceptions import M3ValidationError + +logger = logging.getLogger(__name__) + + +@beartype +def load_supported_datasets() -> Dict[str, Dict[str, Any]]: + yaml_path = Path(__file__).parent.parent / "configurations" / "datasets.yaml" + if not yaml_path.exists(): + raise RuntimeError(f"datasets.yaml not found at {yaml_path}") + with open(yaml_path) as f: + return yaml.safe_load(f) + + +@beartype +def get_dataset_config(dataset_name: str) -> Dict[str, Any] | None: + datasets = load_supported_datasets() + return datasets.get(dataset_name.lower()) + + +@beartype +def get_default_database_path(base_config: M3Config, dataset_name: str) -> Path | None: + cfg = get_dataset_config(dataset_name) + if not cfg: + return None + default_filename = cfg.get("default_db_filename", f"{dataset_name}.db") + env_key = f"M3_{dataset_name.upper()}_DATA_DIR" + default_dir_str = base_config.get_env_var(env_key) + default_dir = ( + Path(default_dir_str) + if default_dir_str + else base_config.databases_dir / dataset_name + ) + return default_dir / default_filename + + +@beartype +def 
get_dataset_raw_files_path(base_config: M3Config, dataset_name: str) -> Path | None: + cfg = get_dataset_config(dataset_name) + if not cfg: + logger.warning(f"Unknown dataset: {dataset_name}") + return None + env_key = f"M3_{dataset_name.upper()}_RAW_DIR" + raw_dir_str = base_config.get_env_var(env_key) + path = ( + Path(raw_dir_str) + if raw_dir_str + else base_config.raw_files_dir / dataset_name.lower() + ) + path.mkdir(parents=True, exist_ok=True) + return path + + +@beartype +def load_security_config() -> Dict[str, Any]: + yaml_path = Path(__file__).parent.parent / "configurations" / "security.yaml" + if not yaml_path.exists(): + raise RuntimeError(f"security.yaml not found at {yaml_path}") + with open(yaml_path) as f: + return yaml.safe_load(f) + + +@beartype +def load_env_vars_config() -> Dict[str, Any]: + yaml_path = Path(__file__).parent.parent / "configurations" / "env_vars.yaml" + if not yaml_path.exists(): + raise M3ValidationError(f"env_vars.yaml not found at {yaml_path}") + try: + with open(yaml_path) as f: + config = yaml.safe_load(f) + if not isinstance(config, dict): + raise ValueError("Invalid YAML structure; expected a dictionary.") + logger.debug(f"Loaded env_vars.yaml from {yaml_path}") + return config + except (yaml.YAMLError, ValueError) as e: + raise M3ValidationError(f"Failed to load env_vars.yaml: {e}") from e + + +def validate_limit(limit: int) -> bool: + return isinstance(limit, int) and 0 < limit <= 1000 diff --git a/src/m3/tools/mimic/configurations/datasets.yaml b/src/m3/tools/mimic/configurations/datasets.yaml new file mode 100644 index 0000000..23fe5c4 --- /dev/null +++ b/src/m3/tools/mimic/configurations/datasets.yaml @@ -0,0 +1,5 @@ +mimic-iv-demo: + file_listing_url: "https://physionet.org/files/mimic-iv-demo/2.2/" + subdirectories_to_scan: ["hosp", "icu"] + default_db_filename: "mimic_iv_demo.db" + primary_verification_table: "hosp_admissions" diff --git a/src/m3/tools/mimic/configurations/env_vars.yaml 
b/src/m3/tools/mimic/configurations/env_vars.yaml new file mode 100644 index 0000000..a45537a --- /dev/null +++ b/src/m3/tools/mimic/configurations/env_vars.yaml @@ -0,0 +1,87 @@ +core: + - key: M3_BACKEND + default: sqlite + required: false + description: "Backend type ('sqlite' or 'bigquery'). Determines data storage and query engine." + +backends: + sqlite: + - key: M3_DB_PATH + default: null + required: false + description: "Path to SQLite database file." + - key: M3_ICUSTAYS_TABLE + default: icu_icustays + required: false + description: "Table name for ICU stays in SQLite." + - key: M3_LABEVENTS_TABLE + default: hosp_labevents + required: false + description: "Table name for lab events in SQLite." + - key: M3_ADMISSIONS_TABLE + default: hosp_admissions + required: false + description: "Table name for admissions in SQLite." + bigquery: + - key: M3_PROJECT_ID + default: null + required: true + description: "GCP Project ID for BigQuery." + - key: GOOGLE_CLOUD_PROJECT + default: null + required: true + description: "GCP Project ID (alias for M3_PROJECT_ID)." + - key: M3_BIGQUERY_PREFIX + default: "`physionet-data.mimiciv_3_1_" + required: false + description: "Prefix for BigQuery table names." + - key: M3_BIGQUERY_HOSP_DATASET + default: mimiciv_3_1_hosp + required: false + description: "BigQuery dataset for hospital data." + - key: M3_BIGQUERY_ICU_DATASET + default: mimiciv_3_1_icu + required: false + description: "BigQuery dataset for ICU data." + - key: M3_BIGQUERY_PROJECT + default: physionet-data + required: false + description: "BigQuery project name." + +oauth2: + - key: M3_OAUTH2_ENABLED + default: false + required: false + description: "Enable OAuth2 authentication (true/false)." + - key: M3_OAUTH2_ISSUER_URL + default: null + required: true + description: "OAuth2 issuer URL. (Required if OAuth2 is enabled.)" + - key: M3_OAUTH2_AUDIENCE + default: null + required: true + description: "OAuth2 audience. 
(Required if OAuth2 is enabled.)" + - key: M3_OAUTH2_REQUIRED_SCOPES + default: read:mimic-data + required: false + description: "Required OAuth2 scopes (comma-separated)." + - key: M3_OAUTH2_JWKS_URL + default: null + required: false + description: "OAuth2 JWKS URL (optional; auto-derived if unset)." + - key: M3_OAUTH2_VALIDATE_EXP + default: true + required: false + description: "Validate token expiration (true/false)." + - key: M3_OAUTH2_VALIDATE_AUD + default: true + required: false + description: "Validate token audience (true/false)." + - key: M3_OAUTH2_VALIDATE_ISS + default: true + required: false + description: "Validate token issuer (true/false)." + - key: M3_OAUTH2_TOKEN + default: "" + required: false + description: "OAuth2 access token (set at runtime)." diff --git a/src/m3/tools/mimic/configurations/security.yaml b/src/m3/tools/mimic/configurations/security.yaml new file mode 100644 index 0000000..4232b20 --- /dev/null +++ b/src/m3/tools/mimic/configurations/security.yaml @@ -0,0 +1,40 @@ +dangerous_keywords: + - INSERT + - UPDATE + - DELETE + - DROP + - CREATE + - ALTER + - TRUNCATE + - REPLACE + - MERGE + - EXEC + - EXECUTE + +injection_patterns: + - ["1=1", "Classic injection pattern"] + - ["OR 1=1", "Boolean injection pattern"] + - ["AND 1=1", "Boolean injection pattern"] + - ["OR '1'='1'", "String injection pattern"] + - ["AND '1'='1'", "String injection pattern"] + - ["WAITFOR", "Time-based injection"] + - ["SLEEP(", "Time-based injection"] + - ["BENCHMARK(", "Time-based injection"] + - ["LOAD_FILE(", "File access injection"] + - ["INTO OUTFILE", "File write injection"] + - ["INTO DUMPFILE", "File write injection"] + +suspicious_names: + - PASSWORD + - ADMIN + - USER + - LOGIN + - AUTH + - TOKEN + - CREDENTIAL + - SECRET + - KEY + - HASH + - SALT + - SESSION + - COOKIE diff --git a/src/m3/tools/mimic/mimic.py b/src/m3/tools/mimic/mimic.py new file mode 100644 index 0000000..8b3b795 --- /dev/null +++ b/src/m3/tools/mimic/mimic.py @@ -0,0 +1,512 
@@ +import logging +from collections.abc import Callable + +import sqlparse +from beartype import beartype +from beartype.typing import Any, Dict, List, Optional, Tuple + +from m3.core.config import M3Config +from m3.core.tool.backend.base import BackendBase +from m3.core.tool.backend.registry import BACKEND_REGISTRY +from m3.core.tool.base import BaseTool +from m3.core.utils.exceptions import M3ValidationError +from m3.tools.mimic.components.auth import Auth +from m3.tools.mimic.components.data_io import DataIO +from m3.tools.mimic.components.utils import ( + load_env_vars_config, + load_security_config, + validate_limit, +) + +logger = logging.getLogger(__name__) + + +@beartype +class MIMIC(BaseTool): + @beartype + def __init__( + self, + backends: List[BackendBase], + config: Optional[M3Config] = None, + data_io: Optional[DataIO] = None, + backend_key: str = "sqlite", + ) -> None: + super().__init__() + self.config = config or M3Config() + self.env_config = load_env_vars_config() + self._set_required_env_vars(backend_key) + self._set_backends(backends) + self.data_io = data_io or DataIO(self.config) + self.backend_key = backend_key + self._set_auth() + self._validate_backend_key(backend_key) + self.security_config = {} + self.table_names = {} + + def to_dict(self) -> Dict[str, Any]: + return { + "backend_key": self.backend_key, + "backends": [ + {"type": k, "params": v.to_dict()} for k, v in self.backends.items() + ], + } + + @classmethod + def from_dict(cls, params: Dict[str, Any]) -> "MIMIC": + try: + backends_list = [] + for bd in params["backends"]: + backend_type = bd["type"] + if backend_type not in BACKEND_REGISTRY: + raise ValueError(f"Unknown backend type: {backend_type}") + backend_cls = BACKEND_REGISTRY[backend_type] + backends_list.append(backend_cls.from_dict(bd["params"])) + return cls( + backends=backends_list, + backend_key=params["backend_key"], + ) + except KeyError as e: + raise ValueError(f"Missing required param: {e}") from e + except 
Exception as e: + raise ValueError(f"Failed to reconstruct MIMIC: {e}") from e + + def actions(self) -> List[Callable]: + def get_database_schema() -> str: + """๐Ÿ” Discover what data is available in the MIMIC-IV database. + + **When to use:** Start here when you need to understand what tables exist, or when someone asks about data that might be in multiple tables. + + **What this does:** Shows all available tables so you can identify which ones contain the data you need. + + **Next steps after using this:** + - If you see relevant tables, use `get_table_info(table_name)` to explore their structure + - Common tables: `patients` (demographics), `admissions` (hospital stays), `icustays` (ICU data), `labevents` (lab results) + + Returns: + List of all available tables in the database with current backend info + """ + backend_info = self._get_backend_info() + if "sqlite" in self.backend_key.lower(): + query = ( + "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name" + ) + result = self.backends[self.backend_key].execute(query) + return f"{backend_info}\n๐Ÿ“‹ **Available Tables:**\n{result}" + else: + hosp_dataset = self.config.get_env_var( + "M3_BIGQUERY_HOSP_DATASET", "mimiciv_3_1_hosp" + ) + icu_dataset = self.config.get_env_var( + "M3_BIGQUERY_ICU_DATASET", "mimiciv_3_1_icu" + ) + project = self.config.get_env_var( + "M3_BIGQUERY_PROJECT", "physionet-data" + ) + query = f""" + SELECT CONCAT('`{project}.{hosp_dataset}.', table_name, '`') as query_ready_table_name + FROM `{project}.{hosp_dataset}.INFORMATION_SCHEMA.TABLES` + UNION ALL + SELECT CONCAT('`{project}.{icu_dataset}.', table_name, '`') as query_ready_table_name + FROM `{project}.{icu_dataset}.INFORMATION_SCHEMA.TABLES` + ORDER BY query_ready_table_name + """ + result = self.backends[self.backend_key].execute(query) + return f"{backend_info}\n๐Ÿ“‹ **Available Tables (query-ready names):**\n{result}\n\n๐Ÿ’ก **Copy-paste ready:** These table names can be used directly in your SQL queries!" 
+ + def get_table_info(table_name: str, show_sample: bool = True) -> str: + """๐Ÿ“‹ Explore a specific table's structure and see sample data. + + **When to use:** After you know which table you need (from `get_database_schema()`), use this to understand the columns and data format. + + **What this does:** + - Shows column names, types, and constraints + - Displays sample rows so you understand the actual data format + - Helps you write accurate SQL queries + + **Pro tip:** Always look at sample data! It shows you the actual values, date formats, and data patterns. + + Args: + table_name: Exact table name from the schema (case-sensitive). Can be simple name or fully qualified BigQuery name. + show_sample: Whether to include sample rows (default: True, recommended) + + Returns: + Complete table structure with sample data to help you write queries + """ + backend_info = self._get_backend_info() + if "sqlite" in self.backend_key.lower(): + pragma_query = f"PRAGMA table_info({table_name})" + try: + result = self.backends[self.backend_key].execute(pragma_query) + info_result = f"{backend_info}๐Ÿ“‹ **Table:** {table_name}\n\n**Column Information:**\n{result}" + if show_sample: + sample_query = f"SELECT * FROM {table_name} LIMIT 3" + sample_result = self.backends[self.backend_key].execute( + sample_query + ) + info_result += ( + f"\n\n๐Ÿ“Š **Sample Data (first 3 rows):**\n{sample_result}" + ) + return info_result + except Exception as e: + return f"{backend_info}โŒ Error examining table '{table_name}': {e}\n\n๐Ÿ’ก Use get_database_schema() to see available tables." + else: + if "." 
in table_name and "physionet-data" in table_name: + clean_name = table_name.strip("`") + full_table_name = f"`{clean_name}`" + parts = clean_name.split(".") + if len(parts) != 3: + return f"{backend_info}โŒ **Invalid qualified table name:** `{table_name}`\n\n**Expected format:** `project.dataset.table`\n**Example:** `physionet-data.mimiciv_3_1_hosp.diagnoses_icd`\n\n**Available MIMIC-IV datasets:**\n- `physionet-data.mimiciv_3_1_hosp.*` (hospital module)\n- `physionet-data.mimiciv_3_1_icu.*` (ICU module)" + simple_table_name = parts[2] + dataset = f"{parts[0]}.{parts[1]}" + else: + simple_table_name = table_name + full_table_name = None + dataset = None + + if full_table_name: + try: + info_query = f""" + SELECT column_name, data_type, is_nullable + FROM {dataset}.INFORMATION_SCHEMA.COLUMNS + WHERE table_name = '{simple_table_name}' + ORDER BY ordinal_position + """ + info_result = self.backends[self.backend_key].execute( + info_query + ) + if "No results found" not in info_result: + result = f"{backend_info}๐Ÿ“‹ **Table:** {full_table_name}\n\n**Column Information:**\n{info_result}" + if show_sample: + sample_query = ( + f"SELECT * FROM {full_table_name} LIMIT 3" + ) + sample_result = self.backends[self.backend_key].execute( + sample_query + ) + result += f"\n\n๐Ÿ“Š **Sample Data (first 3 rows):**\n{sample_result}" + return result + except Exception: + pass + + for ds in [ + self.config.get_env_var( + "M3_BIGQUERY_HOSP_DATASET", "mimiciv_3_1_hosp" + ), + self.config.get_env_var( + "M3_BIGQUERY_ICU_DATASET", "mimiciv_3_1_icu" + ), + ]: + try: + full_table_name = f"`{self.config.get_env_var('M3_BIGQUERY_PROJECT', 'physionet-data')}.{ds}.{simple_table_name}`" + info_query = f""" + SELECT column_name, data_type, is_nullable + FROM `{self.config.get_env_var("M3_BIGQUERY_PROJECT", "physionet-data")}.{ds}.INFORMATION_SCHEMA.COLUMNS` + WHERE table_name = '{simple_table_name}' + ORDER BY ordinal_position + """ + info_result = self.backends[self.backend_key].execute( + 
info_query + ) + if "No results found" not in info_result: + result = f"{backend_info}๐Ÿ“‹ **Table:** {full_table_name}\n\n**Column Information:**\n{info_result}" + if show_sample: + sample_query = ( + f"SELECT * FROM {full_table_name} LIMIT 3" + ) + sample_result = self.backends[self.backend_key].execute( + sample_query + ) + result += f"\n\n๐Ÿ“Š **Sample Data (first 3 rows):**\n{sample_result}" + return result + except Exception: + continue + return f"{backend_info}โŒ Table '{table_name}' not found in any dataset. Use get_database_schema() to see available tables." + + def execute_mimic_query(sql_query: str) -> str: + """๐Ÿš€ Execute SQL queries to analyze MIMIC-IV data. + + **๐Ÿ’ก Pro tip:** For best results, explore the database structure first! + + **Recommended workflow (especially for smaller models):** + 1. **See available tables:** Use `get_database_schema()` to list all tables + 2. **Examine table structure:** Use `get_table_info('table_name')` to see columns and sample data + 3. **Write your SQL query:** Use exact table/column names from exploration + + **Why exploration helps:** + - Table names vary between backends (SQLite vs BigQuery) + - Column names may be unexpected (e.g., age might be 'anchor_age') + - Sample data shows actual formats and constraints + + Args: + sql_query: Your SQL SELECT query (must be SELECT only) + + Returns: + Query results or helpful error messages with next steps + """ + is_safe, message = self._is_safe_query(sql_query) + if not is_safe: + if "describe" in sql_query.lower() or "show" in sql_query.lower(): + return f"โŒ **Security Error:** {message}\n\n๐Ÿ” **For table structure:** Use `get_table_info('table_name')` instead of DESCRIBE\n๐Ÿ“‹ **Why this is better:** Shows columns, types, AND sample data to understand the actual data\n\n๐Ÿ’ก **Recommended workflow:**\n1. `get_database_schema()` โ† See available tables\n2. `get_table_info('table_name')` โ† Explore structure\n3. 
`execute_mimic_query('SELECT ...')` โ† Run your analysis" + return f"โŒ **Security Error:** {message}\n\n๐Ÿ’ก **Tip:** Only SELECT statements are allowed for data analysis." + try: + result = self.backends[self.backend_key].execute(sql_query) + return result + except Exception as e: + error_msg = str(e).lower() + suggestions = [] + if "no such table" in error_msg or "table not found" in error_msg: + suggestions.append( + "๐Ÿ” **Table name issue:** Use `get_database_schema()` to see exact table names" + ) + suggestions.append( + f"๐Ÿ“‹ **Backend-specific naming:** {self.backend_key} has specific table naming conventions" + ) + suggestions.append( + "๐Ÿ’ก **Quick fix:** Check if the table name matches exactly (case-sensitive)" + ) + if "no such column" in error_msg or "column not found" in error_msg: + suggestions.append( + "๐Ÿ” **Column name issue:** Use `get_table_info('table_name')` to see available columns" + ) + suggestions.append( + "๐Ÿ“ **Common issue:** Column might be named differently (e.g., 'anchor_age' not 'age')" + ) + suggestions.append( + "๐Ÿ‘€ **Check sample data:** `get_table_info()` shows actual column names and sample values" + ) + if "syntax error" in error_msg: + suggestions.append( + "๐Ÿ“ **SQL syntax issue:** Check quotes, commas, and parentheses" + ) + suggestions.append( + f"๐ŸŽฏ **Backend syntax:** Verify your SQL works with {self.backend_key}" + ) + suggestions.append( + "๐Ÿ’ญ **Try simpler:** Start with `SELECT * FROM table_name LIMIT 5`" + ) + if "describe" in error_msg.lower() or "show" in error_msg.lower(): + suggestions.append( + "๐Ÿ” **Schema exploration:** Use `get_table_info('table_name')` instead of DESCRIBE" + ) + suggestions.append( + "๐Ÿ“‹ **Better approach:** `get_table_info()` shows columns AND sample data" + ) + if not suggestions: + suggestions.append( + "๐Ÿ” **Start exploration:** Use `get_database_schema()` to see available tables" + ) + suggestions.append( + "๐Ÿ“‹ **Check structure:** Use 
`get_table_info('table_name')` to understand the data" + ) + suggestion_text = "\n".join(f" {s}" for s in suggestions) + return f"โŒ **Query Failed:** {e}\n\n๐Ÿ› ๏ธ **How to fix this:**\n{suggestion_text}\n\n๐ŸŽฏ **Quick Recovery Steps:**\n1. `get_database_schema()` โ† See what tables exist\n2. `get_table_info('your_table')` โ† Check exact column names\n3. Retry your query with correct names\n\n๐Ÿ“š **Current Backend:** {self.backend_key} - table names and syntax are backend-specific" + + def get_icu_stays(patient_id: Optional[int] = None, limit: int = 10) -> str: + """๐Ÿฅ Get ICU stay information and length of stay data. + + **โš ๏ธ Note:** This is a convenience function that assumes standard MIMIC-IV table structure. + **For reliable queries:** Use `get_database_schema()` โ†’ `get_table_info()` โ†’ `execute_mimic_query()` workflow. + + **What you'll get:** Patient IDs, admission times, length of stay, and ICU details. + + Args: + patient_id: Specific patient ID to query (optional) + limit: Maximum number of records to return (default: 10) + + Returns: + ICU stay data as formatted text or guidance if table not found + """ + if not validate_limit(limit): + return "Error: Invalid limit. Must be a positive integer between 1 and 1000." + icustays_table = self.table_names["icustays"] + if patient_id: + query = ( + f"SELECT * FROM {icustays_table} WHERE subject_id = {patient_id}" + ) + else: + query = f"SELECT * FROM {icustays_table} LIMIT {limit}" + result = self.backends[self.backend_key].execute(query) + if "error" in result.lower() or "not found" in result.lower(): + return f"โŒ **Convenience function failed:** {result}\n\n๐Ÿ’ก **For reliable results, use the proper workflow:**\n1. `get_database_schema()` โ† See actual table names\n2. `get_table_info('table_name')` โ† Understand structure\n3. `execute_mimic_query('your_sql')` โ† Use exact names\n\nThis ensures compatibility across different MIMIC-IV setups." 
+ return result + + def get_lab_results( + patient_id: Optional[int] = None, + lab_item: Optional[str] = None, + limit: int = 20, + ) -> str: + """๐Ÿงช Get laboratory test results quickly. + + **โš ๏ธ Note:** This is a convenience function that assumes standard MIMIC-IV table structure. + **For reliable queries:** Use `get_database_schema()` โ†’ `get_table_info()` โ†’ `execute_mimic_query()` workflow. + + **What you'll get:** Lab values, timestamps, patient IDs, and test details. + + Args: + patient_id: Specific patient ID to query (optional) + lab_item: Lab item to search for in the value field (optional) + limit: Maximum number of records to return (default: 20) + + Returns: + Lab results as formatted text or guidance if table not found + """ + if not validate_limit(limit): + return "Error: Invalid limit. Must be a positive integer between 1 and 1000." + labevents_table = self.table_names["labevents"] + conditions = [] + if patient_id: + conditions.append(f"subject_id = {patient_id}") + if lab_item: + escaped_lab_item = lab_item.replace("'", "''") + conditions.append(f"value LIKE '%{escaped_lab_item}%'") + base_query = f"SELECT * FROM {labevents_table}" + if conditions: + base_query += " WHERE " + " AND ".join(conditions) + base_query += f" LIMIT {limit}" + result = self.backends[self.backend_key].execute(base_query) + if "error" in result.lower() or "not found" in result.lower(): + return f"โŒ **Convenience function failed:** {result}\n\n๐Ÿ’ก **For reliable results, use the proper workflow:**\n1. `get_database_schema()` โ† See actual table names\n2. `get_table_info('table_name')` โ† Understand structure\n3. `execute_mimic_query('your_sql')` โ† Use exact names\n\nThis ensures compatibility across different MIMIC-IV setups." + return result + + def get_race_distribution(limit: int = 10) -> str: + """๐Ÿ“Š Get race distribution from hospital admissions. + + **โš ๏ธ Note:** This is a convenience function that assumes standard MIMIC-IV table structure. 
+ **For reliable queries:** Use `get_database_schema()` โ†’ `get_table_info()` โ†’ `execute_mimic_query()` workflow. + + **What you'll get:** Count of patients by race category, ordered by frequency. + + Args: + limit: Maximum number of race categories to return (default: 10) + + Returns: + Race distribution as formatted text or guidance if table not found + """ + if not validate_limit(limit): + return "Error: Invalid limit. Must be a positive integer between 1 and 1000." + admissions_table = self.table_names["admissions"] + query = f"SELECT race, COUNT(*) as count FROM {admissions_table} GROUP BY race ORDER BY count DESC LIMIT {limit}" + result = self.backends[self.backend_key].execute(query) + if "error" in result.lower() or "not found" in result.lower(): + return f"โŒ **Convenience function failed:** {result}\n\n๐Ÿ’ก **For reliable results, use the proper workflow:**\n1. `get_database_schema()` โ† See actual table names\n2. `get_table_info('table_name')` โ† Understand structure\n3. `execute_mimic_query('your_sql')` โ† Use exact names\n\nThis ensures compatibility across different MIMIC-IV setups." 
+ return result + + actions_list = [ + get_database_schema, + get_table_info, + execute_mimic_query, + get_icu_stays, + get_lab_results, + get_race_distribution, + ] + if self.auth: + actions_list = [self.auth.decorator(action) for action in actions_list] + return actions_list + + def _set_required_env_vars(self, backend_key: str) -> None: + self.required_env_vars = {} + + def add_required_vars(section_vars: List[Dict[str, Any]]) -> None: + for var in section_vars: + if var.get("required", False): + key = var["key"] + default = var.get("default", None) + self.required_env_vars[key] = default + + add_required_vars(self.env_config.get("core", [])) + + backend_section = self.env_config.get("backends", {}).get(backend_key, []) + add_required_vars(backend_section) + + enabled = ( + self.config.get_env_var("M3_OAUTH2_ENABLED", "false").lower() == "true" + ) + if enabled: + add_required_vars(self.env_config.get("oauth2", [])) + + logger.debug( + f"Set {len(self.required_env_vars)} required env vars for backend '{backend_key}', oauth enabled: {enabled}" + ) + + def _set_backends(self, backends: List[BackendBase]) -> None: + self.backends = { + b.__class__.__name__.lower().replace("backend", ""): b for b in backends + } + + def _set_auth(self) -> None: + enabled = ( + self.config.get_env_var("M3_OAUTH2_ENABLED", "false").lower() == "true" + ) + self.auth = Auth(self.config) if enabled else None + + def _validate_backend_key(self, backend_key: str) -> None: + if backend_key not in self.backends: + raise M3ValidationError(f"Invalid backend key: {backend_key}") + + def _initialize(self) -> None: + self.table_names = {} + if self.backend_key == "sqlite": + env_vars = { + "icustays": ("M3_ICUSTAYS_TABLE", "icu_icustays"), + "labevents": ("M3_LABEVENTS_TABLE", "hosp_labevents"), + "admissions": ("M3_ADMISSIONS_TABLE", "hosp_admissions"), + } + self.table_names = { + key: self.config.get_env_var(*env) for key, env in env_vars.items() + } + else: + prefix = self.config.get_env_var( 
+ "M3_BIGQUERY_PREFIX", "`physionet-data.mimiciv_3_1_" + ) + self.table_names = { + "icustays": f"{prefix}icu.icustays`", + "labevents": f"{prefix}hosp.labevents`", + "admissions": f"{prefix}hosp.admissions`", + } + + def _get_backend_info(self) -> str: + if "sqlite" in self.backend_key.lower(): + return f"๐Ÿ”ง **Current Backend:** SQLite (local database)\n๐Ÿ“ **Database Path:** {self.backends[self.backend_key].path}\n" + else: + return f"๐Ÿ”ง **Current Backend:** BigQuery (cloud database)\nโ˜๏ธ **Project ID:** {self.backends[self.backend_key].project}\n" + + def _is_safe_query(self, sql_query: str) -> Tuple[bool, str]: + if not sql_query or not sql_query.strip(): + return False, "Empty query" + parsed = sqlparse.parse(sql_query.strip()) + if not parsed: + return False, "Invalid SQL syntax" + if len(parsed) > 1: + return False, "Multiple statements not allowed" + statement = parsed[0] + statement_type = statement.get_type() + if statement_type not in ("SELECT", "UNKNOWN"): + return False, "Only SELECT and PRAGMA queries allowed" + sql_upper = sql_query.strip().upper() + if sql_upper.startswith("PRAGMA"): + return True, "Safe PRAGMA statement" + if not self.security_config: + self.security_config = load_security_config() + dangerous_keywords = set(self.security_config.get("dangerous_keywords", [])) + for keyword in dangerous_keywords: + if f" {keyword} " in f" {sql_upper} ": + return False, f"Write operation not allowed: {keyword}" + injection_patterns = self.security_config.get("injection_patterns", []) + for pattern, description in injection_patterns: + if pattern.upper() in sql_upper: + return False, f"Injection pattern detected: {description}" + suspicious_names = set(self.security_config.get("suspicious_names", [])) + for name in suspicious_names: + if name.upper() in sql_upper: + return ( + False, + f"Suspicious identifier detected: {name} (not medical data)", + ) + return True, "Safe" + + def _post_load(self) -> None: + self.data_io = DataIO(self.config) + 
enabled = ( + self.config.get_env_var("M3_OAUTH2_ENABLED", "false").lower() == "true" + ) + self.auth = Auth(self.config) if enabled else None diff --git a/src/m3/tools/registry.py b/src/m3/tools/registry.py new file mode 100644 index 0000000..3aa3e7a --- /dev/null +++ b/src/m3/tools/registry.py @@ -0,0 +1,75 @@ +import importlib +import inspect +import logging +import os + +from beartype import beartype + +from m3.core.tool.base import BaseTool +from m3.core.tool.cli.base import BaseToolCLI +from m3.core.utils.exceptions import M3ValidationError + +logger = logging.getLogger(__name__) + +TOOLS_DIR = os.path.dirname(__file__) + +ALL_TOOLS = {} + + +@beartype +def _initialize() -> None: + """ + Automatically discover and register tools from subdirectories in tools/. + """ + for entry in os.scandir(TOOLS_DIR): + if entry.is_dir() and not entry.name.startswith("_"): + tool_name = entry.name.lower() + try: + main_module_path = f"m3.tools.{tool_name}.{tool_name}" + main_module = importlib.import_module(main_module_path) + + tool_classes = [ + obj + for name, obj in inspect.getmembers(main_module) + if inspect.isclass(obj) + and issubclass(obj, BaseTool) + and obj != BaseTool + ] + if len(tool_classes) != 1: + raise M3ValidationError( + f"Tool '{tool_name}' must have exactly one subclass of BaseTool in {main_module_path}.py. Found: {len(tool_classes)}" + ) + tool_class = tool_classes[0] + + cli_module_path = f"m3.tools.{tool_name}.cli" + cli_module = importlib.import_module(cli_module_path) + + cli_classes = [ + obj + for name, obj in inspect.getmembers(cli_module) + if inspect.isclass(obj) + and issubclass(obj, BaseToolCLI) + and obj != BaseToolCLI + ] + if len(cli_classes) != 1: + raise M3ValidationError( + f"Tool '{tool_name}' must have exactly one subclass of BaseToolCLI in {cli_module_path}.py. Found: {len(cli_classes)}" + ) + + ALL_TOOLS[tool_name] = tool_class + except ImportError as e: + logger.warning( + f"Failed to import modules for tool '{tool_name}': {e!s}. 
Skipping registration (components not fully available)." + ) + except M3ValidationError as e: + logger.warning( + f"Validation failed for tool '{tool_name}': {e!s}. Skipping registration (BaseTool or BaseToolCLI not available as required)." + ) + except Exception as e: + logger.error( + f"Unexpected error discovering tool '{tool_name}': {e!s}. Skipping registration.", + exc_info=True, + ) + + +_initialize() From 6b46759b5c3ceed062c100156a9b68bd247a731d Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:49:32 +0100 Subject: [PATCH 09/15] refactor: update CLI given new architecture --- src/m3/cli.py | 1129 ++++++++++++++++++++++++++++++------------------- 1 file changed, 695 insertions(+), 434 deletions(-) diff --git a/src/m3/cli.py b/src/m3/cli.py index 9198dfa..1eadae6 100644 --- a/src/m3/cli.py +++ b/src/m3/cli.py @@ -1,478 +1,739 @@ +import importlib +import inspect +import json import logging -import sqlite3 -import subprocess +import os import sys from pathlib import Path -from typing import Annotated +from typing import no_type_check +import rich.box as box import typer +from beartype import beartype +from beartype.typing import Annotated, Dict, Optional, Type +from rich.cells import cell_len +from rich.console import Console, Group +from rich.panel import Panel +from rich.table import Table +from rich.text import Text +from rich_pyfiglet import RichFiglet +from thefuzz import process from m3 import __version__ -from m3.config import ( - SUPPORTED_DATASETS, - get_dataset_config, - get_dataset_raw_files_path, - get_default_database_path, - logger, -) -from m3.data_io import initialize_dataset - -app = typer.Typer( - name="m3", - help="M3 CLI: Initialize local clinical datasets like MIMIC-IV Demo.", - add_completion=False, - rich_markup_mode="markdown", -) - - -def version_callback(value: bool): - if value: - typer.echo(f"M3 CLI Version: {__version__}") - raise typer.Exit() - - -@app.callback() -def main_callback( - version: Annotated[ - 
bool, - typer.Option( - "--version", - "-v", - callback=version_callback, - is_eager=True, - help="Show CLI version.", - ), - ] = False, - verbose: Annotated[ - bool, - typer.Option( - "--verbose", "-V", help="Enable DEBUG level logging for m3 components." - ), - ] = False, -): - """ - Main callback for the M3 CLI. Sets logging level. - """ - m3_logger = logging.getLogger("m3") # Get the logger from config.py - if verbose: - m3_logger.setLevel(logging.DEBUG) - for handler in m3_logger.handlers: # Ensure handlers also respect the new level - handler.setLevel(logging.DEBUG) - logger.debug("Verbose mode enabled via CLI flag.") - else: - # Default to INFO as set in config.py - m3_logger.setLevel(logging.INFO) - for handler in m3_logger.handlers: - handler.setLevel(logging.INFO) - - -@app.command("init") -def dataset_init_cmd( - dataset_name: Annotated[ - str, - typer.Argument( - help=( - "Dataset to initialize. Default: 'mimic-iv-demo'. " - f"Supported: {', '.join(SUPPORTED_DATASETS.keys())}" - ), - metavar="DATASET_NAME", - ), - ] = "mimic-iv-demo", - db_path_str: Annotated[ - str | None, - typer.Option( - "--db-path", - "-p", - help="Custom path for the SQLite DB. Uses a default if not set.", - ), - ] = None, -): - """ - Download a supported dataset (e.g., 'mimic-iv-demo') and load it into a local SQLite - - Raw downloaded files are stored in a `m3_data/raw_files//` subdirectory - and are **not** deleted after processing. - The SQLite database is stored in `m3_data/databases/` or path specified by `--db-path`. 
- """ - logger.info(f"CLI 'init' called for dataset: '{dataset_name}'") +from m3.core.config import M3Config +from m3.core.preset.registry import ALL_PRESETS +from m3.core.tool.cli.base import BaseToolCLI +from m3.core.utils.exceptions import M3ConfigError, M3PresetError, M3ValidationError +from m3.core.utils.logging import setup_logging +from m3.m3 import M3 +from m3.tools.registry import ALL_TOOLS - dataset_key = dataset_name.lower() # Normalize for lookup - dataset_config = get_dataset_config(dataset_key) +logger = logging.getLogger(__name__) - if not dataset_config: - typer.secho( - f"Error: Dataset '{dataset_name}' is not supported or not configured.", - fg=typer.colors.RED, - err=True, - ) - typer.secho( - f"Supported datasets are: {', '.join(SUPPORTED_DATASETS.keys())}", - fg=typer.colors.YELLOW, - err=True, - ) - raise typer.Exit(code=1) - - # Currently, only mimic-iv-demo is fully wired up as an example. - # This check can be removed or adapted as more datasets are supported. - if dataset_key != "mimic-iv-demo": - typer.secho( - ( - f"Warning: While '{dataset_name}' is configured, only 'mimic-iv-demo' " - "is fully implemented for initialization in this version." 
- ), - fg=typer.colors.YELLOW, - ) - - final_db_path = ( - Path(db_path_str).resolve() - if db_path_str - else get_default_database_path(dataset_key) - ) - if not final_db_path: - typer.secho( - f"Critical Error: Could not determine database path for '{dataset_name}'.", - fg=typer.colors.RED, - err=True, - ) - raise typer.Exit(code=1) - - # Ensure parent directory for the database exists - final_db_path.parent.mkdir(parents=True, exist_ok=True) - - raw_files_storage_path = get_dataset_raw_files_path( - dataset_key - ) # Will be created if doesn't exist - typer.echo(f"Initializing dataset: '{dataset_name}'") - typer.echo(f"Target database path: {final_db_path}") - typer.echo(f"Raw files will be stored at: {raw_files_storage_path.resolve()}") - - initialization_successful = initialize_dataset( - dataset_name=dataset_key, db_target_path=final_db_path - ) - - if not initialization_successful: - typer.secho( - ( - f"Dataset '{dataset_name}' initialization FAILED. " - "Please check logs for details." - ), - fg=typer.colors.RED, - err=True, - ) - raise typer.Exit(code=1) - - logger.info( - f"Dataset '{dataset_name}' initialization seems complete. " - "Verifying database integrity..." - ) - - # Basic verification by querying a known table - verification_table_name = dataset_config.get("primary_verification_table") - if not verification_table_name: - logger.warning( - f"No 'primary_verification_table' configured for '{dataset_name}'. " - "Skipping DB query test." - ) - typer.secho( - ( - f"Dataset '{dataset_name}' initialized to {final_db_path}. " - f"Raw files at {raw_files_storage_path.resolve()}." - ), - fg=typer.colors.GREEN, - ) - typer.secho( - "Skipped database query test as no verification table is set in config.", - fg=typer.colors.YELLOW, - ) - return - - try: - conn = sqlite3.connect(final_db_path) - cursor = conn.cursor() - # A simple count query is usually safe and informative. 
- query = f"SELECT COUNT(*) FROM {verification_table_name};" - logger.debug(f"Executing verification query: '{query}' on {final_db_path}") - cursor.execute(query) - count_result = cursor.fetchone() - conn.close() - - if count_result is None: - raise sqlite3.Error( - f"Query on table '{verification_table_name}' returned no result (None)." - ) +console = Console() - record_count = count_result[0] - typer.secho( - ( - f"Database verification successful: Found {record_count} records in " - f"table '{verification_table_name}'." - ), - fg=typer.colors.GREEN, - ) - typer.secho( - ( - f"Dataset '{dataset_name}' ready at {final_db_path}. " - f"Raw files at {raw_files_storage_path.resolve()}." - ), - fg=typer.colors.BRIGHT_GREEN, - ) - except sqlite3.Error as e: - logger.error( - ( - f"SQLite error during verification query on table " - f"'{verification_table_name}': {e}" - ), - exc_info=True, - ) - typer.secho( - ( - f"Error verifying table '{verification_table_name}': {e}. " - f"The database was created at {final_db_path}, but the test query " - "failed. The data might be incomplete or corrupted." - ), - fg=typer.colors.RED, - err=True, - ) - except Exception as e: # Catch any other unexpected errors - logger.error( - f"Unexpected error during database verification: {e}", exc_info=True - ) - typer.secho( - f"An unexpected error occurred during database verification: {e}", - fg=typer.colors.RED, - err=True, - ) +@beartype +class M3CLI: + """M3 Command Line Interface (M3-CLI), manages all M3 supported MCP tools and servers. -@app.command("config") -def config_cmd( - client: Annotated[ - str | None, - typer.Argument( - help="MCP client to configure. Use 'claude' for Claude Desktop auto-setup, or omit for universal config generator.", - metavar="CLIENT", - ), - ] = None, - backend: Annotated[ - str, - typer.Option( - "--backend", - "-b", - help="Backend to use (sqlite or bigquery). 
Default: sqlite", - ), - ] = "sqlite", - db_path: Annotated[ - str | None, - typer.Option( - "--db-path", - "-p", - help="Path to SQLite database (for sqlite backend)", - ), - ] = None, - project_id: Annotated[ - str | None, - typer.Option( - "--project-id", - help="Google Cloud project ID (required for bigquery backend)", - ), - ] = None, - python_path: Annotated[ - str | None, - typer.Option( - "--python-path", - help="Path to Python executable", - ), - ] = None, - working_directory: Annotated[ - str | None, - typer.Option( - "--working-directory", - help="Working directory for the server", - ), - ] = None, - server_name: Annotated[ - str, - typer.Option( - "--server-name", - help="Name for the MCP server", - ), - ] = "m3", - output: Annotated[ - str | None, - typer.Option( - "--output", - "-o", - help="Save configuration to file instead of printing", - ), - ] = None, - quick: Annotated[ - bool, - typer.Option( - "--quick", - "-q", - help="Use quick mode with provided arguments (non-interactive)", - ), - ] = False, -): - """ - Configure M3 MCP server for various clients. + Provides a command-line interface to interact with M3's modular MCP tools and servers. + If you would like to do it programmatically, you can use the `M3` class directly. 
- Examples: + CLI Usage Examples: + ```bash + # Show CLI version + m3 --version - โ€ข m3 config # Interactive universal config generator + # List available presets + m3 list-presets - โ€ข m3 config claude # Auto-configure Claude Desktop + # List available tools + m3 list-tools - โ€ข m3 config --quick # Quick universal config with defaults + # Run M3 with default preset (starts the MCP server) + m3 run --presets default_m3 - โ€ข m3 config claude --backend bigquery --project-id my-project - """ - try: - from m3 import mcp_client_configs - - script_dir = Path(mcp_client_configs.__file__).parent - except ImportError: - typer.secho( - "โŒ Error: Could not find m3.mcp_client_configs package", - fg=typer.colors.RED, - err=True, - ) - raise typer.Exit(code=1) - - # Validate backend-specific arguments - if backend == "sqlite" and project_id: - typer.secho( - "โŒ Error: --project-id can only be used with --backend bigquery", - fg=typer.colors.RED, - err=True, - ) - raise typer.Exit(code=1) + # Run with custom pipeline JSON (assuming custom_pipeline.json exists and is a M3 saved pipeline) + m3 run --pipeline custom_pipeline.json - if backend == "bigquery" and db_path: - typer.secho( - "โŒ Error: --db-path can only be used with --backend sqlite", - fg=typer.colors.RED, - err=True, - ) - raise typer.Exit(code=1) - - # Require project_id for BigQuery backend - if backend == "bigquery" and not project_id: - typer.secho( - "โŒ Error: --project-id is required when using --backend bigquery", - fg=typer.colors.RED, - err=True, - ) - raise typer.Exit(code=1) + # Build M3 config without running + m3 build --presets default_m3 --output m3_config.json - if client == "claude": - # Run the Claude Desktop setup script - script_path = script_dir / "setup_claude_desktop.py" + # Build with multiple presets + m3 build --presets preset1,preset2 --config-type claude --output m3_config.json # By default, --output is not mandatory. 
- if not script_path.exists(): - typer.secho( - f"Error: Claude Desktop setup script not found at {script_path}", - fg=typer.colors.RED, - err=True, - ) - raise typer.Exit(code=1) + # Search for presets or M3-supported MCP tools matching the search query + m3 search mimic-IV # Will output matching presets and tools - # Build command arguments - cmd = [sys.executable, str(script_path)] + # Add a tool to a newly-created pipeline + m3 pipeline mimic --new-pipeline custom_pipeline.json - if backend != "sqlite": - cmd.extend(["--backend", backend]) + # Add a tool to an existing pipeline + m3 pipeline mimic --to-pipeline existing_pipeline.json - if backend == "sqlite" and db_path: - cmd.extend(["--db-path", db_path]) - elif backend == "bigquery" and project_id: - cmd.extend(["--project-id", project_id]) + # Tool-specific subcommands (e.g., for mimic tool in this very example) โ€” `m3 tools ` + m3 tools mimic init --dataset mimic-iv-demo --db-path demo.db + m3 tools mimic configure --backend sqlite --enable-oauth2 # If you do not specify --backend, it'll launch CLI-interactive configuration. + m3 tools mimic status # More used internally, but you can use it to check the status of the mimic tool if env vars are applied. + m3 tools mimic --help + """ + def __init__(self) -> None: + self._display_banner() + self.app: typer.Typer = typer.Typer( + help="\n\n", + add_completion=False, + pretty_exceptions_show_locals=False, + rich_markup_mode="markdown", + ) + self.tools_app: typer.Typer = typer.Typer(help="Tool-specific commands.") + self.app.callback()(self.main_callback) + self.app.command( + help="List all available `M3` presets, which are pre-configured pipelines ready to run out-of-the-box." + )(self.list_presets) + self.app.command( + help="List all available tools supported by `M3` for integration into MCP pipelines." 
+ )(self.list_tools) + self.app.command( + help="`Run` the `M3` `fastMCP` instance (from `--presets` or a `pipeline` config), build the MCP server config, and start the server (defaults to `FastMCP`)." + )(self.run) + self.app.command( + help="`Build` a MCP server configuration (e.g., for `Claude Desktop` or `FastMCP`) from a `pipeline` without starting the server." + )(self.build) + self.app.command( + help="`Compose` || `extend` an `M3` pipeline by adding a M3-MCP tool and generating its configuration; Hint: follow next by `build`ing you pipeline, e.g for `Claude Desktop`." + )(self.pipeline) + self.app.add_typer( + self.tools_app, + name="tools", + help="Access tool-specific subcommands. For details, run `m3 tools --help`.", + ) + self.app.command( + help="`Search` for `M3` presets or supported MCP tools based on a query, such as `mimic-IV`." + )(self.search) + self.tool_clis = self._load_tool_clis() + + def __call__(self) -> None: + self.app() + + @staticmethod + def version_callback(value: bool) -> None: + if value: + console.print(f"[bold green]๐Ÿ’ฌ M3 CLI Version: {__version__}[/bold green]") + raise typer.Exit() + + @no_type_check + def main_callback( + self, + version: Annotated[ + bool, + typer.Option( + "--version", + "-v", + callback=version_callback.__func__, + is_eager=True, + help="Show CLI version.", + ), + ] = False, + verbose: Annotated[ + bool, + typer.Option("--verbose", "-V", help="Enable DEBUG level logging."), + ] = False, + ) -> None: + level = "DEBUG" if verbose else "INFO" + setup_logging(level=level) + if verbose: + logger.debug("Verbose mode enabled.") + + def list_presets(self) -> None: + console.print("[bold green]๐Ÿ’ฌ Available Presets[/bold green]") + table = Table(title="") + table.add_column("Preset", style="cyan") + for preset in ALL_PRESETS.keys(): + table.add_row(preset) + console.print(table) + + def list_tools(self) -> None: + console.print("[bold green]๐Ÿ’ฌ Available Tools[/bold green]") + table = Table(title="") + 
table.add_column("Tool", style="cyan") + for tool in ALL_TOOLS.keys(): + table.add_row(tool) + console.print(table) + + def run( + self, + pipeline: Annotated[ + Optional[str], + typer.Option("--pipeline", help="Pipeline JSON path (!= Presets)."), + ] = None, + presets: Annotated[ + Optional[str], + typer.Option( + "--presets", + help="Comma-separated M3 pipeline presets if no pipeline in hand.", + ), + ] = "default_m3", + config_type: Annotated[ + str, + typer.Option( + "--config-type", + help="Final MCP host Configuration type for build (e.g., `fastmcp`, `claude`. etc).", + ), + ] = "fastmcp", + config_path: Annotated[ + Optional[str], + typer.Option( + "--config-path", + "-c", + help="Path where to save your M3 pipeline JSON configuration (defaults to `m3_pipeline.json` or the --pipeline path if provided).", + ), + ] = None, + show_status: Annotated[ + bool, + typer.Option( + "--show-status", + help="Display tool status post-build. Mostly used internally.", + ), + ] = True, + command: Annotated[ + Optional[str], + typer.Option( + "--command", + help="Custom command for Final MCP server (e.g., `python3` or a specific path).", + ), + ] = None, + args: Annotated[ + Optional[str], + typer.Option( + "--args", + help="Comma-separated arguments for Final MCP server (e.g., `[-m,custom.module]`).", + ), + ] = None, + cwd: Annotated[ + Optional[str], + typer.Option("--cwd", help="Working directory for Final MCP server."), + ] = None, + module_name: Annotated[ + Optional[str], + typer.Option( + "--module-name", + help="Module name for default arguments (e.g., `custom.runner`).", + ), + ] = None, + ) -> None: + console.print("[bold green]๐Ÿ’ฌ Starting M3 run...[/bold green]") try: - result = subprocess.run(cmd, check=True, capture_output=False) - if result.returncode == 0: - typer.secho( - "โœ… Claude Desktop configuration completed!", fg=typer.colors.GREEN + if pipeline and config_path: + raise ValueError( + "Cannot specify both --pipeline and --config-path. 
When using --pipeline, the pipeline is loaded and saved back to the same file." ) - except subprocess.CalledProcessError as e: - typer.secho( - f"โŒ Claude Desktop setup failed with exit code {e.returncode}", - fg=typer.colors.RED, - err=True, + config = M3Config(env_vars=os.environ.copy()) + _config_path = config_path or pipeline or "m3_pipeline.json" + _config_path = os.path.abspath(_config_path) + if pipeline: + console.print( + f"[bold green]๐Ÿ’ฌ Loaded pipeline: {pipeline}[/bold green]" + ) + m3 = M3.load(pipeline) + else: + m3 = M3(config=config) + preset_list = [p.strip() for p in presets.split(",")] if presets else [] + for preset in preset_list: + if preset not in ALL_PRESETS: + available_presets = list(ALL_PRESETS.keys()) + best_match, score = process.extractOne( + preset, available_presets + ) or ( + None, + 0, + ) + suggestion_text = ( + f" Did you mean '{best_match}'?" if score >= 80 else "" + ) + raise M3PresetError( + f"Unknown preset: {preset}. Use `m3 list-presets`.{suggestion_text}" + ) + console.print( + f"[bold green]๐Ÿ’ฌ Applying preset '{preset}'...[/bold green]" + ) + m3 = m3.with_preset(preset) + console.print( + f"[bold green]๐Ÿ’ฌ Building M3 with config type '{config_type}'...[/bold green]" ) - raise typer.Exit(code=e.returncode) - except FileNotFoundError: - typer.secho( - "โŒ Python interpreter not found. 
Please ensure Python is installed.", - fg=typer.colors.RED, - err=True, + args_list = args.split(",") if args else None + m3.build( + type=config_type, + command=command, + args=args_list, + cwd=cwd, + module_name=module_name, + pipeline_config_path=_config_path, + save_path=None, ) - raise typer.Exit(code=1) - - else: - # Run the dynamic config generator - script_path = script_dir / "dynamic_mcp_config.py" - - if not script_path.exists(): - typer.secho( - f"Error: Dynamic config script not found at {script_path}", - fg=typer.colors.RED, - err=True, + m3.save(_config_path) + console.print( + f"[bold green]๐Ÿ’ฌ โœ… Saved pipeline config to {_config_path}.[/bold green]" ) - raise typer.Exit(code=1) - - # Build command arguments - cmd = [sys.executable, str(script_path)] - - if quick: - cmd.append("--quick") - - if backend != "sqlite": - cmd.extend(["--backend", backend]) + if show_status: + self._status() + console.print("[bold green]๐Ÿ’ฌ Starting M3 MCP server...[/bold green]") + m3.run() + except (M3ValidationError, M3PresetError, ValueError) as e: + logger.error(f"Run failed: {e}") + console.print(f"[red]โŒ Error: {e}[/red]") + raise typer.Exit(code=1) from e + except Exception as e: + logger.error(f"Unexpected error in run: {e}") + console.print(f"[red]โŒ Unexpected error: {e}[/red]") + raise typer.Exit(code=1) from e + + def build( + self, + pipeline: Annotated[ + Optional[str], + typer.Option("--pipeline", help="Pipeline JSON path (!= Presets)."), + ] = None, + presets: Annotated[ + Optional[str], + typer.Option( + "--presets", + help="Comma-separated M3 pipeline presets if no pipeline in hand.", + ), + ] = "default_m3", + config_type: Annotated[ + str, + typer.Option( + "--config-type", + help="Configuration type for the MCP server (e.g., `fastmcp`, `claude`, etc.).", + ), + ] = "fastmcp", + config_path: Annotated[ + Optional[str], + typer.Option( + "--config-path", + "-c", + help="Path where to save your M3 pipeline JSON configuration (defaults to 
`m3_pipeline.json` or the --pipeline path if provided).", + ), + ] = None, + output: Annotated[ + Optional[str], + typer.Option( + "--output", + "-o", + help="Output path for the Final MCP server configuration (defaults depends on `config_type` but could be e.g `m3_claude_config.json` for `claude`).", + ), + ] = None, + show_status: Annotated[ + bool, + typer.Option( + "--show-status", + help="Display tool status post-build. Mostly used internally.", + ), + ] = True, + command: Annotated[ + Optional[str], + typer.Option( + "--command", + help="Custom command for MCP server (e.g., `python3` or a specific path).", + ), + ] = None, + args: Annotated[ + Optional[str], + typer.Option( + "--args", + help="Comma-separated arguments for MCP server (e.g., `[-m,custom.module]`).", + ), + ] = None, + cwd: Annotated[ + Optional[str], + typer.Option("--cwd", help="Working directory for MCP server."), + ] = None, + module_name: Annotated[ + Optional[str], + typer.Option( + "--module-name", + help="Module name for default arguments (e.g., `custom.runner`).", + ), + ] = None, + ) -> None: + console.print("[bold green]๐Ÿ’ฌ Starting M3 build...[/bold green]") + try: + if pipeline and config_path: + raise ValueError( + "Cannot specify both --pipeline and --config-path. When using --pipeline, the pipeline is loaded and saved back to the same file." 
+ ) + config = M3Config(env_vars=os.environ.copy()) + _config_path = config_path or pipeline or "m3_pipeline.json" + _config_path = os.path.abspath(_config_path) + if pipeline: + console.print( + f"[bold green]๐Ÿ’ฌ Loaded pipeline: {pipeline}[/bold green]" + ) + m3 = M3.load(pipeline) + else: + m3 = M3(config=config) + preset_list = [p.strip() for p in presets.split(",")] if presets else [] + for preset in preset_list: + if preset not in ALL_PRESETS: + available_presets = list(ALL_PRESETS.keys()) + best_match, score = process.extractOne( + preset, available_presets + ) or ( + None, + 0, + ) + suggestion_text = ( + f" Did you mean '{best_match}'?" if score >= 80 else "" + ) + raise M3PresetError( + f"Unknown preset: {preset}. Use `m3 list-presets`.{suggestion_text}" + ) + console.print( + f"[bold green]๐Ÿ’ฌ Applying preset '{preset}'...[/bold green]" + ) + m3 = m3.with_preset(preset) + _save_path = os.path.abspath(output) if output else None + console.print( + f"[bold green]๐Ÿ’ฌ Building M3 with config type '{config_type}'...[/bold green]" + ) + args_list = args.split(",") if args else None + m3.build( + type=config_type, + command=command, + args=args_list, + cwd=cwd, + module_name=module_name, + pipeline_config_path=_config_path, + save_path=_save_path, + ) + m3.save(_config_path) + console.print("[bold green]๐Ÿ’ฌ โœ… Pipeline config saved.[/bold green]") + if show_status: + self._status() + except (M3ValidationError, M3PresetError, ValueError) as e: + logger.error(f"Build failed: {e}") + console.print(f"[red]โŒ Error: {e}[/red]") + raise typer.Exit(code=1) from e + except Exception as e: + logger.error(f"Unexpected error in build: {e}") + console.print(f"[red]โŒ Unexpected error: {e}[/red]") + raise typer.Exit(code=1) from e + + def pipeline( + self, + tool_name: Annotated[ + str, + typer.Argument( + help="Tool to incorporate to your newly-designed or already-ready M3 pipeline (e.g., `mimic`)." 
+ ), + ], + to_pipeline: Annotated[ + Optional[str], + typer.Option( + "--to-pipeline", + help="Whether or not you are adding to an existing M3 pipeline. If so, provide the path to the pipeline JSON file. It'll append the tool to the existing pipeline.", + ), + ] = None, + new_pipeline: Annotated[ + Optional[str], + typer.Option( + "--new-pipeline", + help="Whether or not you are creating a new M3 pipeline. If so, provide the path to the new pipeline JSON file (defaults to `m3_pipeline.json`).", + ), + ] = "m3_pipeline.json", + tool_config: Annotated[ + Optional[str], + typer.Option( + "--tool-config", + help="Path to pre-generated tool config JSON (from 'm3 tools configure --output ...'). If provided, uses this instead of interactive configuration.", + ), + ] = None, + ) -> None: + console.print( + f"[bold green]๐Ÿ’ฌ Adding tool '{tool_name}' to pipeline...[/bold green]" + ) + if tool_name not in ALL_TOOLS: + raise M3ValidationError(f"Unknown tool: {tool_name}. Use `m3 list-tools`.") - if server_name != "m3": - cmd.extend(["--server-name", server_name]) + try: + tool_cli_class = self._get_tool_cli_class(tool_name) + + if tool_config: + if not Path(tool_config).exists(): + raise M3ValidationError( + f"Tool config file not found: {tool_config}" + ) + with open(tool_config) as f: + tool_dict = json.load(f) + else: + tool_dict = tool_cli_class.configure() + + pipeline_path = to_pipeline or new_pipeline + if to_pipeline and Path(to_pipeline).exists(): + m3: M3 = M3.load(to_pipeline) + console.print(f"[bold green]๐Ÿ’ฌ Appending to {to_pipeline}[/bold green]") + else: + m3 = M3() + console.print( + f"[bold green]๐Ÿ’ฌ Creating new pipeline at {pipeline_path}[/bold green]" + ) - if python_path: - cmd.extend(["--python-path", python_path]) + prefixed_env = { + f"{key}": value # In the future, to avoid tools-vars conflicts, we could f"{tool_name.upper()}_{key}" + for key, value in tool_dict.get("env_vars", {}).items() + } - if working_directory: - 
cmd.extend(["--working-directory", working_directory]) + m3.config.merge_env(prefixed_env) - if backend == "sqlite" and db_path: - cmd.extend(["--db-path", db_path]) - elif backend == "bigquery" and project_id: - cmd.extend(["--project-id", project_id]) + tool_cls = ALL_TOOLS[tool_name] + tool_params = tool_dict.get("tool_params", {}) + tool = tool_cls.from_dict(tool_params) + m3 = m3.with_tool(tool) - if output: - cmd.extend(["--output", output]) + m3.build() + m3.save(pipeline_path) + console.print( + f"[bold green]๐Ÿ’ฌ โœ… Pipeline updated: {pipeline_path} (tools: {len(m3.tools)})[/bold green]" + ) + except M3ConfigError as e: + logger.error(f"Env merge failed: {e}") + console.print(f"[red]โŒ Env conflict: {e}[/red]") + raise typer.Exit(1) from e + except M3ValidationError as e: + logger.error(f"Validation failed: {e}") + console.print(f"[red]โŒ {e}[/red]") + raise typer.Exit(1) from e + except Exception as e: + logger.error(f"Failed to add {tool_name}: {e}", exc_info=True) + console.print(f"[red]โŒ Failed to add {tool_name}: {e}[/red]") + raise typer.Exit(1) from e + + def search( + self, + query: Annotated[ + str, + typer.Argument(help="Search query for presets or tools. E.g., `mimic-IV`."), + ], + type_: Annotated[ + str, + typer.Option( + "--type", help="Search type: `presets` or `tools` (default: both)." + ), + ] = "both", + limit: Annotated[ + int, + typer.Option( + "--limit", help="Number of results to display. This is very optional." + ), + ] = 5, + ) -> None: + console.print(f"[bold green]๐Ÿ’ฌ Searching for '{query}'...[/bold green]") + if type_ not in ["presets", "tools", "both"]: + console.print( + "[red]โŒ Invalid type. 
Use `presets`, `tools`, or `both`.[/red]" + ) + raise typer.Exit(code=1) - if quick: - typer.echo("๐Ÿ”ง Generating M3 MCP configuration...") - else: - typer.echo("๐Ÿ”ง Starting interactive M3 MCP configuration...") + results = [] + if type_ in ["presets", "both"]: + presets = list(ALL_PRESETS.keys()) + preset_matches = process.extract(query, presets, limit=limit) + results.append(("Presets", preset_matches)) + if type_ in ["tools", "both"]: + tools = list(ALL_TOOLS.keys()) + tool_matches = process.extract(query, tools, limit=limit) + results.append(("Tools", tool_matches)) + + for category, matches in results: + table = Table(title=f"[bold green]๐Ÿ’ฌ {category} matches[/bold green]") + table.add_column("Match", style="cyan") + table.add_column("Score", style="magenta") + for match, score in matches: + table.add_row(match, str(score)) + console.print(table) + + def _load_tool_clis(self) -> Dict[str, typer.Typer]: + tool_clis = {} + for tool_name in ALL_TOOLS: + if cli := self._load_tool_cli(tool_name): + tool_clis[tool_name] = cli + self.tools_app.add_typer( + cli, + name=tool_name, + help=f"{tool_name.capitalize()} tool commands.", + ) + if not tool_clis: + raise M3ValidationError( + "At least one tool CLI must be available to use M3's CLI." + ) + return tool_clis + @staticmethod + def _load_tool_cli(tool_name: str) -> Optional[typer.Typer]: + try: + module_path = f"m3.tools.{tool_name}.cli" + module = importlib.import_module(module_path) + tool_cli_classes = [ + obj + for name, obj in inspect.getmembers(module) + if inspect.isclass(obj) + and issubclass(obj, BaseToolCLI) + and obj != BaseToolCLI + ] + if not tool_cli_classes: + logger.warning( + f"No subclass of BaseToolCLI found in module for '{tool_name}'." + ) + return None + if len(tool_cli_classes) > 1: + raise M3ValidationError( + f"Multiple BaseToolCLI subclasses found in module for '{tool_name}'." 
+ ) + tool_cli_class = tool_cli_classes[0] + app = tool_cli_class.get_app() + if not app: + logger.warning(f"Tool '{tool_name}' returned None for get_app().") + return None + logger.debug(f"Loaded CLI for tool '{tool_name}'.") + if not hasattr(tool_cli_class, "status"): + raise M3ValidationError( + f"Tool '{tool_name}' must implement 'status' method." + ) + if not hasattr(tool_cli_class, "init"): + logger.debug(f"Tool '{tool_name}' does not support 'init'.") + if not hasattr(tool_cli_class, "configure"): + raise M3ValidationError( + f"Tool '{tool_name}' must implement 'configure' method." + ) + return app + except ImportError as e: + logger.debug(f"No CLI for tool '{tool_name}': {e}") + return None + except Exception as e: + logger.error(f"Failed to load CLI for '{tool_name}': {e}") + return None + + @staticmethod + def _get_tool_cli_class(tool_name: str) -> Type[BaseToolCLI]: try: - result = subprocess.run(cmd, check=True, capture_output=False) - if result.returncode == 0 and quick: - typer.secho( - "โœ… Configuration generated successfully!", fg=typer.colors.GREEN + module_path = f"m3.tools.{tool_name}.cli" + module = importlib.import_module(module_path) + tool_cli_classes = [ + obj + for name, obj in inspect.getmembers(module) + if inspect.isclass(obj) + and issubclass(obj, BaseToolCLI) + and obj != BaseToolCLI + ] + if not tool_cli_classes: + raise M3ValidationError( + f"No subclass of BaseToolCLI found in module for '{tool_name}'." + ) + if len(tool_cli_classes) > 1: + raise M3ValidationError( + f"Multiple BaseToolCLI subclasses found in module for '{tool_name}'." 
) - except subprocess.CalledProcessError as e: - typer.secho( - f"โŒ Configuration generation failed with exit code {e.returncode}", - fg=typer.colors.RED, - err=True, + return tool_cli_classes[0] + except ImportError as e: + logger.error(f"Failed to import CLI module for '{tool_name}': {e}") + raise M3ValidationError( + f"CLI module import failed for '{tool_name}': {e}" + ) from e + except Exception as e: + logger.error( + f"Unexpected error loading CLI class for '{tool_name}': {e}", + exc_info=True, ) - raise typer.Exit(code=e.returncode) - except FileNotFoundError: - typer.secho( - "โŒ Python interpreter not found. Please ensure Python is installed.", - fg=typer.colors.RED, - err=True, + raise M3ValidationError( + f"Unexpected error loading CLI for '{tool_name}': {e}" + ) from e + + def _status( + self, + tool: Optional[str] = None, + verbose: bool = False, + ) -> None: + console.print("[bold green]๐Ÿ’ฌ Checking status...[/bold green]") + if tool and tool not in self.tool_clis: + available_tools = list(self.tool_clis.keys()) + best_match, score = process.extractOne(tool, available_tools) or (None, 0) + suggestion_text = f" Did you mean '{best_match}'?" if score >= 80 else "" + console.print( + f"[red]โŒ Unknown tool: {tool}. 
Use `m3 list-tools`.{suggestion_text}[/red]" ) raise typer.Exit(code=1) + tools_to_check = [tool] if tool else list(self.tool_clis.keys()) + + for _tool in tools_to_check: + try: + tool_cli_class = self._get_tool_cli_class(_tool) + tool_cli_class.status(verbose=verbose) + except M3ValidationError as e: + logger.error(f"Failed to load CLI class for '{_tool}': {e}") + console.print(f"[red]โŒ Error loading CLI for '{_tool}': {e}[/red]") + except Exception as e: + logger.error(f"Status failed for '{_tool}': {e}", exc_info=True) + console.print(f"[red]โŒ Error getting status for '{_tool}': {e}[/red]") + + @staticmethod + def _display_banner() -> None: + if any(arg in sys.argv for arg in ["--help", "-h"]): + rich_fig = RichFiglet( + "M3", + font="ansi_shadow", + colors=["#750014", "#750014", "#750014", "#FFFFFF", "#FFFFFF"], + horizontal=True, + remove_blank_lines=True, + ) + entries = [ + ("๐Ÿ—‚๏ธ", " Repo", "https://github.com/rafiattrach/m3"), + ("๐Ÿ“š", "Documentation", "https://rafiattrach.github.io/m3/"), + ("๐Ÿ“„", "Paper", "https://arxiv.org/abs/2507.01053"), + ("๐ŸŽ๏ธ", " Version", __version__), + ] + max_label_len = max( + cell_len(emoji + " " + key + ":") for emoji, key, value in entries + ) + group_items = [ + Text(""), + Text(""), + rich_fig, + Text(""), + Text( + "Simplifying secure clinical data access with conversational AI โ€” M3 ", + style="bold italic turquoise4", + ), + Text(""), + ] + for i, (emoji, key, value) in enumerate(entries): + label_plain = emoji + " " + key + ":" + label_len = cell_len(label_plain) + spaces = " " * (max_label_len - label_len + 2) + line = f"[turquoise4]{label_plain}[/turquoise4]{spaces}{value}" + group_items.append(Text.from_markup(line)) + if i == 1: + group_items.append(Text("")) + group_items += [Text(""), Text("")] + content = Group(*group_items) + console.print( + Panel( + content, + title="M3 CLI", + width=80, + title_align="left", + expand=False, + box=box.ROUNDED, + padding=(1, 5), + ) + ) + + +def main_cli() 
-> None: + M3CLI()() + if __name__ == "__main__": - app() + main_cli() From c064dd4784aa123fa2380732798a58273555841f Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:50:20 +0100 Subject: [PATCH 10/15] feat: add core M3 w/ chaining-API-style class --- src/m3/__init__.py | 32 +++++- src/m3/m3.py | 258 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 286 insertions(+), 4 deletions(-) create mode 100644 src/m3/m3.py diff --git a/src/m3/__init__.py b/src/m3/__init__.py index 393f13d..58d7341 100644 --- a/src/m3/__init__.py +++ b/src/m3/__init__.py @@ -1,5 +1,29 @@ -""" -MIMIC-IV + MCP + Models (M3): Local MIMIC-IV querying with LLMs via Model Context Protocol -""" - __version__ = "0.2.0" + +from .cli import M3CLI +from .core.config import M3Config +from .core.utils.exceptions import ( + AuthenticationError, + M3BuildError, + M3ConfigError, + M3Error, + M3InitializationError, + M3PresetError, + M3ValidationError, + TokenValidationError, +) +from .m3 import M3 + +__all__ = [ + "M3", + "M3CLI", + "AuthenticationError", + "M3BuildError", + "M3Config", + "M3ConfigError", + "M3Error", + "M3InitializationError", + "M3PresetError", + "M3ValidationError", + "TokenValidationError", +] diff --git a/src/m3/m3.py b/src/m3/m3.py new file mode 100644 index 0000000..04156d4 --- /dev/null +++ b/src/m3/m3.py @@ -0,0 +1,258 @@ +import json +import logging +import os + +from beartype import beartype +from beartype.typing import Any, List, Optional, Union +from fastmcp import FastMCP +from thefuzz import process + +from m3.core.config import M3Config +from m3.core.mcp_config_generator.registry import ALL_MCP_CONFIG_GENERATORS +from m3.core.tool.base import BaseTool +from m3.core.utils.exceptions import ( + M3BuildError, + M3ConfigError, + M3InitializationError, + M3PresetError, + M3ValidationError, +) +from m3.tools.registry import ALL_TOOLS + +logger = logging.getLogger(__name__) + + +@beartype +class M3: + """M3 core for composing MCP-M3 supported tools 
with ease-of-use using chaining-style API. + + This class provides an API to configure M3 with config and tools. Tools supported and validated by M3. + + Examples: + >>> m3 = ( + ... M3() + ... .with_config(M3Config(log_level="DEBUG")) # More is available, refer to M3Config's documentation + ... .with_tool(MIMIC(backends=[SQLiteBackend(path="db.sqlite")])) # More is available, refer to MIMIC's documentation + ... # more chaining methods exists (e.g. with_tools, with_preset, etc.), + ... ) + >>> config = m3.build(type="claude") # Setup M3 directly into Claude Desktop MCP + >>> m3.save("m3_pipeline.json") # Serialize your just-created M3 pipeline to JSON + >>> loaded_m3 = M3.load("m3_pipeline.json").run() # Load it later / share it to colleagues and start MCP server + """ + + def __init__( + self, + config: Optional[M3Config] = None, + mcp: Optional[FastMCP] = None, + ) -> None: + self.config = config or M3Config() + self.tools = [] + self.mcp = mcp + self._mcp_config_generators = ALL_MCP_CONFIG_GENERATORS + self._built = False + + def with_config(self, config: M3Config) -> "M3": + new = M3( + config=config, + mcp=self.mcp, + ) + new.tools = self.tools[:] + return new + + def with_tool(self, tool: BaseTool) -> "M3": + new = M3( + config=self.config, + mcp=self.mcp, + ) + new.tools = [*self.tools, tool] + return new + + def with_tools(self, tools: List[BaseTool]) -> "M3": + new = M3( + config=self.config, + mcp=self.mcp, + ) + new.tools = self.tools + tools + return new + + def with_preset(self, preset_name: str, **kwargs: Any) -> "M3": + from m3.core.preset.registry import ALL_PRESETS + + if preset_name not in ALL_PRESETS: + available_presets = list(ALL_PRESETS.keys()) + best_match, score = process.extractOne(preset_name, available_presets) or ( + None, + 0, + ) + suggestion_text = f" Did you mean '{best_match}'?" 
if score >= 80 else "" + raise M3PresetError(f"Unknown preset: {preset_name}.{suggestion_text}") + preset_class = ALL_PRESETS[preset_name] + config = kwargs.pop("config", self.config) + try: + preset_m3 = preset_class.create(config=config, **kwargs) + except Exception as e: + raise M3PresetError( + f"Failed to create preset '{preset_name}'", details=str(e) + ) from e + merged_tools = self.tools + preset_m3.tools + new = M3( + config=preset_m3.config, + mcp=preset_m3.mcp or self.mcp, + ) + new.tools = merged_tools + return new + + def build( + self, + type: str = "fastmcp", + command: Optional[str] = None, + args: Optional[List[str]] = None, + cwd: Optional[str] = None, + module_name: Optional[str] = None, + pipeline_config_path: Optional[str] = None, + save_path: Optional[str] = None, + ) -> Union[dict, str]: + try: + self._validate() + self._initialize_mcp() + self._initialize_tools() + self._register_actions() + self._built = True + return self._generate_config( + type, + command=command, + args=args, + cwd=cwd, + module_name=module_name, + pipeline_config_path=pipeline_config_path, + save_path=save_path, + ) + except Exception as e: + raise M3BuildError("Build process failed", details=str(e)) from e + + def run(self) -> None: + if not self._built: + raise M3BuildError("Call .build() before .run()") + try: + if not self.mcp: + raise M3InitializationError("MCP not initialized") + logger.info("Starting MCP server...") + self.mcp.run() # type: ignore + except Exception as e: + logger.error(f"Failed to run MCP server: {e}") + raise + finally: + self._teardown_tools() + logger.info("MCP server shutdown complete.") + + def save(self, path: str) -> None: + if not self._built: + raise M3BuildError("Call .build() before .save()") + try: + config_data = { + "config": self.config.to_dict(), + "tools": [ + {"type": tool.__class__.__name__.lower(), "params": tool.to_dict()} + for tool in self.tools + ], + } + with open(path, "w") as f: + json.dump(config_data, f, indent=4) + 
logger.info(f"โœ… Saved pipeline config to {path}.") + + except (TypeError, ValueError) as e: + logger.error(f"Serialization error: {e}") + raise M3BuildError(f"Failed to serialize: {e}") from e + except OSError as e: + logger.error(f"File write error: {e}") + raise + + @classmethod + def load(cls, path: str) -> "M3": + if not os.path.exists(path): + raise FileNotFoundError(f"Config not found: {path}") + try: + with open(path) as f: + data = json.load(f) + config = M3Config.from_dict(data["config"]) + instance = cls(config=config) + for tool_data in data.get("tools", []): + tool_type = tool_data["type"] + if tool_type not in ALL_TOOLS: + raise M3ValidationError(f"Unknown tool type: {tool_type}") + tool_cls = ALL_TOOLS[tool_type] + tool = tool_cls.from_dict(tool_data["params"]) + instance = instance.with_tool(tool) + instance._post_load() + instance._built = True + logger.info(f"Pipeline loaded from {path}") + return instance + except (json.JSONDecodeError, KeyError, ValueError, M3ConfigError) as e: + logger.error(f"Config load error: {e}") + raise M3ValidationError(f"Invalid config: {e}") from e + except OSError as e: + logger.error(f"File read error: {e}") + raise + + def _validate(self) -> None: + if not self.tools: + raise M3ValidationError("At least one tool must be added.") + self.config.validate_for_tools(self.tools) + + def _initialize_mcp(self) -> None: + if not self.mcp: + self.mcp = FastMCP("m3") + + def _initialize_tools(self) -> None: + for tool in self.tools: + try: + tool.initialize() + except Exception as e: + raise M3InitializationError( + f"Tool initialization failed for {tool.__class__.__name__}", + details=str(e), + ) from e + + def _register_actions(self) -> None: + actions = [action for tool in self.tools for action in tool.actions()] + for action in actions: + self.mcp.tool()(action) # type: ignore + + def _generate_config( + self, + type: str, + command: Optional[str] = None, + args: Optional[List[str]] = None, + cwd: Optional[str] = None, + 
module_name: Optional[str] = None, + pipeline_config_path: Optional[str] = None, + save_path: Optional[str] = None, + ) -> Union[dict, str]: + if type not in self._mcp_config_generators: + available_types = list(self._mcp_config_generators.keys()) + best_match, score = process.extractOne(type, available_types) or (None, 0) + suggestion_text = f" Did you mean '{best_match}'?" if score >= 80 else "" + raise M3ValidationError(f"Unknown config type: {type}.{suggestion_text}") + generator_class = self._mcp_config_generators[type] + return generator_class.generate( + self, + command=command, + args=args, + cwd=cwd, + module_name=module_name, + pipeline_config_path=pipeline_config_path, + save_path=save_path, + ) + + def _teardown_tools(self) -> None: + for tool in self.tools: + if hasattr(tool, "backends"): + for backend in tool.backends.values(): + backend.teardown() + + def _post_load(self) -> None: + for tool in self.tools: + tool.post_load() + if self.mcp: + self._register_actions() + self._built = True From de56abf2bb88075151dcacb5c95d82343ade22b3 Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:51:19 +0100 Subject: [PATCH 11/15] refactor(tests): improve unit-tests given new architecture --- tests/core/test_config.py | 138 ++++ tests/core/test_mcp_config_generator.py | 160 ++++ tests/core/test_server.py | 61 ++ tests/core/test_tool.py | 82 ++ tests/m3/test_cli.py | 272 +++++++ tests/m3/test_m3.py | 344 +++++++++ tests/test_cli.py | 153 ---- tests/test_config.py | 40 - tests/test_config_scripts.py | 118 --- tests/test_data_io.py | 50 -- tests/test_example.py | 6 - tests/test_mcp_server.py | 361 --------- tests/test_oauth2_basic.py | 208 ----- tests/tools/test_tools.py | 971 ++++++++++++++++++++++++ 14 files changed, 2028 insertions(+), 936 deletions(-) create mode 100644 tests/core/test_config.py create mode 100644 tests/core/test_mcp_config_generator.py create mode 100644 tests/core/test_server.py create mode 100644 tests/core/test_tool.py 
create mode 100644 tests/m3/test_cli.py create mode 100644 tests/m3/test_m3.py delete mode 100644 tests/test_cli.py delete mode 100644 tests/test_config.py delete mode 100644 tests/test_config_scripts.py delete mode 100644 tests/test_data_io.py delete mode 100644 tests/test_example.py delete mode 100644 tests/test_mcp_server.py delete mode 100644 tests/test_oauth2_basic.py create mode 100644 tests/tools/test_tools.py diff --git a/tests/core/test_config.py b/tests/core/test_config.py new file mode 100644 index 0000000..114e153 --- /dev/null +++ b/tests/core/test_config.py @@ -0,0 +1,138 @@ +import os +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest + +from m3.core.config import M3Config +from m3.core.utils.exceptions import M3ConfigError + + +@pytest.fixture +def temp_env_vars(): + """Fixture to temporarily set environment variables.""" + original_env = os.environ.copy() + yield + os.environ.clear() + os.environ.update(original_env) + + +class TestM3Config: + """Tests for M3Config class.""" + + def test_init_default(self): + """Test default initialization.""" + config = M3Config() + assert config.log_level == "INFO" + assert config.env_vars == {} + assert isinstance(config.project_root, Path) + assert isinstance(config.data_dir, Path) + assert isinstance(config.databases_dir, Path) + assert isinstance(config.raw_files_dir, Path) + + def test_init_with_params(self): + """Test initialization with parameters.""" + env_vars = {"TEST_KEY": "value"} + config = M3Config(log_level="DEBUG", env_vars=env_vars) + assert config.log_level == "DEBUG" + assert config.env_vars == env_vars + + def test_to_dict(self): + """Test to_dict method.""" + config = M3Config(env_vars={"KEY": "value"}) + data = config.to_dict() + assert data["log_level"] == "INFO" + assert data["env_vars"] == {"KEY": "value"} + + def test_from_dict(self): + """Test from_dict class method.""" + data = {"log_level": "DEBUG", "env_vars": {"KEY": "value"}} + config = 
M3Config.from_dict(data) + assert config.log_level == "DEBUG" + assert config.env_vars == {"KEY": "value"} + + def test_from_dict_missing_key(self): + """Test from_dict raises error on missing key.""" + data = {"log_level": "INFO"} + with pytest.raises(M3ConfigError, match="Missing required config key"): + M3Config.from_dict(data) + + def test_get_env_var(self, temp_env_vars): + """Test get_env_var method.""" + os.environ["TEST_ENV"] = "env_value" + config = M3Config(env_vars={"CONFIG_KEY": "config_value"}) + assert config.get_env_var("TEST_ENV") == "env_value" + assert config.get_env_var("CONFIG_KEY") == "config_value" + assert config.get_env_var("MISSING", default="default") == "default" + + def test_get_env_var_raise_if_missing(self): + """Test get_env_var raises if missing and required.""" + config = M3Config() + with pytest.raises(M3ConfigError, match="Missing required env var"): + config.get_env_var("MISSING", raise_if_missing=True) + + def test_validate_for_tools_success(self): + """Test validate_for_tools method success.""" + from m3.core.tool.base import BaseTool + + mock_tool = MagicMock(spec=BaseTool) + mock_tool.__class__.__name__ = "TestTool" + mock_tool.required_env_vars = {"REQUIRED": None} + config = M3Config(env_vars={"TESTTOOL_REQUIRED": "value"}) + config.validate_for_tools([mock_tool]) + + def test_validate_for_tools_error(self): + """Test validate_for_tools raises on error.""" + from m3.core.tool.base import BaseTool + + mock_tool = MagicMock(spec=BaseTool) + mock_tool.__class__.__name__ = "TestTool" + mock_tool.required_env_vars = {"FAKE_MISSING": None} + config = M3Config(env_vars={}) + with pytest.raises(M3ConfigError): + config.validate_for_tools([mock_tool]) + + def test_merge_env(self): + """Test merge_env method.""" + config = M3Config(env_vars={"EXISTING": "old"}) + new_env = {"NEW": "value"} + config.merge_env(new_env) + assert config.env_vars["NEW"] == "value" + assert config.env_vars["EXISTING"] == "old" + + def 
test_merge_env_conflict(self): + """Test merge_env raises on conflict.""" + config = M3Config(env_vars={"KEY": "old"}) + with pytest.raises(M3ConfigError, match="Env conflict"): + config.merge_env({"KEY": "new"}) + + @patch("m3.core.config.Path.home") + def test_project_root_fallback(self, mock_home): + """Test project root fallback to home.""" + mock_home.return_value = Path("/home/user") + with patch("pathlib.Path.exists", return_value=False): + config = M3Config() + assert config.project_root == Path("/home/user") + + def test_invalid_log_level(self): + """Test invalid log level raises error.""" + with pytest.raises(M3ConfigError, match="Invalid log level"): + M3Config(log_level="INVALID") + + def test_get_env_var_error_success(self): + """Test _get_env_var_error returns None on success.""" + config = M3Config(env_vars={"KEY": "value"}) + assert config._get_env_var_error("KEY", None) is None + + def test_get_env_var_error_missing(self): + """Test _get_env_var_error returns error message when missing.""" + config = M3Config() + error = config._get_env_var_error("MISSING", None) + assert error is not None + assert "Missing required env var" in error + + def test_get_data_dir_with_env(self, temp_env_vars): + """Test _get_data_dir uses env var when set.""" + os.environ["M3_DATA_DIR"] = "/custom/data" + config = M3Config() + assert config._get_data_dir() == Path("/custom/data") diff --git a/tests/core/test_mcp_config_generator.py b/tests/core/test_mcp_config_generator.py new file mode 100644 index 0000000..c08fd47 --- /dev/null +++ b/tests/core/test_mcp_config_generator.py @@ -0,0 +1,160 @@ +import os +from pathlib import Path +from unittest.mock import MagicMock, mock_open, patch + +import pytest + +from m3.core.mcp_config_generator.mcp_config_generators.claude_mcp_config import ( + ClaudeConfigGenerator, +) +from m3.core.mcp_config_generator.mcp_config_generators.fast_mcp_config import ( + FastMCPConfigGenerator, +) +from m3.core.utils.exceptions import 
M3ValidationError +from m3.m3 import M3 + + +@pytest.fixture +def mock_m3() -> M3: + """Fixture for mock M3 instance.""" + from m3.core.config import M3Config + + config = M3Config(env_vars={"TEST_ENV": "value"}) + m3 = M3(config=config) + return m3 + + +class TestClaudeConfigGenerator: + """Tests for ClaudeConfigGenerator.""" + + @patch( + "shutil.which", + return_value="/usr/bin/python", + ) + @patch("os.path.isdir", return_value=True) + @patch( + "m3.core.mcp_config_generator.mcp_config_generators.claude_mcp_config.ClaudeConfigGenerator._get_claude_config_path" + ) + def test_generate_with_defaults( + self, + mock_get_path: MagicMock, + mock_isdir: MagicMock, + mock_which: MagicMock, + mock_m3: M3, + tmp_path: Path, + ) -> None: + """Test generating config with defaults.""" + mock_get_path.return_value = None + config = ClaudeConfigGenerator.generate(mock_m3) + assert isinstance(config, dict) + assert "mcpServers" in config + assert "m3" in config["mcpServers"] + server = config["mcpServers"]["m3"] + assert server["command"].endswith("python") + assert server["args"] == ["-m", "m3.core.server"] + assert os.path.isdir(server["cwd"]) + assert "TEST_ENV" in server["env"] + + @patch("shutil.which", return_value=None) + def test_invalid_command_raises_error( + self, mock_which: MagicMock, mock_m3: M3 + ) -> None: + """Test invalid command raises error.""" + with pytest.raises(M3ValidationError, match="Invalid command"): + ClaudeConfigGenerator.generate(mock_m3, command="/invalid/python") + + @patch("os.path.isdir", return_value=False) + def test_invalid_cwd_raises_error(self, mock_isdir: MagicMock, mock_m3: M3) -> None: + """Test invalid cwd raises error.""" + with pytest.raises(M3ValidationError, match="Invalid cwd"): + ClaudeConfigGenerator.generate(mock_m3, cwd="/invalid/dir") + + @patch("builtins.open", new_callable=mock_open) + @patch("json.load") + @patch("json.dump") + @patch( + 
"m3.core.mcp_config_generator.mcp_config_generators.claude_mcp_config.ClaudeConfigGenerator._get_claude_config_path" + ) + @patch("pathlib.Path.exists") + def test_merge_with_existing_config( + self, + mock_exists: MagicMock, + mock_get_path: MagicMock, + mock_dump: MagicMock, + mock_load: MagicMock, + mock_open_file: MagicMock, + mock_m3: M3, + tmp_path: Path, + ) -> None: + """Test merging with existing Claude config.""" + mock_path = tmp_path / "claude_config.json" + mock_get_path.return_value = mock_path + mock_exists.return_value = True + mock_load.return_value = {"mcpServers": {"existing": {}}} + _config = ClaudeConfigGenerator.generate(mock_m3) + mock_dump.assert_called_once() + dumped_config = mock_dump.call_args[0][0] + assert "existing" in dumped_config["mcpServers"] + assert "m3" in dumped_config["mcpServers"] + + @patch("builtins.open", new_callable=mock_open) + @patch("json.dump") + def test_save_to_custom_path( + self, + mock_dump: MagicMock, + mock_open_file: MagicMock, + mock_m3: M3, + tmp_path: Path, + ) -> None: + """Test saving to custom path.""" + save_path = tmp_path / "custom.json" + config = ClaudeConfigGenerator.generate(mock_m3, save_path=str(save_path)) + mock_dump.assert_called_once_with(config, mock_open_file(), indent=2) + + +class TestFastMCPConfigGenerator: + """Tests for FastMCPConfigGenerator.""" + + @patch("shutil.which", return_value="/usr/bin/python") + @patch("os.path.isdir", return_value=True) + def test_generate_with_defaults( + self, mock_isdir: MagicMock, mock_which: MagicMock, mock_m3: M3 + ) -> None: + """Test generating config with defaults.""" + config = FastMCPConfigGenerator.generate(mock_m3) + assert isinstance(config, dict) + assert "mcpServers" in config + assert "m3" in config["mcpServers"] + server = config["mcpServers"]["m3"] + assert server["command"].endswith("python") + assert server["args"] == ["-m", "m3.core.server"] + assert os.path.isdir(server["cwd"]) + assert "TEST_ENV" in server["env"] + + 
@patch("shutil.which", return_value=None) + def test_invalid_command_raises_error( + self, mock_which: MagicMock, mock_m3: M3 + ) -> None: + """Test invalid command raises error.""" + with pytest.raises(M3ValidationError, match="Invalid command"): + FastMCPConfigGenerator.generate(mock_m3, command="/invalid/python") + + @patch("os.path.isdir", return_value=False) + def test_invalid_cwd_raises_error(self, mock_isdir: MagicMock, mock_m3: M3) -> None: + """Test invalid cwd raises error.""" + with pytest.raises(M3ValidationError, match="Invalid cwd"): + FastMCPConfigGenerator.generate(mock_m3, cwd="/invalid/dir") + + @patch("builtins.open", new_callable=mock_open) + @patch("json.dump") + def test_save_to_custom_path( + self, + mock_dump: MagicMock, + mock_open_file: MagicMock, + mock_m3: M3, + tmp_path: Path, + ) -> None: + """Test saving to custom path.""" + save_path = tmp_path / "custom.json" + config = FastMCPConfigGenerator.generate(mock_m3, save_path=str(save_path)) + mock_dump.assert_called_once_with(config, mock_open_file(), indent=2) diff --git a/tests/core/test_server.py b/tests/core/test_server.py new file mode 100644 index 0000000..1841a4b --- /dev/null +++ b/tests/core/test_server.py @@ -0,0 +1,61 @@ +import os +from unittest.mock import Mock, patch + +import pytest + +from m3.core.utils.exceptions import M3ValidationError +from m3.m3 import M3 + + +class TestMCPServer: + """Tests for MCP server.""" + + def test_server_can_be_imported_as_module(self) -> None: + """Test that the server can be imported as a module.""" + import m3.core.server + + assert hasattr(m3.core.server, "main") + assert callable(m3.core.server.main) + + @patch.dict(os.environ, {"M3_CONFIG_PATH": "test_config.json"}) + @patch("m3.core.server.M3.load") + def test_main_success(self, mock_load: Mock) -> None: + """Test main function with valid config.""" + mock_m3 = Mock(spec=M3) + mock_load.return_value = mock_m3 + from m3.core.server import main + + main() + 
mock_load.assert_called_once_with("test_config.json") + mock_m3.build.assert_called_once() + mock_m3.run.assert_called_once() + + @patch.dict(os.environ, clear=True) + def test_main_no_config_path(self) -> None: + """Test main raises error when M3_CONFIG_PATH is not set.""" + from m3.core.server import main + + with pytest.raises(M3ValidationError, match="M3_CONFIG_PATH env var not set"): + main() + + @patch.dict(os.environ, {"M3_CONFIG_PATH": "invalid.json"}) + @patch("m3.core.server.M3.load") + def test_main_load_failure(self, mock_load: Mock) -> None: + """Test main handles load failure.""" + mock_load.side_effect = FileNotFoundError("Config not found") + from m3.core.server import main + + with pytest.raises(FileNotFoundError): + main() + + @patch.dict(os.environ, {"M3_CONFIG_PATH": "test.json"}) + @patch("m3.core.server.M3.load") + def test_main_build_failure(self, mock_load: Mock) -> None: + """Test main handles build failure.""" + mock_m3 = Mock(spec=M3) + mock_load.return_value = mock_m3 + mock_m3.build.side_effect = M3ValidationError("Build failed") + from m3.core.server import main + + with pytest.raises(M3ValidationError): + main() diff --git a/tests/core/test_tool.py b/tests/core/test_tool.py new file mode 100644 index 0000000..c2b1cdf --- /dev/null +++ b/tests/core/test_tool.py @@ -0,0 +1,82 @@ +from pathlib import Path +from unittest.mock import patch + +import pytest + +from m3.core.tool.backend.backends.bigquery import BigQueryBackend +from m3.core.tool.backend.backends.sqlite import SQLiteBackend +from m3.core.tool.backend.base import BackendBase +from m3.core.tool.base import BaseTool +from m3.core.tool.cli.base import BaseToolCLI + + +class TestBaseTool: + """Tests for BaseTool.""" + + def test_abstract_methods(self): + """Test BaseTool abstract methods.""" + with pytest.raises(TypeError): + BaseTool() + + +class TestBackendBase: + """Tests for BackendBase.""" + + def test_abstract_methods(self): + """Test BackendBase abstract methods.""" + with 
pytest.raises(TypeError): + BackendBase() + + +class TestSQLiteBackend: + """Tests for SQLiteBackend.""" + + def test_init(self, tmp_path: Path): + """Test initialization.""" + db_path = tmp_path / "M3_test_environment_test.db" + backend = SQLiteBackend(path=str(db_path)) + assert backend.path == str(db_path) + assert backend.connection is None + if db_path.exists(): + db_path.unlink() + + def test_initialize(self, tmp_path: Path): + """Test initialize creates connection.""" + db_path = tmp_path / "M3_test_environment_test.db" + backend = SQLiteBackend(path=str(db_path)) + backend.initialize() + assert backend.connection is not None + backend.teardown() + assert backend.connection is None + if db_path.exists(): + db_path.unlink() + + +class TestBigQueryBackend: + """Tests for BigQueryBackend.""" + + @patch("google.cloud.bigquery.Client") + def test_init(self, mock_client): + """Test initialization.""" + backend = BigQueryBackend(project="test-project") + assert backend.project == "test-project" + assert backend.client is None + + @patch("google.cloud.bigquery.Client") + def test_initialize(self, mock_client): + """Test initialize creates client.""" + backend = BigQueryBackend(project="test-project") + backend.initialize() + mock_client.assert_called_once_with(project="test-project") + assert backend.client is not None + backend.teardown() + assert backend.client is None + + +class TestBaseToolCLI: + """Tests for BaseToolCLI.""" + + def test_abstract_methods(self): + """Test BaseToolCLI abstract methods.""" + with pytest.raises(TypeError): + BaseToolCLI() diff --git a/tests/m3/test_cli.py b/tests/m3/test_cli.py new file mode 100644 index 0000000..8b14d9c --- /dev/null +++ b/tests/m3/test_cli.py @@ -0,0 +1,272 @@ +from pathlib import Path +from unittest.mock import MagicMock, patch + +from typer.testing import CliRunner + +import m3.core.preset.registry as preset_registry +from m3.cli import M3CLI +from m3.core.config import M3Config + + +def _strip_ansi_codes(text: 
str) -> str: + """Strip ANSI (rich) escape codes from text.""" + import re + + ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") + return ansi_escape.sub("", text) + + +runner = CliRunner() +app = M3CLI().app + + +class TestM3CLI: + """Tests for M3CLI commands and related functionality.""" + + def test_version(self) -> None: + """Test version command.""" + result = runner.invoke(app, ["--version"]) + assert result.exit_code == 0 + assert "M3 CLI Version" in result.stdout + + def test_list_presets(self) -> None: + """Test list-presets command.""" + result = runner.invoke(app, ["list-presets"]) + assert result.exit_code == 0 + assert "Available Presets" in result.stdout + + def test_list_tools(self) -> None: + """Test list-tools command.""" + result = runner.invoke(app, ["list-tools"]) + assert result.exit_code == 0 + assert "Available Tools" in result.stdout + + @patch("m3.m3.M3.run") + @patch("m3.m3.M3.build") + @patch("m3.m3.M3.save") + def test_run_with_preset( + self, mock_save: MagicMock, mock_build: MagicMock, mock_run: MagicMock + ) -> None: + """Test run with preset.""" + with patch("m3.core.preset.registry.ALL_PRESETS", {"default_m3": MagicMock()}): + mock_preset_class = preset_registry.ALL_PRESETS["default_m3"] + mock_preset_m3 = MagicMock() + mock_preset_m3.config = M3Config() + mock_preset_m3.mcp = None + mock_preset_m3.tools = [] + mock_preset_class.configure_mock(**{"create.return_value": mock_preset_m3}) + result = runner.invoke(app, ["run", "--presets", "default_m3"]) + assert result.exit_code == 0 + output = _strip_ansi_codes(result.stdout) + assert "Applying preset 'default_m3'" in output + mock_build.assert_called_once() + mock_save.assert_called_once() + mock_run.assert_called_once() + + @patch("sqlite3.connect") + def test_build_with_preset(self, mock_connect: MagicMock) -> None: + """Test build with preset.""" + mock_conn = mock_connect.return_value + mock_conn.cursor.return_value = MagicMock() # Mock cursor if needed + result 
= runner.invoke(app, ["build", "--presets", "default_m3"]) + assert result.exit_code == 0 + + def test_search_presets(self) -> None: + """Test search for presets.""" + result = runner.invoke(app, ["search", "default", "--type", "presets"]) + assert result.exit_code == 0 + assert "Presets matches" in result.stdout + + def test_search_invalid_type(self) -> None: + """Test search with invalid type.""" + result = runner.invoke(app, ["search", "query", "--type", "invalid"]) + assert result.exit_code != 0 + assert "Invalid type" in result.stdout + + @patch("m3.tools.mimic.cli.MimicCLI.configure") + def test_add_tool(self, mock_configure: MagicMock) -> None: + """Test pipeline command.""" + mock_configure.return_value = { + "env_vars": { + "M3_BACKEND": "sqlite", + "M3_DB_PATH": "M3_test_environment_test.db", + }, + "tool_params": { + "backends": [ + { + "type": "sqlite", + "params": {"path": "M3_test_environment_test.db"}, + } + ], + "backend_key": "sqlite", + }, + } + result = runner.invoke(app, ["pipeline", "mimic"]) + assert result.exit_code == 0 + + def test_tools_help(self) -> None: + """Test tools help.""" + result = runner.invoke(app, ["tools", "--help"]) + assert result.exit_code == 0 + assert "Access tool-specific subcommands" in result.stdout + + def test_run_with_invalid_preset(self) -> None: + """Test run with invalid preset raises error.""" + result = runner.invoke(app, ["run", "--presets", "invalid_preset"]) + assert result.exit_code != 0 + output = _strip_ansi_codes(result.output) + assert "Unknown preset" in output + + @patch("m3.tools.mimic.cli.MimicCLI.configure") + def test_add_tool_new_pipeline(self, mock_configure: MagicMock) -> None: + """Test adding tool to new pipeline.""" + mock_configure.return_value = { + "env_vars": { + "M3_BACKEND": "sqlite", + "M3_DB_PATH": "M3_test_environment_test.db", + }, + "tool_params": { + "backends": [ + { + "type": "sqlite", + "params": {"path": "M3_test_environment_test.db"}, + } + ], + "backend_key": "sqlite", + 
}, + } + result = runner.invoke( + app, ["pipeline", "mimic", "--new-pipeline", "M3_test_env_new.json"] + ) + assert result.exit_code == 0 + pipeline_path = Path("M3_test_env_new.json") + if pipeline_path.exists(): + pipeline_path.unlink() + + @patch("m3.tools.mimic.cli.MimicCLI.configure") + def test_add_tool_existing_pipeline(self, mock_configure: MagicMock) -> None: + """Test adding tool to existing pipeline.""" + mock_configure.return_value = { + "env_vars": { + "M3_BACKEND": "sqlite", + "M3_DB_PATH": "M3_test_environment_test.db", + }, + "tool_params": { + "backends": [ + { + "type": "sqlite", + "params": {"path": "M3_test_environment_test.db"}, + } + ], + "backend_key": "sqlite", + }, + } + result = runner.invoke( + app, ["pipeline", "mimic", "--to-pipeline", "M3_test_env_existing.json"] + ) + assert result.exit_code == 0 + pipeline_path = Path("M3_test_env_existing.json") + if pipeline_path.exists(): + pipeline_path.unlink() + + def test_add_tool_unknown(self) -> None: + """Test adding unknown tool raises error.""" + result = runner.invoke(app, ["pipeline", "unknown"]) + assert result.exit_code != 0 + assert "Unknown tool" in str(result.exception) + + def test_help_shows_correct_commands(self) -> None: + """Test that the help message shows the some key commands.""" + result = runner.invoke(app, ["--help"]) + assert result.exit_code == 0 + output = _strip_ansi_codes(result.stdout) + assert "Usage" in output + assert "list-presets" in output + assert "list-tools" in output + assert "tools" in output + + def test_unknown_command_reports_error(self) -> None: + """Test that an unknown command reports an error.""" + result = runner.invoke(app, ["not-a-cmd"]) + assert result.exit_code != 0 + output = _strip_ansi_codes(result.output) + assert "No such command" in output + + @patch("sqlite3.connect") + @patch( + "m3.core.mcp_config_generator.mcp_config_generators.claude_mcp_config.ClaudeConfigGenerator.generate" + ) + def test_build_claude_success( + self, 
mock_generate: MagicMock, mock_connect: MagicMock + ) -> None: + """Test successful build for Claude Desktop.""" + mock_generate.return_value = {"mcpServers": {"m3": {}}} + mock_conn = mock_connect.return_value + mock_conn.cursor.return_value = MagicMock() + result = runner.invoke(app, ["build", "--config-type", "claude"]) + assert result.exit_code == 0 + output = _strip_ansi_codes(result.stdout) + assert "Pipeline config saved" in output + + @patch("sqlite3.connect") + @patch( + "m3.core.mcp_config_generator.mcp_config_generators.fast_mcp_config.FastMCPConfigGenerator.generate" + ) + def test_build_fast_success( + self, mock_generate: MagicMock, mock_connect: MagicMock + ) -> None: + """Test successful build for Fast MCP Config Generator.""" + mock_generate.return_value = {"mcpServers": {"m3": {}}} + mock_conn = mock_connect.return_value + mock_conn.cursor.return_value = MagicMock() + result = runner.invoke(app, ["build", "--config-type", "fastmcp"]) + assert result.exit_code == 0 + output = _strip_ansi_codes(result.stdout) + assert "Pipeline config saved" in output + + @patch("sqlite3.connect") + @patch( + "m3.core.mcp_config_generator.mcp_config_generators.universal_mcp_config.UniversalConfigGenerator.generate" + ) + def test_build_universal_success( + self, mock_generate: MagicMock, mock_connect: MagicMock + ) -> None: + """Test successful build for Universal MCP Config Generator.""" + mock_generate.return_value = {"mcpServers": {"m3": {}}} + mock_conn = mock_connect.return_value + mock_conn.cursor.return_value = MagicMock() + result = runner.invoke(app, ["build", "--config-type", "universal"]) + assert result.exit_code == 0 + output = _strip_ansi_codes(result.stdout) + assert "Pipeline config saved" in output + + @patch("sqlite3.connect") + @patch( + "m3.core.mcp_config_generator.mcp_config_generators.fast_mcp_config.FastMCPConfigGenerator.generate" + ) + def test_build_script_failure( + self, mock_generate: MagicMock, mock_connect: MagicMock + ) -> None: + 
"""Test error handling when build fails (default fast config).""" + mock_generate.side_effect = Exception("Build failed") + mock_conn = mock_connect.return_value + mock_conn.cursor.return_value = MagicMock() + result = runner.invoke(app, ["build"]) + assert result.exit_code != 0 + output = _strip_ansi_codes(result.output) + assert "Build failed" in output + + def test_search_tools(self) -> None: + """Test that search finds tools.""" + result = runner.invoke(app, ["search", "mimic", "--type", "tools"]) + assert result.exit_code == 0 + output = _strip_ansi_codes(result.stdout) + assert "Tools matches" in output + assert "mimic" in output + + def test_search_presets_and_tools_combined(self) -> None: + """Test that search finds both presets and tools when type is all.""" + result = runner.invoke(app, ["search", "m3"]) + assert result.exit_code == 0 + output = _strip_ansi_codes(result.stdout) + assert "Presets matches" in output or "Tools matches" in output diff --git a/tests/m3/test_m3.py b/tests/m3/test_m3.py new file mode 100644 index 0000000..c55d0a6 --- /dev/null +++ b/tests/m3/test_m3.py @@ -0,0 +1,344 @@ +import json +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest +from fastmcp import FastMCP + +from m3.core.config import M3Config +from m3.core.mcp_config_generator.registry import ALL_MCP_CONFIG_GENERATORS +from m3.core.tool.base import BaseTool +from m3.core.utils.exceptions import ( + M3BuildError, + M3InitializationError, + M3PresetError, + M3ValidationError, +) +from m3.m3 import M3 +from m3.tools.registry import ALL_TOOLS + + +@pytest.fixture +def mock_config() -> M3Config: + """Fixture for a mock M3Config.""" + return M3Config(env_vars={"TEST_ENV": "value"}) + + +@pytest.fixture +def mock_tool() -> BaseTool: + """Fixture for a mock BaseTool.""" + + class MockTool(BaseTool): + @classmethod + def from_dict(cls, params): + return cls() + + def actions(self): + pass + + def to_dict(self): + pass + + def __init__(self): + 
super().__init__() + self.required_env_vars = {} + self.actions = MagicMock(return_value=[lambda: "test_action"]) + self.to_dict = MagicMock(return_value={"param": "value"}) + self.initialize = MagicMock() + self.post_load = MagicMock() + + return MockTool() + + +@pytest.fixture +def mock_preset() -> MagicMock: + """Fixture for a mock preset M3 instance.""" + mock_m3 = MagicMock(spec=M3) + mock_m3.tools = [MagicMock(spec=BaseTool)] + mock_m3.config = MagicMock(spec=M3Config) + mock_m3.mcp = MagicMock(spec=FastMCP) + return mock_m3 + + +class TestM3: + """Tests for M3 class.""" + + def test_init_default(self) -> None: + """Test default initialization.""" + m3 = M3() + assert isinstance(m3.config, M3Config) + assert m3.tools == [] + assert m3.mcp is None + assert m3._mcp_config_generators == ALL_MCP_CONFIG_GENERATORS + assert not m3._built + + def test_init_with_params(self, mock_config: M3Config) -> None: + """Test initialization with parameters.""" + mock_mcp = MagicMock(spec=FastMCP) + m3 = M3(config=mock_config, mcp=mock_mcp) + assert m3.config == mock_config + assert m3.mcp == mock_mcp + + def test_with_config(self, mock_config: M3Config) -> None: + """Test chaining with_config.""" + m3 = M3() + new_m3 = m3.with_config(mock_config) + assert new_m3 != m3 + assert new_m3.config == mock_config + assert new_m3.tools == m3.tools + assert new_m3.mcp == m3.mcp + + def test_with_tool(self, mock_tool: BaseTool) -> None: + """Test chaining with_tool.""" + m3 = M3() + new_m3 = m3.with_tool(mock_tool) + assert new_m3 != m3 + assert new_m3.tools == [mock_tool] + assert new_m3.config == m3.config + assert new_m3.mcp == m3.mcp + + def test_with_tools(self, mock_tool: BaseTool) -> None: + """Test chaining with_tools.""" + m3 = M3() + new_m3 = m3.with_tools([mock_tool, mock_tool]) + assert new_m3 != m3 + assert len(new_m3.tools) == 2 + assert new_m3.config == m3.config + assert new_m3.mcp == m3.mcp + + @patch("m3.core.preset.registry.ALL_PRESETS", {"test_preset": MagicMock()}) + 
def test_with_preset_success(self, mock_preset: MagicMock) -> None: + """Test chaining with_preset success.""" + mock_preset_class = MagicMock() + mock_preset_class.create.return_value = mock_preset + with patch.dict( + "m3.core.preset.registry.ALL_PRESETS", {"test_preset": mock_preset_class} + ): + m3 = M3() + new_m3 = m3.with_preset("test_preset") + assert new_m3 != m3 + assert new_m3.tools == mock_preset.tools + assert new_m3.config == mock_preset.config + assert new_m3.mcp == mock_preset.mcp + + def test_with_preset_unknown(self) -> None: + """Test with_preset raises for unknown preset.""" + m3 = M3() + with pytest.raises(M3PresetError, match="Unknown preset"): + m3.with_preset("unknown") + + @patch("m3.core.preset.registry.ALL_PRESETS", {"test_preset": MagicMock()}) + def test_with_preset_failure(self) -> None: + """Test with_preset handles creation failure.""" + mock_preset_class = MagicMock() + mock_preset_class.create.side_effect = Exception("Creation failed") + with patch.dict( + "m3.core.preset.registry.ALL_PRESETS", {"test_preset": mock_preset_class} + ): + m3 = M3() + with pytest.raises(M3PresetError, match="Failed to create preset"): + m3.with_preset("test_preset") + + def test_build_success(self, mock_tool: BaseTool) -> None: + """Test successful build.""" + with patch.dict(ALL_MCP_CONFIG_GENERATORS, {"test": MagicMock()}): + mock_generator = ALL_MCP_CONFIG_GENERATORS["test"] + mock_generator.generate.return_value = {"config": "test"} + mock_mcp = MagicMock(spec=FastMCP) + mock_mcp.tool.return_value = lambda x: x + m3 = M3(mcp=mock_mcp).with_tool(mock_tool) + result = m3.build(type="test") + assert result == {"config": "test"} + assert m3._built + mock_tool.initialize.assert_called_once() + mock_generator.generate.assert_called_once() + + def test_build_no_tools(self) -> None: + """Test build fails with no tools.""" + m3 = M3() + with pytest.raises(M3BuildError): + m3.build() + + def test_build_validation_failure(self, mock_tool: BaseTool) -> None: + 
"""Test build fails on validation.""" + mock_tool.required_env_vars = {"MISSING": None} + m3 = M3().with_tool(mock_tool) + with pytest.raises(M3BuildError): + m3.build() + + def test_build_init_failure(self, mock_tool: BaseTool) -> None: + """Test build fails on tool init.""" + mock_tool.initialize.side_effect = Exception("Init failed") + m3 = M3().with_tool(mock_tool) + with pytest.raises(M3BuildError): + m3.build() + + def test_build_unknown_type(self) -> None: + """Test build fails on unknown config type.""" + m3 = M3() + with pytest.raises(M3BuildError): + m3.build(type="unknown") + + def test_run_not_built(self) -> None: + """Test run fails if not built.""" + m3 = M3() + with pytest.raises(M3BuildError, match="Call .build()"): + m3.run() + + def test_run_no_mcp(self) -> None: + """Test run fails if no MCP.""" + m3 = M3() + m3._built = True + with pytest.raises(M3InitializationError, match="MCP not initialized"): + m3.run() + + def test_run_success(self, mock_tool: BaseTool) -> None: + """Test successful run.""" + mock_mcp = MagicMock(spec=FastMCP) + m3 = M3(mcp=mock_mcp).with_tool(mock_tool) + m3._built = True + with patch.object(m3, "_teardown_tools") as mock_teardown: + m3.run() + mock_mcp.run.assert_called_once() + mock_teardown.assert_called_once() + + def test_run_exception(self, mock_tool: BaseTool) -> None: + """Test run handles exception.""" + mock_mcp = MagicMock(spec=FastMCP) + mock_mcp.run.side_effect = Exception("Run failed") + m3 = M3(mcp=mock_mcp).with_tool(mock_tool) + m3._built = True + with patch.object(m3, "_teardown_tools") as mock_teardown: + with pytest.raises(Exception, match="Run failed"): + m3.run() + mock_teardown.assert_called_once() + + def test_save_not_built(self) -> None: + """Test save fails if not built.""" + m3 = M3() + with pytest.raises(M3BuildError, match="Call .build()"): + m3.save("test.json") + + def test_save_success(self, tmp_path: Path, mock_tool: BaseTool) -> None: + """Test successful save.""" + path = tmp_path / 
"config.json" + m3 = M3().with_tool(mock_tool) + m3._built = True + m3.save(str(path)) + assert path.exists() + with open(path) as f: + data = json.load(f) + assert "config" in data + assert "tools" in data + assert len(data["tools"]) == 1 + + def test_save_serialization_error(self, mock_tool: BaseTool) -> None: + """Test save handles serialization error.""" + mock_tool.to_dict.side_effect = TypeError("Serialization failed") + m3 = M3().with_tool(mock_tool) + m3._built = True + with pytest.raises(M3BuildError, match="Failed to serialize"): + m3.save("test.json") + + def test_load_success(self, tmp_path: Path) -> None: + """Test successful load.""" + path = tmp_path / "config.json" + data = { + "config": {"log_level": "INFO", "env_vars": {}}, + "tools": [{"type": "mocktool", "params": {"param": "value"}}], + } + with open(path, "w") as f: + json.dump(data, f) + + with patch.dict(ALL_TOOLS, {"mocktool": MagicMock()}): + mock_tool_cls = ALL_TOOLS["mocktool"] + mock_tool = MagicMock(spec=BaseTool) + mock_tool_cls.from_dict.return_value = mock_tool + m3 = M3.load(str(path)) + assert isinstance(m3, M3) + assert m3._built + mock_tool.post_load.assert_called_once() + + def test_load_file_not_found(self) -> None: + """Test load fails if file not found.""" + with pytest.raises(FileNotFoundError): + M3.load("nonexistent.json") + + def test_load_invalid_config(self, tmp_path: Path) -> None: + """Test load fails on invalid config.""" + path = tmp_path / "invalid.json" + with open(path, "w") as f: + json.dump({"invalid": "data"}, f) + with pytest.raises(M3ValidationError, match="Invalid config"): + M3.load(str(path)) + + def test_load_unknown_tool(self, tmp_path: Path) -> None: + """Test load fails on unknown tool.""" + path = tmp_path / "config.json" + data = { + "config": {"log_level": "INFO", "env_vars": {}}, + "tools": [{"type": "unknown", "params": {}}], + } + with open(path, "w") as f: + json.dump(data, f) + with pytest.raises(M3ValidationError, match="Unknown tool type"): 
+ M3.load(str(path)) + + def test_teardown_tools(self, mock_tool: BaseTool) -> None: + """Test teardown of tools.""" + mock_backend = MagicMock() + mock_tool.backends = {"test": mock_backend} + m3 = M3().with_tool(mock_tool) + m3._teardown_tools() + mock_backend.teardown.assert_called_once() + + def test_post_load(self, mock_tool: BaseTool) -> None: + """Test post_load calls tool post_load.""" + m3 = M3().with_tool(mock_tool) + m3._post_load() + mock_tool.post_load.assert_called_once() + assert m3._built + + @patch("builtins.open") + def test_save_oserror( + self, mock_open: MagicMock, mock_tool: BaseTool, caplog + ) -> None: + """Test save handles OSError during file write.""" + mock_open.side_effect = OSError("Permission denied") + m3 = M3().with_tool(mock_tool) + m3._built = True + with pytest.raises(OSError) as exc_info: + m3.save("test.json") + assert "Permission denied" in str(exc_info.value) + assert "File write error: Permission denied" in caplog.text + + @patch("os.path.exists", return_value=True) + @patch("builtins.open") + def test_load_oserror( + self, mock_open: MagicMock, mock_exists: MagicMock, caplog + ) -> None: + """Test load handles OSError during file read.""" + mock_open.side_effect = OSError("Permission denied") + with pytest.raises(OSError) as exc_info: + M3.load("test.json") + assert "Permission denied" in str(exc_info.value) + assert "File read error: Permission denied" in caplog.text + + def test_generate_config_unknown_type_no_suggestion(self) -> None: + """Test _generate_config raises for unknown type without suggestion.""" + with patch.dict(ALL_MCP_CONFIG_GENERATORS, {"unrelated": MagicMock()}): + m3 = M3() + with pytest.raises(M3ValidationError) as exc_info: + m3._generate_config(type="unknown") + assert "Unknown config type: unknown." 
in str(exc_info.value) + assert "Did you mean" not in str(exc_info.value) + + def test_generate_config_unknown_type_with_suggestion(self) -> None: + """Test _generate_config raises for unknown type with suggestion.""" + with patch.dict(ALL_MCP_CONFIG_GENERATORS, {"fastmcp": MagicMock()}): + m3 = M3() + with pytest.raises(M3ValidationError) as exc_info: + m3._generate_config(type="fastmc") + assert "Unknown config type: fastmc. Did you mean 'fastmcp'?" in str( + exc_info.value + ) diff --git a/tests/test_cli.py b/tests/test_cli.py deleted file mode 100644 index e6181cd..0000000 --- a/tests/test_cli.py +++ /dev/null @@ -1,153 +0,0 @@ -import subprocess -import tempfile -from pathlib import Path -from unittest.mock import MagicMock, patch - -import pytest -from typer.testing import CliRunner - -import m3.cli as cli_module -from m3.cli import app - -runner = CliRunner() - - -@pytest.fixture(autouse=True) -def inject_version(monkeypatch): - monkeypatch.setattr(cli_module, "__version__", "0.0.1") - - -def test_help_shows_app_name(): - result = runner.invoke(app, ["--help"]) - assert result.exit_code == 0 - assert "M3 CLI" in result.stdout - - -def test_version_option_exits_zero_and_shows_version(): - result = runner.invoke(app, ["--version"]) - assert result.exit_code == 0 - assert "M3 CLI Version: 0.0.1" in result.stdout - - -def test_unknown_command_reports_error(): - result = runner.invoke(app, ["not-a-cmd"]) - assert result.exit_code != 0 - # Check both stdout and stderr since error messages might go to either depending on environment - error_message = "No such command 'not-a-cmd'" - assert ( - error_message in result.stdout - or (hasattr(result, "stderr") and error_message in result.stderr) - or error_message in result.output - ) - - -@patch("m3.cli.initialize_dataset") -@patch("sqlite3.connect") -def test_init_command_respects_custom_db_path( - mock_sqlite_connect, mock_initialize_dataset -): - """Test that m3 init --db-path correctly uses custom database path 
override.""" - # Setup mocks - mock_initialize_dataset.return_value = True - - # Mock sqlite connection and cursor for verification query - mock_cursor = mock_sqlite_connect.return_value.cursor.return_value - mock_cursor.fetchone.return_value = (100,) # Mock row count result - - with tempfile.TemporaryDirectory() as temp_dir: - custom_db_path = Path(temp_dir) / "custom_mimic.db" - # Resolve the path to handle symlinks (like /var -> /private/var on macOS) - resolved_custom_db_path = custom_db_path.resolve() - - # Run the init command with custom db path - result = runner.invoke( - app, ["init", "mimic-iv-demo", "--db-path", str(custom_db_path)] - ) - - # Assert command succeeded - assert result.exit_code == 0 - - # Verify the output mentions the custom path (either original or resolved form) - assert ( - str(custom_db_path) in result.stdout - or str(resolved_custom_db_path) in result.stdout - ) - assert "Target database path:" in result.stdout - - # Verify initialize_dataset was called with the resolved custom path - mock_initialize_dataset.assert_called_once_with( - dataset_name="mimic-iv-demo", db_target_path=resolved_custom_db_path - ) - - # Verify sqlite connection was attempted with the resolved custom path - mock_sqlite_connect.assert_called_with(resolved_custom_db_path) - - -def test_config_validation_sqlite_with_project_id(): - """Test that sqlite backend rejects project-id parameter.""" - result = runner.invoke( - app, ["config", "claude", "--backend", "sqlite", "--project-id", "test"] - ) - assert result.exit_code == 1 - # Check output - error messages from typer usually go to stdout - assert "project-id can only be used with --backend bigquery" in result.output - - -def test_config_validation_bigquery_with_db_path(): - """Test that bigquery backend rejects db-path parameter.""" - result = runner.invoke( - app, ["config", "claude", "--backend", "bigquery", "--db-path", "/test/path"] - ) - assert result.exit_code == 1 - # Check output - error messages from 
typer usually go to stdout - assert "db-path can only be used with --backend sqlite" in result.output - - -def test_config_validation_bigquery_requires_project_id(): - """Test that bigquery backend requires project-id parameter.""" - result = runner.invoke(app, ["config", "claude", "--backend", "bigquery"]) - assert result.exit_code == 1 - # Check output - error messages from typer usually go to stdout - assert "project-id is required when using --backend bigquery" in result.output - - -@patch("subprocess.run") -def test_config_claude_success(mock_subprocess): - """Test successful Claude Desktop configuration.""" - mock_subprocess.return_value = MagicMock(returncode=0) - - result = runner.invoke(app, ["config", "claude"]) - assert result.exit_code == 0 - assert "Claude Desktop configuration completed" in result.stdout - - # Verify subprocess was called with correct script - mock_subprocess.assert_called_once() - call_args = mock_subprocess.call_args[0][0] - assert "setup_claude_desktop.py" in call_args[1] # Script path is second argument - - -@patch("subprocess.run") -def test_config_universal_quick_mode(mock_subprocess): - """Test universal config generator in quick mode.""" - mock_subprocess.return_value = MagicMock(returncode=0) - - result = runner.invoke(app, ["config", "--quick"]) - assert result.exit_code == 0 - assert "Generating M3 MCP configuration" in result.stdout - - # Verify subprocess was called with dynamic config script - mock_subprocess.assert_called_once() - call_args = mock_subprocess.call_args[0][0] - assert "dynamic_mcp_config.py" in call_args[1] # Script path is second argument - assert "--quick" in call_args - - -@patch("subprocess.run") -def test_config_script_failure(mock_subprocess): - """Test error handling when config script fails.""" - mock_subprocess.side_effect = subprocess.CalledProcessError(1, "cmd") - - result = runner.invoke(app, ["config", "claude"]) - assert result.exit_code == 1 - # Just verify that the command failed with the 
right exit code - # The specific error message may vary diff --git a/tests/test_config.py b/tests/test_config.py deleted file mode 100644 index 6b159f3..0000000 --- a/tests/test_config.py +++ /dev/null @@ -1,40 +0,0 @@ -from pathlib import Path - -from m3.config import ( - get_dataset_config, - get_dataset_raw_files_path, - get_default_database_path, -) - - -def test_get_dataset_config_known(): - cfg = get_dataset_config("mimic-iv-demo") - assert isinstance(cfg, dict) - assert cfg.get("default_db_filename") == "mimic_iv_demo.db" - - -def test_get_dataset_config_unknown(): - assert get_dataset_config("not-a-dataset") is None - - -def test_default_paths(tmp_path, monkeypatch): - # Redirect default dirs to a temp location - import m3.config as cfg_mod - - monkeypatch.setattr(cfg_mod, "DEFAULT_DATABASES_DIR", tmp_path / "dbs") - monkeypatch.setattr(cfg_mod, "DEFAULT_RAW_FILES_DIR", tmp_path / "raw") - db_path = get_default_database_path("mimic-iv-demo") - raw_path = get_dataset_raw_files_path("mimic-iv-demo") - # They should be Path objects and exist - assert isinstance(db_path, Path) - assert db_path.parent.exists() - assert isinstance(raw_path, Path) - assert raw_path.exists() - - -def test_raw_path_includes_dataset_name(tmp_path, monkeypatch): - import m3.config as cfg_mod - - monkeypatch.setattr(cfg_mod, "DEFAULT_RAW_FILES_DIR", tmp_path / "raw") - raw_path = get_dataset_raw_files_path("mimic-iv-demo") - assert "mimic-iv-demo" in str(raw_path) diff --git a/tests/test_config_scripts.py b/tests/test_config_scripts.py deleted file mode 100644 index 2430c77..0000000 --- a/tests/test_config_scripts.py +++ /dev/null @@ -1,118 +0,0 @@ -"""Tests for MCP configuration scripts.""" - -import sys -from pathlib import Path -from unittest.mock import patch - -import pytest - -sys.path.insert(0, str(Path(__file__).parent.parent / "src")) - -from m3.mcp_client_configs.dynamic_mcp_config import MCPConfigGenerator - - -class TestMCPConfigGenerator: - """Test the MCPConfigGenerator 
class.""" - - def test_generate_config_sqlite_default(self): - """Test generating SQLite config with defaults.""" - generator = MCPConfigGenerator() - - with ( - patch.object(generator, "_validate_python_path", return_value=True), - patch.object(generator, "_validate_directory", return_value=True), - ): - config = generator.generate_config() - - assert config["mcpServers"]["m3"]["env"]["M3_BACKEND"] == "sqlite" - assert "M3_PROJECT_ID" not in config["mcpServers"]["m3"]["env"] - assert config["mcpServers"]["m3"]["args"] == ["-m", "m3.mcp_server"] - - def test_generate_config_bigquery_with_project(self): - """Test generating BigQuery config with project ID.""" - generator = MCPConfigGenerator() - - with ( - patch.object(generator, "_validate_python_path", return_value=True), - patch.object(generator, "_validate_directory", return_value=True), - ): - config = generator.generate_config( - backend="bigquery", project_id="test-project" - ) - - assert config["mcpServers"]["m3"]["env"]["M3_BACKEND"] == "bigquery" - assert config["mcpServers"]["m3"]["env"]["M3_PROJECT_ID"] == "test-project" - assert ( - config["mcpServers"]["m3"]["env"]["GOOGLE_CLOUD_PROJECT"] - == "test-project" - ) - - def test_generate_config_sqlite_with_db_path(self): - """Test generating SQLite config with custom database path.""" - generator = MCPConfigGenerator() - - with ( - patch.object(generator, "_validate_python_path", return_value=True), - patch.object(generator, "_validate_directory", return_value=True), - ): - config = generator.generate_config( - backend="sqlite", db_path="/custom/path/database.db" - ) - - assert config["mcpServers"]["m3"]["env"]["M3_BACKEND"] == "sqlite" - assert ( - config["mcpServers"]["m3"]["env"]["M3_DB_PATH"] - == "/custom/path/database.db" - ) - - def test_generate_config_custom_server_name(self): - """Test generating config with custom server name.""" - generator = MCPConfigGenerator() - - with ( - patch.object(generator, "_validate_python_path", return_value=True), 
- patch.object(generator, "_validate_directory", return_value=True), - ): - config = generator.generate_config(server_name="custom-m3") - - assert "custom-m3" in config["mcpServers"] - assert "m3" not in config["mcpServers"] - - def test_generate_config_additional_env_vars(self): - """Test generating config with additional environment variables.""" - generator = MCPConfigGenerator() - - with ( - patch.object(generator, "_validate_python_path", return_value=True), - patch.object(generator, "_validate_directory", return_value=True), - ): - config = generator.generate_config( - additional_env={"DEBUG": "true", "LOG_LEVEL": "info"} - ) - - env = config["mcpServers"]["m3"]["env"] - assert env["DEBUG"] == "true" - assert env["LOG_LEVEL"] == "info" - assert env["M3_BACKEND"] == "sqlite" # Default should still be there - - def test_validation_invalid_python_path(self): - """Test that invalid Python path raises error.""" - generator = MCPConfigGenerator() - - with ( - patch.object(generator, "_validate_python_path", return_value=False), - patch.object(generator, "_validate_directory", return_value=True), - ): - with pytest.raises(ValueError, match="Invalid Python path"): - generator.generate_config(python_path="/invalid/python") - - def test_validation_invalid_directory(self): - """Test that invalid working directory raises error.""" - generator = MCPConfigGenerator() - - with ( - patch.object(generator, "_validate_python_path", return_value=True), - patch.object(generator, "_validate_directory", return_value=False), - ): - with pytest.raises(ValueError, match="Invalid working directory"): - generator.generate_config(working_directory="/invalid/dir") diff --git a/tests/test_data_io.py b/tests/test_data_io.py deleted file mode 100644 index cc7e44f..0000000 --- a/tests/test_data_io.py +++ /dev/null @@ -1,50 +0,0 @@ -import requests - -from m3.data_io import COMMON_USER_AGENT, _scrape_urls_from_html_page - - -class DummyResponse: - def __init__(self, content, status_code=200, 
headers=None): - self.content = content.encode() - self.status_code = status_code - self.headers = headers or {} - - def raise_for_status(self): - if not (200 <= self.status_code < 300): - raise requests.exceptions.HTTPError(response=self) - - @property - def reason(self): - return "Error" - - def iter_content(self, chunk_size=1): - yield from self.content - - -def test_scrape_urls(monkeypatch): - html = ( - "" - 'ok' - 'no' - "" - ) - dummy = DummyResponse(html) - session = requests.Session() - monkeypatch.setattr(session, "get", lambda url, timeout=None: dummy) - urls = _scrape_urls_from_html_page("http://example.com/", session) - assert urls == ["http://example.com/file1.csv.gz"] - - -def test_scrape_no_matching_suffix(monkeypatch): - html = 'ok' - dummy = DummyResponse(html) - session = requests.Session() - monkeypatch.setattr(session, "get", lambda url, timeout=None: dummy) - urls = _scrape_urls_from_html_page("http://example.com/", session) - assert urls == [] - - -def test_common_user_agent_header(): - # Ensure the constant is set and looks like a UA string - assert isinstance(COMMON_USER_AGENT, str) - assert "Mozilla/" in COMMON_USER_AGENT diff --git a/tests/test_example.py b/tests/test_example.py deleted file mode 100644 index 191e266..0000000 --- a/tests/test_example.py +++ /dev/null @@ -1,6 +0,0 @@ -def test_always_passes(): - """ - A simple placeholder test that always passes. - This ensures the test runner is configured correctly. - """ - assert True diff --git a/tests/test_mcp_server.py b/tests/test_mcp_server.py deleted file mode 100644 index 334ad5f..0000000 --- a/tests/test_mcp_server.py +++ /dev/null @@ -1,361 +0,0 @@ -""" -Tests for the MCP server functionality. 
-""" - -import os -import sqlite3 -from pathlib import Path -from unittest.mock import Mock, patch - -import pytest -from fastmcp import Client - -# Mock the database path check during import to handle CI environments -with patch("pathlib.Path.exists", return_value=True): - with patch( - "m3.mcp_server.get_default_database_path", return_value=Path("/fake/test.db") - ): - from m3.mcp_server import _init_backend, mcp - - -def _bigquery_available(): - """Check if BigQuery dependencies are available.""" - try: - import importlib.util - - return importlib.util.find_spec("google.cloud.bigquery") is not None - except ImportError: - return False - - -class TestMCPServerSetup: - """Test MCP server setup and configuration.""" - - def test_server_instance_exists(self): - """Test that the FastMCP server instance exists.""" - assert mcp is not None - assert mcp.name == "m3" - - def test_backend_init_sqlite_default(self): - """Test SQLite backend initialization with defaults.""" - with patch.dict(os.environ, {"M3_BACKEND": "sqlite"}, clear=True): - with patch("m3.mcp_server.get_default_database_path") as mock_path: - mock_path.return_value = Path("/fake/path.db") - with patch("pathlib.Path.exists", return_value=True): - _init_backend() - # If no exception raised, initialization succeeded - - def test_backend_init_sqlite_custom_path(self): - """Test SQLite backend initialization with custom path.""" - with patch.dict( - os.environ, - {"M3_BACKEND": "sqlite", "M3_DB_PATH": "/custom/path.db"}, - clear=True, - ): - with patch("pathlib.Path.exists", return_value=True): - _init_backend() - # If no exception raised, initialization succeeded - - def test_backend_init_sqlite_missing_db(self): - """Test SQLite backend initialization with missing database.""" - with patch.dict(os.environ, {"M3_BACKEND": "sqlite"}, clear=True): - with patch("m3.mcp_server.get_default_database_path") as mock_path: - mock_path.return_value = Path("/fake/path.db") - with patch("pathlib.Path.exists", 
return_value=False): - with pytest.raises(FileNotFoundError): - _init_backend() - - @pytest.mark.skipif( - not _bigquery_available(), reason="BigQuery dependencies not available" - ) - def test_backend_init_bigquery(self): - """Test BigQuery backend initialization.""" - with patch.dict( - os.environ, - {"M3_BACKEND": "bigquery", "M3_PROJECT_ID": "test-project"}, - clear=True, - ): - with patch("google.cloud.bigquery.Client") as mock_client: - mock_client.return_value = Mock() - _init_backend() - # If no exception raised, initialization succeeded - mock_client.assert_called_once_with(project="test-project") - - def test_backend_init_invalid(self): - """Test initialization with invalid backend.""" - with patch.dict(os.environ, {"M3_BACKEND": "invalid"}, clear=True): - with pytest.raises(ValueError, match="Unsupported backend"): - _init_backend() - - -class TestMCPTools: - """Test MCP tools functionality.""" - - @pytest.fixture - def test_db(self, tmp_path): - """Create a test SQLite database.""" - db_path = tmp_path / "test.db" - - # Create test database with MIMIC-IV-like structure - conn = sqlite3.connect(db_path) - cursor = conn.cursor() - - # Create icu_icustays table - cursor.execute(""" - CREATE TABLE icu_icustays ( - subject_id INTEGER, - hadm_id INTEGER, - stay_id INTEGER, - intime TEXT, - outtime TEXT - ) - """) - cursor.execute(""" - INSERT INTO icu_icustays (subject_id, hadm_id, stay_id, intime, outtime) - VALUES - (10000032, 20000001, 30000001, '2180-07-23 15:00:00', '2180-07-24 12:00:00'), - (10000033, 20000002, 30000002, '2180-08-15 10:30:00', '2180-08-16 14:15:00') - """) - - # Create hosp_labevents table - cursor.execute(""" - CREATE TABLE hosp_labevents ( - subject_id INTEGER, - hadm_id INTEGER, - itemid INTEGER, - charttime TEXT, - value TEXT - ) - """) - cursor.execute(""" - INSERT INTO hosp_labevents (subject_id, hadm_id, itemid, charttime, value) - VALUES - (10000032, 20000001, 50912, '2180-07-23 16:00:00', '120'), - (10000033, 20000002, 50912, 
'2180-08-15 11:00:00', '95') - """) - - conn.commit() - conn.close() - - return str(db_path) - - @pytest.mark.asyncio - async def test_tools_via_client(self, test_db): - """Test MCP tools through the FastMCP client.""" - # Set up environment for SQLite backend with OAuth2 disabled - with patch.dict( - os.environ, - { - "M3_BACKEND": "sqlite", - "M3_DB_PATH": test_db, - "M3_OAUTH2_ENABLED": "false", - }, - clear=True, - ): - # Initialize backend - _init_backend() - - # Test via FastMCP client - async with Client(mcp) as client: - # Test execute_mimic_query tool - result = await client.call_tool( - "execute_mimic_query", - {"sql_query": "SELECT COUNT(*) as count FROM icu_icustays"}, - ) - result_text = str(result) - assert "count" in result_text - assert "2" in result_text - - # Test get_icu_stays tool - result = await client.call_tool( - "get_icu_stays", {"patient_id": 10000032, "limit": 10} - ) - result_text = str(result) - assert "10000032" in result_text - - # Test get_lab_results tool - result = await client.call_tool( - "get_lab_results", {"patient_id": 10000032, "limit": 20} - ) - result_text = str(result) - assert "10000032" in result_text - - # Test get_database_schema tool - result = await client.call_tool("get_database_schema", {}) - result_text = str(result) - assert "icu_icustays" in result_text or "hosp_labevents" in result_text - - @pytest.mark.asyncio - async def test_security_checks(self, test_db): - """Test SQL injection protection.""" - with patch.dict( - os.environ, - { - "M3_BACKEND": "sqlite", - "M3_DB_PATH": test_db, - "M3_OAUTH2_ENABLED": "false", - }, - clear=True, - ): - _init_backend() - - async with Client(mcp) as client: - # Test dangerous queries are blocked - dangerous_queries = [ - "UPDATE icu_icustays SET subject_id = 999", - "DELETE FROM icu_icustays", - "INSERT INTO icu_icustays VALUES (1, 2, 3, '2020-01-01', '2020-01-02')", - "DROP TABLE icu_icustays", - "CREATE TABLE test (id INTEGER)", - "ALTER TABLE icu_icustays ADD COLUMN test 
TEXT", - ] - - for query in dangerous_queries: - result = await client.call_tool( - "execute_mimic_query", {"sql_query": query} - ) - result_text = str(result) - assert ( - "Security Error:" in result_text - and "Only SELECT" in result_text - ) - - @pytest.mark.asyncio - async def test_invalid_sql(self, test_db): - """Test handling of invalid SQL.""" - with patch.dict( - os.environ, - { - "M3_BACKEND": "sqlite", - "M3_DB_PATH": test_db, - "M3_OAUTH2_ENABLED": "false", - }, - clear=True, - ): - _init_backend() - - async with Client(mcp) as client: - result = await client.call_tool( - "execute_mimic_query", {"sql_query": "INVALID SQL QUERY"} - ) - result_text = str(result) - assert "Query Failed:" in result_text and "syntax error" in result_text - - @pytest.mark.asyncio - async def test_empty_results(self, test_db): - """Test handling of queries with no results.""" - with patch.dict( - os.environ, - { - "M3_BACKEND": "sqlite", - "M3_DB_PATH": test_db, - "M3_OAUTH2_ENABLED": "false", - }, - clear=True, - ): - _init_backend() - - async with Client(mcp) as client: - result = await client.call_tool( - "execute_mimic_query", - { - "sql_query": "SELECT * FROM icu_icustays WHERE subject_id = 999999" - }, - ) - result_text = str(result) - assert "No results found" in result_text - - @pytest.mark.asyncio - async def test_oauth2_authentication_required(self, test_db): - """Test that OAuth2 authentication is required when enabled.""" - # Set up environment for SQLite backend with OAuth2 enabled - with patch.dict( - os.environ, - { - "M3_BACKEND": "sqlite", - "M3_DB_PATH": test_db, - "M3_OAUTH2_ENABLED": "true", - "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", - "M3_OAUTH2_AUDIENCE": "m3-api", - }, - clear=True, - ): - _init_backend() - - async with Client(mcp) as client: - # Test that tools require authentication - result = await client.call_tool( - "execute_mimic_query", - {"sql_query": "SELECT COUNT(*) FROM icu_icustays"}, - ) - result_text = str(result) - assert 
"Missing OAuth2 access token" in result_text - - -class TestBigQueryIntegration: - """Test BigQuery integration with mocks (no real API calls).""" - - @pytest.mark.skipif( - not _bigquery_available(), reason="BigQuery dependencies not available" - ) - @pytest.mark.asyncio - async def test_bigquery_tools(self): - """Test BigQuery tools functionality with mocks.""" - with patch.dict( - os.environ, - {"M3_BACKEND": "bigquery", "M3_PROJECT_ID": "test-project"}, - clear=True, - ): - with patch("google.cloud.bigquery.Client") as mock_client: - # Mock BigQuery client and query results - mock_job = Mock() - mock_df = Mock() - mock_df.empty = False - mock_df.to_string.return_value = "Mock BigQuery result" - mock_df.__len__ = Mock(return_value=5) - mock_job.to_dataframe.return_value = mock_df - - mock_client_instance = Mock() - mock_client_instance.query.return_value = mock_job - mock_client.return_value = mock_client_instance - - _init_backend() - - async with Client(mcp) as client: - # Test execute_mimic_query tool - result = await client.call_tool( - "execute_mimic_query", - { - "sql_query": "SELECT COUNT(*) FROM `physionet-data.mimiciv_3_1_icu.icustays`" - }, - ) - result_text = str(result) - assert "Mock BigQuery result" in result_text - - # Test get_race_distribution tool - result = await client.call_tool( - "get_race_distribution", {"limit": 5} - ) - result_text = str(result) - assert "Mock BigQuery result" in result_text - - # Verify BigQuery client was called - mock_client.assert_called_once_with(project="test-project") - assert mock_client_instance.query.called - - -class TestServerIntegration: - """Test overall server integration.""" - - def test_server_main_function_exists(self): - """Test that the main function exists and is callable.""" - from m3.mcp_server import main - - assert callable(main) - - def test_server_can_be_imported_as_module(self): - """Test that the server can be imported as a module.""" - import m3.mcp_server - - assert hasattr(m3.mcp_server, 
"mcp") - assert hasattr(m3.mcp_server, "main") diff --git a/tests/test_oauth2_basic.py b/tests/test_oauth2_basic.py deleted file mode 100644 index 1931de3..0000000 --- a/tests/test_oauth2_basic.py +++ /dev/null @@ -1,208 +0,0 @@ -""" -Basic OAuth2 authentication tests. -""" - -import os -from unittest.mock import patch - -import pytest - -from m3.auth import ( - OAuth2Config, - init_oauth2, - is_oauth2_enabled, - require_oauth2, -) - - -class TestOAuth2BasicConfig: - """Test basic OAuth2 configuration.""" - - def test_oauth2_disabled_by_default(self): - """Test that OAuth2 is disabled by default.""" - with patch.dict(os.environ, {}, clear=True): - config = OAuth2Config() - assert not config.enabled - - def test_oauth2_enabled_configuration(self): - """Test OAuth2 enabled configuration.""" - env_vars = { - "M3_OAUTH2_ENABLED": "true", - "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", - "M3_OAUTH2_AUDIENCE": "m3-api", - "M3_OAUTH2_REQUIRED_SCOPES": "read:mimic-data,write:mimic-data", - } - - with patch.dict(os.environ, env_vars, clear=True): - config = OAuth2Config() - assert config.enabled - assert config.issuer_url == "https://auth.example.com" - assert config.audience == "m3-api" - assert config.required_scopes == {"read:mimic-data", "write:mimic-data"} - - def test_oauth2_invalid_configuration_raises_error(self): - """Test that invalid OAuth2 configuration raises an error.""" - with patch.dict(os.environ, {"M3_OAUTH2_ENABLED": "true"}, clear=True): - with pytest.raises(ValueError, match="M3_OAUTH2_ISSUER_URL is required"): - OAuth2Config() - - def test_jwks_url_auto_discovery(self): - """Test automatic JWKS URL discovery.""" - env_vars = { - "M3_OAUTH2_ENABLED": "true", - "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", - "M3_OAUTH2_AUDIENCE": "m3-api", - } - - with patch.dict(os.environ, env_vars, clear=True): - config = OAuth2Config() - assert config.jwks_url == "https://auth.example.com/.well-known/jwks.json" - - def test_scope_parsing(self): - """Test 
scope parsing from environment variable.""" - config = OAuth2Config() - - # Test comma-separated scopes - scopes = config._parse_scopes("read:data, write:data, admin") - assert scopes == {"read:data", "write:data", "admin"} - - # Test empty scopes - scopes = config._parse_scopes("") - assert scopes == set() - - -class TestOAuth2BasicIntegration: - """Test basic OAuth2 integration functions.""" - - def test_init_oauth2_disabled(self): - """Test OAuth2 initialization when disabled.""" - with patch.dict(os.environ, {}, clear=True): - init_oauth2() - assert not is_oauth2_enabled() - - def test_init_oauth2_enabled(self): - """Test OAuth2 initialization when enabled.""" - env_vars = { - "M3_OAUTH2_ENABLED": "true", - "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", - "M3_OAUTH2_AUDIENCE": "m3-api", - } - - with patch.dict(os.environ, env_vars, clear=True): - init_oauth2() - assert is_oauth2_enabled() - - -class TestOAuth2BasicDecorator: - """Test basic OAuth2 decorator functionality.""" - - def setup_method(self): - """Set up test fixtures.""" - # Reset global state - import m3.auth - - m3.auth._oauth2_config = None - m3.auth._oauth2_validator = None - - def test_decorator_with_oauth2_disabled(self): - """Test decorator behavior when OAuth2 is disabled.""" - - @require_oauth2 - def test_function(): - return "success" - - with patch.dict(os.environ, {}, clear=True): - init_oauth2() - - # Should allow access when OAuth2 is disabled - result = test_function() - assert result == "success" - - def test_decorator_with_missing_token(self): - """Test decorator behavior with missing token.""" - - @require_oauth2 - def test_function(): - return "success" - - env_vars = { - "M3_OAUTH2_ENABLED": "true", - "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", - "M3_OAUTH2_AUDIENCE": "m3-api", - } - - with patch.dict(os.environ, env_vars, clear=True): - init_oauth2() - - # Should return error when token is missing - result = test_function() - assert "Missing OAuth2 access token" in 
result - - def test_decorator_with_invalid_token_format(self): - """Test decorator behavior with invalid token format.""" - - @require_oauth2 - def test_function(): - return "success" - - env_vars = { - "M3_OAUTH2_ENABLED": "true", - "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", - "M3_OAUTH2_AUDIENCE": "m3-api", - "M3_OAUTH2_TOKEN": "invalid-token", - } - - with patch.dict(os.environ, env_vars, clear=True): - init_oauth2() - - # Should return error with invalid token format - result = test_function() - assert "Invalid token format" in result - - def test_decorator_with_valid_jwt_format(self): - """Test decorator behavior with valid JWT format.""" - - @require_oauth2 - def test_function(): - return "success" - - # Valid JWT format (header.payload.signature) - valid_jwt = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.signature" - - env_vars = { - "M3_OAUTH2_ENABLED": "true", - "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", - "M3_OAUTH2_AUDIENCE": "m3-api", - "M3_OAUTH2_TOKEN": f"Bearer {valid_jwt}", - } - - with patch.dict(os.environ, env_vars, clear=True): - init_oauth2() - - # Should work with valid JWT format - result = test_function() - assert result == "success" - - def test_decorator_with_bearer_prefix_removal(self): - """Test that Bearer prefix is correctly removed.""" - - @require_oauth2 - def test_function(): - return "success" - - # Valid JWT format (header.payload.signature) - valid_jwt = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.signature" - - env_vars = { - "M3_OAUTH2_ENABLED": "true", - "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", - "M3_OAUTH2_AUDIENCE": "m3-api", - "M3_OAUTH2_TOKEN": f"Bearer {valid_jwt}", - } - - with patch.dict(os.environ, env_vars, clear=True): - init_oauth2() - - # Should work even with Bearer prefix - result = test_function() - assert result == "success" diff --git 
a/tests/tools/test_tools.py b/tests/tools/test_tools.py new file mode 100644 index 0000000..c288ecd --- /dev/null +++ b/tests/tools/test_tools.py @@ -0,0 +1,971 @@ +import json +import logging +import os +import sqlite3 +import tempfile +import zlib +from pathlib import Path +from unittest.mock import MagicMock, Mock, patch + +import polars as pl +import pytest +import requests +from beartype.typing import Dict, List +from fastmcp import Client +from typer.testing import CliRunner + +from m3 import M3CLI +from m3.core.config import M3Config +from m3.core.tool.backend.backends.bigquery import BigQueryBackend +from m3.core.tool.backend.backends.sqlite import SQLiteBackend +from m3.core.utils.exceptions import M3ConfigError, M3ValidationError +from m3.m3 import M3 +from m3.tools.mimic.components.auth import Auth +from m3.tools.mimic.components.data_io import COMMON_USER_AGENT, DataIO +from m3.tools.mimic.components.utils import ( + get_dataset_config, + get_dataset_raw_files_path, + get_default_database_path, +) +from m3.tools.mimic.mimic import MIMIC + + +def _bigquery_available() -> bool: + """Check if BigQuery dependencies are available.""" + try: + import importlib.util + + return importlib.util.find_spec("google.cloud.bigquery") is not None + except ImportError: + return False + + +def _strip_ansi_codes(text: str) -> str: + """Strip ANSI escape codes from text.""" + import re + + ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") + return ansi_escape.sub("", text) + + +runner = CliRunner() +app = M3CLI().app + + +class DummyResponse: + """Dummy response for requests mocking.""" + + def __init__( + self, + content: str | bytes, + status_code: int = 200, + headers: Dict[str, str] | None = None, + ) -> None: + self.content = content.encode() if isinstance(content, str) else content + self.status_code = status_code + self.headers = headers or {} + + def raise_for_status(self) -> None: + if not (200 <= self.status_code < 300): + raise 
requests.exceptions.HTTPError(response=self) + + @property + def reason(self) -> str: + return "Error" + + def iter_content(self, chunk_size: int = 1) -> List[bytes]: + yield from [ + self.content[i : i + chunk_size] + for i in range(0, len(self.content), chunk_size) + ] + + +@pytest.fixture +def m3_config() -> M3Config: + """Fixture for M3Config instance.""" + return M3Config() + + +@pytest.fixture +def data_io(m3_config: M3Config) -> DataIO: + """Fixture for DataIO instance.""" + return DataIO(config=m3_config) + + +@pytest.fixture +def mock_session() -> MagicMock: + """Fixture for mocked requests Session.""" + return MagicMock(spec=requests.Session) + + +class TestMimic: + """Comprehensive tests for MIMIC tool components.""" + + def test_oauth2_disabled_by_default(self) -> None: + """Test that OAuth2 is disabled by default when M3_OAUTH2_ENABLED is not set or false.""" + config: M3Config = M3Config(env_vars={}) + auth: Auth = Auth(config) + assert not auth.enabled + + def test_oauth2_invalid_configuration_missing_issuer(self) -> None: + """Test that missing issuer URL raises M3ConfigError.""" + config: M3Config = M3Config(env_vars={"M3_OAUTH2_ENABLED": "true"}) + with pytest.raises( + M3ConfigError, match="Missing required env var: M3_OAUTH2_ISSUER_URL" + ): + Auth(config) + + def test_oauth2_enabled_configuration(self) -> None: + """Test that OAuth2 is enabled with proper configuration.""" + env_vars: dict[str, str] = { + "M3_OAUTH2_ENABLED": "true", + "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", + "M3_OAUTH2_AUDIENCE": "m3-api", + "M3_OAUTH2_REQUIRED_SCOPES": "read:mimic-data,write:mimic-data", + } + config: M3Config = M3Config(env_vars=env_vars) + auth: Auth = Auth(config) + assert auth.enabled + assert auth.issuer_url == "https://auth.example.com" + assert auth.audience == "m3-api" + assert auth.required_scopes == {"read:mimic-data", "write:mimic-data"} + + def test_jwks_url_auto_discovery(self) -> None: + """Test that JWKS URL is auto-discovered from 
issuer URL when not set.""" + env_vars: dict[str, str] = { + "M3_OAUTH2_ENABLED": "true", + "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", + "M3_OAUTH2_AUDIENCE": "m3-api", + } + config: M3Config = M3Config(env_vars=env_vars) + auth: Auth = Auth(config) + assert auth.jwks_url == "https://auth.example.com/.well-known/jwks.json" + + def test_jwks_url_custom(self) -> None: + """Test that a custom JWKS URL is used when provided.""" + env_vars: dict[str, str] = { + "M3_OAUTH2_ENABLED": "true", + "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", + "M3_OAUTH2_AUDIENCE": "m3-api", + "M3_OAUTH2_JWKS_URL": "https://custom-jwks.example.com", + } + config: M3Config = M3Config(env_vars=env_vars) + auth: Auth = Auth(config) + assert auth.jwks_url == "https://custom-jwks.example.com" + + def test_scope_parsing(self) -> None: + """Test that required scopes are correctly parsed from environment variable.""" + env_vars: dict[str, str] = { + "M3_OAUTH2_ENABLED": "true", + "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", + "M3_OAUTH2_AUDIENCE": "m3-api", + "M3_OAUTH2_REQUIRED_SCOPES": "read:mimic-data, write:mimic-data , admin", + } + config: M3Config = M3Config(env_vars=env_vars) + auth: Auth = Auth(config) + assert auth.required_scopes == {"read:mimic-data", "write:mimic-data", "admin"} + + @pytest.mark.asyncio + async def test_decorator_oauth2_disabled(self) -> None: + """Test that decorator allows access when OAuth2 is disabled.""" + config: M3Config = M3Config(env_vars={"M3_OAUTH2_ENABLED": "false"}) + auth: Auth = Auth(config) + + @auth.decorator + def sync_func() -> str: + return "success" + + result: str = await sync_func() + assert result == "success" + + @pytest.mark.asyncio + async def test_decorator_oauth2_disabled_async(self) -> None: + """Test that decorator allows access for async functions when OAuth2 is disabled.""" + config: M3Config = M3Config(env_vars={"M3_OAUTH2_ENABLED": "false"}) + auth: Auth = Auth(config) + + @auth.decorator + async def async_func() 
-> str: + return "success" + + result: str = await async_func() + assert result == "success" + + @pytest.mark.asyncio + async def test_decorator_missing_token(self) -> None: + """Test that decorator raises an error when token is missing and OAuth2 is enabled.""" + config: M3Config = M3Config( + env_vars={ + "M3_OAUTH2_ENABLED": "true", + "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", + "M3_OAUTH2_AUDIENCE": "m3-api", + } + ) + auth: Auth = Auth(config) + + @auth.decorator + def sync_func() -> str: + return "success" + + with pytest.raises(M3ValidationError, match="Missing OAuth2 access token"): + await sync_func() + + @pytest.mark.asyncio + async def test_decorator_invalid_token(self) -> None: + """Test that decorator raises an error when token is invalid.""" + config = M3Config( + env_vars={ + "M3_OAUTH2_ENABLED": "true", + "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", + "M3_OAUTH2_AUDIENCE": "m3-api", + "M3_OAUTH2_TOKEN": "Bearer invalid_token", + } + ) + auth: Auth = Auth(config) + + with patch.object( + auth, "authenticate", side_effect=M3ValidationError("Invalid token") + ): + + @auth.decorator + def sync_func() -> str: + return "success" + + with pytest.raises(M3ValidationError, match="Invalid token"): + await sync_func() + + @pytest.mark.asyncio + async def test_decorator_valid_token(self) -> None: + """Test that decorator allows access with a valid token.""" + config = M3Config( + env_vars={ + "M3_OAUTH2_ENABLED": "true", + "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", + "M3_OAUTH2_AUDIENCE": "m3-api", + "M3_OAUTH2_TOKEN": "Bearer valid_token", + "M3_OAUTH2_REQUIRED_SCOPES": "read:mimic-data", + } + ) + auth = Auth(config) + + with patch.object( + auth, + "authenticate", + return_value={"sub": "test-user", "scope": "read:mimic-data"}, + ): + + @auth.decorator + def sync_func() -> str: + return "success" + + result: str = await sync_func() + assert result == "success" + + @pytest.mark.asyncio + async def test_decorator_missing_scope(self) -> 
None: +        """Test that decorator raises an error when required scopes are missing.""" +        config = M3Config( +            env_vars={ +                "M3_OAUTH2_ENABLED": "true", +                "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", +                "M3_OAUTH2_AUDIENCE": "m3-api", +                "M3_OAUTH2_TOKEN": "Bearer valid_token", +                "M3_OAUTH2_REQUIRED_SCOPES": "read:mimic-data,write:mimic-data", +            } +        ) +        auth: Auth = Auth(config) + +        with patch.object( +            auth, +            "authenticate", +            side_effect=M3ValidationError( +                "Missing required scopes: {'write:mimic-data'}" +            ), +        ): + +            @auth.decorator +            def sync_func() -> str: +                return "success" + +            with pytest.raises( +                M3ValidationError, match="Missing required scopes: {'write:mimic-data'}" +            ): +                await sync_func() + +    @pytest.mark.asyncio +    async def test_decorator_bearer_prefix(self) -> None: +        """Test that decorator handles tokens with 'Bearer ' prefix correctly.""" +        config = M3Config( +            env_vars={ +                "M3_OAUTH2_ENABLED": "true", +                "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", +                "M3_OAUTH2_AUDIENCE": "m3-api", +                "M3_OAUTH2_TOKEN": "Bearer valid_token", +                "M3_OAUTH2_REQUIRED_SCOPES": "read:mimic-data", +            } +        ) +        auth: Auth = Auth(config) + +        with patch.object( +            auth, +            "authenticate", +            return_value={"sub": "test-user", "scope": "read:mimic-data"}, +        ): + +            @auth.decorator +            def sync_func() -> str: +                return "success" + +            result: str = await sync_func() +            assert result == "success" + +    def test_common_user_agent_header(self) -> None: +        """Test COMMON_USER_AGENT is properly set.""" +        assert isinstance(COMMON_USER_AGENT, str) +        assert "Mozilla/" in COMMON_USER_AGENT + +    def test_scrape_urls_from_html_page( +        self, data_io: DataIO, mock_session: MagicMock +    ) -> None: +        """Test scraping .csv.gz URLs from HTML page.""" +        html = ( +            "" +            'ok' +            'no' +            "" +        ) +        dummy = DummyResponse(html) +        with patch.object(mock_session, "get", return_value=dummy): +            urls = data_io._scrape_urls_from_html_page( +                "http://example.com/", mock_session +            ) +            assert urls == 
["http://example.com/file1.csv.gz"] + + def test_scrape_no_matching_suffix( + self, data_io: DataIO, mock_session: MagicMock + ) -> None: + """Test no URLs scraped if no matching suffix.""" + html = 'ok' + dummy = DummyResponse(html) + with patch.object(mock_session, "get", return_value=dummy): + urls = data_io._scrape_urls_from_html_page( + "http://example.com/", mock_session + ) + assert urls == [] + + def test_scrape_urls_error_handling( + self, data_io: DataIO, mock_session: MagicMock, caplog: pytest.LogCaptureFixture + ) -> None: + """Test error handling in URL scraping.""" + with patch.object( + mock_session, "get", side_effect=requests.RequestException("Network error") + ): + with caplog.at_level(logging.ERROR): + urls = data_io._scrape_urls_from_html_page( + "http://example.com/", mock_session + ) + assert urls == [] + assert "Scrape failed" in caplog.text + + def test_download_single_file_success( + self, data_io: DataIO, mock_session: MagicMock, tmp_path: Path + ) -> None: + """Test successful file download.""" + content = "test content" + dummy = DummyResponse(content, headers={"content-length": str(len(content))}) + target_path = tmp_path / "test.csv.gz" + with patch.object(mock_session, "get", return_value=dummy): + success = data_io._download_single_file( + "http://example.com/test.csv.gz", target_path, mock_session + ) + assert success + assert target_path.exists() + assert target_path.read_text() == content + + def test_download_single_file_failure( + self, + data_io: DataIO, + mock_session: MagicMock, + tmp_path: Path, + caplog: pytest.LogCaptureFixture, + ) -> None: + """Test failed file download.""" + target_path = tmp_path / "test.csv.gz" + with ( + patch.object( + mock_session, + "get", + side_effect=requests.RequestException("Download error"), + ), + caplog.at_level(logging.ERROR), + ): + success = data_io._download_single_file( + "http://example.com/test.csv.gz", target_path, mock_session + ) + assert not success + assert not 
target_path.exists() + assert "Download failed" in caplog.text + + def test_load_csv_with_robust_parsing_success( + self, data_io: DataIO, tmp_path: Path + ) -> None: + """Test successful CSV loading with robust parsing.""" + csv_path = tmp_path / "test.csv.gz" + content = "col1,col2\n1,2\n3,4\n" + compressed = zlib.compress(content.encode()) + csv_path.write_bytes(compressed) + df = data_io._load_csv_with_robust_parsing(csv_path, "test_table") + assert isinstance(df, pl.DataFrame) + assert len(df) == 2 + assert df.columns == ["col1", "col2"] + + def test_etl_csv_collection_to_sqlite_success( + self, data_io: DataIO, tmp_path: Path + ) -> None: + """Test successful ETL from CSV to SQLite.""" + source_dir = tmp_path / "source" + source_dir.mkdir() + csv_path = source_dir / "test.csv.gz" + content = "col1,col2\n1,2" + compressed = zlib.compress(content.encode()) + csv_path.write_bytes(compressed) + db_path = tmp_path / "M3_test_environment_test.db" + success = data_io._etl_csv_collection_to_sqlite(source_dir, db_path) + assert success + assert db_path.exists() + if db_path.exists(): + db_path.unlink() + + def test_etl_csv_collection_to_sqlite_no_files( + self, data_io: DataIO, tmp_path: Path + ) -> None: + """Test ETL with no CSV files returns False.""" + source_dir = tmp_path / "empty" + source_dir.mkdir() + db_path = tmp_path / "M3_test_environment_test.db" + success = data_io._etl_csv_collection_to_sqlite(source_dir, db_path) + assert not success + if db_path.exists(): + db_path.unlink() + + @patch("m3.tools.mimic.components.data_io.get_dataset_config") + @patch("m3.tools.mimic.components.data_io.get_dataset_raw_files_path") + @patch("m3.tools.mimic.components.data_io.DataIO._download_dataset_files") + @patch("m3.tools.mimic.components.data_io.DataIO._etl_csv_collection_to_sqlite") + def test_initialize_success( + self, + mock_etl: MagicMock, + mock_download: MagicMock, + mock_raw_path: MagicMock, + mock_dataset_config: MagicMock, + data_io: DataIO, + tmp_path: 
Path, + ) -> None: + """Test successful dataset initialization.""" + mock_dataset_config.return_value = {"file_listing_url": "http://example.com"} + mock_raw_path.return_value = tmp_path / "raw" + mock_download.return_value = True + mock_etl.return_value = True + success = data_io.initialize("test_dataset", tmp_path / "db.sqlite") + assert success + + @patch("m3.tools.mimic.components.data_io.get_dataset_config") + def test_initialize_invalid_dataset( + self, mock_dataset_config: MagicMock, data_io: DataIO, tmp_path: Path + ) -> None: + """Test initialization with invalid dataset raises error.""" + mock_dataset_config.return_value = None + with pytest.raises(M3ValidationError, match="Config not found"): + data_io.initialize("invalid", tmp_path / "db.sqlite") + + @patch("m3.tools.mimic.components.data_io.get_dataset_config") + @patch("m3.tools.mimic.components.data_io.get_dataset_raw_files_path") + def test_initialize_invalid_raw_path( + self, + mock_raw_path: MagicMock, + mock_dataset_config: MagicMock, + data_io: DataIO, + tmp_path: Path, + ) -> None: + """Test initialization with invalid raw path raises error.""" + mock_dataset_config.return_value = {"file_listing_url": "http://example.com"} + mock_raw_path.return_value = None + with pytest.raises(M3ValidationError, match="Raw files path not found"): + data_io.initialize("mimic-iv-demo", tmp_path / "db.sqlite") + + @patch("m3.tools.mimic.components.data_io.DataIO._download_dataset_files") + def test_initialize_download_failure( + self, mock_download: MagicMock, data_io: DataIO, tmp_path: Path + ) -> None: + """Test initialization fails on download failure.""" + mock_download.return_value = False + success = data_io.initialize("mimic-iv-demo", tmp_path / "db.sqlite") + assert not success + + @patch("m3.tools.mimic.components.data_io.DataIO._download_dataset_files") + @patch("m3.tools.mimic.components.data_io.DataIO._etl_csv_collection_to_sqlite") + def test_initialize_etl_failure( + self, + mock_etl: MagicMock, + 
mock_download: MagicMock, + data_io: DataIO, + tmp_path: Path, + ) -> None: + """Test initialization fails on ETL failure.""" + mock_download.return_value = True + mock_etl.return_value = False + success = data_io.initialize("mimic-iv-demo", tmp_path / "db.sqlite") + assert not success + + def test_get_dataset_config_known(self, m3_config: M3Config) -> None: + """Test retrieving config for a known dataset.""" + cfg = get_dataset_config("mimic-iv-demo") + assert isinstance(cfg, dict) + assert cfg.get("default_db_filename") == "mimic_iv_demo.db" + + def test_get_dataset_config_unknown(self, m3_config: M3Config) -> None: + """Test retrieving config for an unknown dataset returns None.""" + assert get_dataset_config("not-a-dataset") is None + + def test_default_paths(self, m3_config: M3Config, tmp_path: Path) -> None: + """Test default path generation and directory creation.""" + with patch.object(m3_config, "databases_dir", tmp_path / "dbs"): + db_path = get_default_database_path(m3_config, "mimic-iv-demo") + + assert isinstance(db_path, Path) + assert db_path.parent == tmp_path / "dbs" / "mimic-iv-demo" + assert db_path.name == "mimic_iv_demo.db" + + with patch.object(m3_config, "raw_files_dir", tmp_path / "raw"): + raw_path = get_dataset_raw_files_path(m3_config, "mimic-iv-demo") + + assert isinstance(raw_path, Path) + assert raw_path.exists() + assert raw_path == tmp_path / "raw" / "mimic-iv-demo" + + def test_raw_path_includes_dataset_name( + self, m3_config: M3Config, tmp_path: Path + ) -> None: + """Test raw files path includes dataset name.""" + with patch.object(m3_config, "raw_files_dir", tmp_path / "raw"): + raw_path = get_dataset_raw_files_path(m3_config, "mimic-iv-demo") + assert "mimic-iv-demo" in str(raw_path) + assert raw_path.exists() + + @patch("m3.tools.mimic.components.data_io.DataIO.initialize") + @patch("sqlite3.connect") + def test_mimic_init_respects_custom_db_path( + self, + mock_sqlite_connect: MagicMock, + mock_initialize: MagicMock, + ) -> 
None: + """Test that m3 tools mimic init respects custom db path.""" + mock_initialize.return_value = True + mock_cursor = mock_sqlite_connect.return_value.cursor.return_value + mock_cursor.fetchone.return_value = (100,) + + with tempfile.TemporaryDirectory() as temp_dir: + custom_db_path = Path(temp_dir) / "custom_mimic.db" + result = runner.invoke( + app, + [ + "tools", + "mimic", + "init", + "--dataset", + "mimic-iv-demo", + "--db-path", + str(custom_db_path), + ], + ) + assert result.exit_code == 0 + output = _strip_ansi_codes(result.stdout) + assert str(custom_db_path) in output + mock_initialize.assert_called_once_with("mimic-iv-demo", custom_db_path) + + if custom_db_path.exists(): + custom_db_path.unlink() + + @patch("m3.core.config.M3Config") + def test_tools_mimic_status(self, mock_config: MagicMock) -> None: + """Test that tools mimic status displays info.""" + mock_config_instance = mock_config.return_value + mock_config_instance.get_env_var.side_effect = lambda k, d: d + result = runner.invoke(app, ["tools", "mimic", "status"]) + assert result.exit_code == 0 + output = _strip_ansi_codes(result.stdout) + assert "MIMIC Tool Status" in output + assert "Backend" in output + assert "sqlite" in output + + def test_tools_mimic_configure(self, tmp_path: Path) -> None: + """Test that tools mimic configure generates config.""" + config_path = tmp_path / "config.json" + result = runner.invoke( + app, + [ + "tools", + "mimic", + "configure", + "--backend", + "sqlite", + "--db-path", + "M3_test_environment_test.db", + "--output", + str(config_path), + ], + input="\n", + ) + assert result.exit_code == 0 + output = _strip_ansi_codes(result.stdout) + assert "โœ… Config dict saved to" in output + assert config_path.exists() + with open(config_path) as f: + config = json.load(f) + assert config["env_vars"]["M3_BACKEND"] == "sqlite" + assert config["env_vars"]["M3_DB_PATH"] == "M3_test_environment_test.db" + assert config["tool_params"]["backend_key"] == "sqlite" + 
assert len(config["tool_params"]["backends"]) == 1 + assert config["tool_params"]["backends"][0]["type"] == "sqlite" + assert ( + config["tool_params"]["backends"][0]["params"]["path"] + == "M3_test_environment_test.db" + ) + + if config_path.exists(): + config_path.unlink() + + if Path("M3_test_environment_test.db").exists(): + Path("M3_test_environment_test.db").unlink() + + @pytest.fixture + def test_db(self, tmp_path: Path) -> str: + """Create a test SQLite database.""" + db_path: Path = tmp_path / "M3_test_environment_test.db" + + conn: sqlite3.Connection = sqlite3.connect(db_path) + cursor: sqlite3.Cursor = conn.cursor() + + # Create icu_icustays table + cursor.execute( + """ + CREATE TABLE icu_icustays ( + subject_id INTEGER, + hadm_id INTEGER, + stay_id INTEGER, + intime TEXT, + outtime TEXT + ) + """ + ) + cursor.execute( + """ + INSERT INTO icu_icustays (subject_id, hadm_id, stay_id, intime, outtime) + VALUES + (10000032, 20000001, 30000001, '2180-07-23 15:00:00', '2180-07-24 12:00:00'), + (10000033, 20000002, 30000002, '2180-08-15 10:30:00', '2180-08-16 14:15:00') + """ + ) + + # Create hosp_labevents table + cursor.execute( + """ + CREATE TABLE hosp_labevents ( + subject_id INTEGER, + hadm_id INTEGER, + itemid INTEGER, + charttime TEXT, + value TEXT + ) + """ + ) + cursor.execute( + """ + INSERT INTO hosp_labevents (subject_id, hadm_id, itemid, charttime, value) + VALUES + (10000032, 20000001, 50912, '2180-07-23 16:00:00', '120'), + (10000033, 20000002, 50912, '2180-08-15 11:00:00', '95') + """ + ) + + # Create hosp_admissions table for race distribution + cursor.execute( + """ + CREATE TABLE hosp_admissions ( + subject_id INTEGER, + hadm_id INTEGER, + race TEXT + ) + """ + ) + cursor.execute( + """ + INSERT INTO hosp_admissions (subject_id, hadm_id, race) + VALUES + (10000032, 20000001, 'WHITE'), + (10000033, 20000002, 'BLACK/AFRICAN AMERICAN') + """ + ) + + conn.commit() + conn.close() + + return str(db_path) + + @pytest.mark.asyncio + async def 
test_tools_via_client(self, test_db: str) -> None: + """Test MCP tools through the FastMCP client.""" + with patch.dict( + os.environ, + { + "M3_OAUTH2_ENABLED": "false", + }, + clear=True, + ): + config = M3Config(env_vars=os.environ.copy()) + mimic = MIMIC( + backends=[SQLiteBackend(path=test_db)], + config=config, + backend_key="sqlite", + ) + m3 = M3(config=config).with_tool(mimic) + m3.build() + + async with Client(m3.mcp) as client: # type: ignore + result: str = await client.call_tool( + "execute_mimic_query", + {"sql_query": "SELECT COUNT(*) as count FROM icu_icustays"}, + ) + result_text: str = str(result) + assert "count" in result_text + assert "2" in result_text + + result = await client.call_tool( + "get_icu_stays", {"patient_id": 10000032, "limit": 10} + ) + result_text = str(result) + assert "10000032" in result_text + + result = await client.call_tool( + "get_lab_results", {"patient_id": 10000032, "limit": 20} + ) + result_text = str(result) + assert "10000032" in result_text + + result = await client.call_tool("get_database_schema", {}) + result_text = str(result) + assert ( + "icu_icustays" in result_text + and "hosp_labevents" in result_text + and "hosp_admissions" in result_text + ) + + result = await client.call_tool( + "get_table_info", {"table_name": "icu_icustays"} + ) + result_text = str(result) + assert "subject_id" in result_text + assert "intime" in result_text + + result = await client.call_tool("get_race_distribution", {"limit": 5}) + result_text = str(result) + assert "WHITE" in result_text + assert "BLACK/AFRICAN AMERICAN" in result_text + + if Path(test_db).exists(): + Path(test_db).unlink() + + @pytest.mark.asyncio + async def test_security_checks(self, test_db: str) -> None: + """Test SQL injection protection.""" + with patch.dict( + os.environ, + { + "M3_OAUTH2_ENABLED": "false", + }, + clear=True, + ): + config = M3Config(env_vars=os.environ.copy()) + mimic = MIMIC( + backends=[SQLiteBackend(path=test_db)], + config=config, + 
backend_key="sqlite", + ) + m3 = M3(config=config).with_tool(mimic) + m3.build() + + async with Client(m3.mcp) as client: # type: ignore + # Test dangerous queries are blocked + dangerous_queries: list[str] = [ + "UPDATE icu_icustays SET subject_id = 999", + "DELETE FROM icu_icustays", + "INSERT INTO icu_icustays VALUES (1, 2, 3, '2020-01-01', '2020-01-02')", + "DROP TABLE icu_icustays", + "CREATE TABLE test (id INTEGER)", + "ALTER TABLE icu_icustays ADD COLUMN test TEXT", + ] + + for query in dangerous_queries: + result: str = await client.call_tool( + "execute_mimic_query", {"sql_query": query} + ) + result_text: str = str(result) + assert ( + "Security Error:" in result_text + and "Only SELECT" in result_text + ) + + if Path(test_db).exists(): + Path(test_db).unlink() + + @pytest.mark.asyncio + async def test_invalid_sql(self, test_db: str) -> None: + """Test handling of invalid SQL.""" + with patch.dict( + os.environ, + { + "M3_OAUTH2_ENABLED": "false", + }, + clear=True, + ): + config = M3Config(env_vars=os.environ.copy()) + mimic = MIMIC( + backends=[SQLiteBackend(path=test_db)], + config=config, + backend_key="sqlite", + ) + m3 = M3(config=config).with_tool(mimic) + m3.build() + + async with Client(m3.mcp) as client: # type: ignore + result: str = await client.call_tool( + "execute_mimic_query", {"sql_query": "INVALID SQL QUERY"} + ) + result_text: str = str(result) + assert "Query Failed:" in result_text and "syntax error" in result_text + + if Path(test_db).exists(): + Path(test_db).unlink() + + @pytest.mark.asyncio + async def test_empty_results(self, test_db: str) -> None: + """Test handling of queries with no results.""" + with patch.dict( + os.environ, + { + "M3_OAUTH2_ENABLED": "false", + }, + clear=True, + ): + config = M3Config(env_vars=os.environ.copy()) + mimic = MIMIC( + backends=[SQLiteBackend(path=test_db)], + config=config, + backend_key="sqlite", + ) + m3 = M3(config=config).with_tool(mimic) + m3.build() + + async with Client(m3.mcp) as 
client: # type: ignore + result: str = await client.call_tool( + "execute_mimic_query", + { + "sql_query": "SELECT * FROM icu_icustays WHERE subject_id = 999999" + }, + ) + result_text: str = str(result) + assert "No results found" in result_text + + if Path(test_db).exists(): + Path(test_db).unlink() + + @pytest.mark.asyncio + async def test_oauth2_authentication_required(self, test_db: str) -> None: + """Test that OAuth2 authentication is required when enabled.""" + with patch.dict( + os.environ, + { + "M3_OAUTH2_ENABLED": "true", + "M3_OAUTH2_ISSUER_URL": "https://auth.example.com", + "M3_OAUTH2_AUDIENCE": "m3-api", + }, + clear=True, + ): + config = M3Config(env_vars=os.environ.copy()) + mimic = MIMIC( + backends=[SQLiteBackend(path=test_db)], + config=config, + backend_key="sqlite", + ) + m3 = M3(config=config).with_tool(mimic) + m3.build() + + async with Client(m3.mcp) as client: # type: ignore + result: str = await client.call_tool( + "execute_mimic_query", + {"sql_query": "SELECT COUNT(*) FROM icu_icustays"}, + raise_on_error=False, + ) + result_text: str = str(result) + assert "Missing OAuth2 access token" in result_text + + if Path(test_db).exists(): + Path(test_db).unlink() + + @pytest.mark.skipif( + not _bigquery_available(), reason="BigQuery dependencies not available" + ) + @pytest.mark.asyncio + async def test_bigquery_tools(self) -> None: + """Test BigQuery tools functionality with mocks.""" + with patch.dict( + os.environ, + { + "M3_PROJECT_ID": "test-project", + "GOOGLE_CLOUD_PROJECT": "test-project", + }, + clear=True, + ): + import pandas as pd + + with patch("google.auth.default") as mock_auth: + mock_auth.return_value = (Mock(), "test-project") + with patch("google.cloud.bigquery.Client") as mock_client: + mock_job: Mock = Mock() + mock_df: Mock = Mock(spec=pd.DataFrame) + mock_df.empty = False + mock_df.to_string.return_value = "Mock BigQuery result" + mock_df.__len__ = Mock(return_value=5) + mock_job.to_dataframe.return_value = mock_df + + 
mock_client_instance: Mock = Mock() + mock_client_instance.query.return_value = mock_job + mock_client.return_value = mock_client_instance + + config = M3Config(env_vars=os.environ.copy()) + mimic = MIMIC( + backends=[BigQueryBackend(project="test-project")], + config=config, + backend_key="bigquery", + ) + m3 = M3(config=config).with_tool(mimic) + m3.build() + + async with Client(m3.mcp) as client: # type: ignore + result: str = await client.call_tool( + "execute_mimic_query", + { + "sql_query": "SELECT COUNT(*) FROM `physionet-data.mimiciv_3_1_icu.icustays`" + }, + ) + result_text: str = str(result) + assert "Mock BigQuery result" in result_text + + result = await client.call_tool( + "get_race_distribution", {"limit": 5} + ) + result_text = str(result) + assert "Mock BigQuery result" in result_text + + mock_client.assert_called_once_with(project="test-project") + assert mock_client_instance.query.called From 2ab83b230c0079a67608120a3204c224dd1f31f9 Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 16:52:31 +0100 Subject: [PATCH 12/15] core: add deps. (Beartype, TheFuzz, ...) 
--- pyproject.toml | 49 +- uv.lock | 1209 +++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 1185 insertions(+), 73 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 923035b..6ee5dc0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,6 +16,7 @@ maintainers = [ { name = "Rafi Al Attrach", email = "rafiaa@mit.edu" }, { name = "Pedro Moreira", email = "pedrojfm@mit.edu" }, { name = "Rajna Fani", email = "rajnaf@mit.edu" }, + {name = "Provost Simon", email = "sgp29@kent.ac.uk" }, ] readme = "README.md" license = "MIT" @@ -50,21 +51,27 @@ dependencies = [ "cryptography>=41.0.0", # Cryptographic operations for JWT "python-jose[cryptography]>=3.3.0", # Additional JWT support with crypto "httpx>=0.24.0", # Modern HTTP client for OAuth2 token validation + "pyaml>=25.7.0", + "beartype>=0.21.0", + "thefuzz>=0.22.1", + "rich-pyfiglet>=0.1.4", + "click==8.1.8", ] -[project.dependency-groups] +[dependency-groups] dev = [ "ruff>=0.4.0", "pre-commit>=3.0.0", "pytest>=7.4.0", "pytest-asyncio>=0.23.0", "pytest-mock>=3.10.0", - "aiohttp>=3.8.0", # For MCP client testing + "aiohttp>=3.8.0", + "pytest-cov>=6.2.1", ] [project.scripts] -m3 = "m3.cli:app" -m3-mcp-server = "m3.mcp_server:main" +m3 = "m3.cli:main_cli" +m3-mcp-server = "m3.core.__main__:main" [project.urls] Homepage = "https://github.com/rafiattrach/m3" @@ -90,6 +97,8 @@ select = [ "I", # isort (import sorting) "UP", # pyupgrade (modernize syntax) "RUF",# Ruff-specific rules + "B", # flake8-bugbear + "C4", # simplify comprehensions ] ignore = [ @@ -109,3 +118,35 @@ asyncio_default_fixture_loop_scope = "function" filterwarnings = [ "ignore::DeprecationWarning:jupyter_client.*", ] + +[tool.coverage.paths] +source = [ + "src/m3", +] + +[tool.coverage.run] +branch = true +omit = [ + "src/m3/**/base.py", + "src/m3/**/__init__.py", + "src/m3/**/configurations/*.yaml", + "tests/**", +] + +[tool.coverage.report] +exclude_also = [ + "def __repr__", + "if self\\.debug", + + "raise AssertionError", + 
"raise NotImplementedError", + + "if 0:", + "if __name__ == .__main__.:", + + "@(abc\\.)?abstractmethod", +] +ignore_errors = true + +[tool.coverage.html] +directory = "coverage_html_report" diff --git a/uv.lock b/uv.lock index e2af58c..b1c8d6e 100644 --- a/uv.lock +++ b/uv.lock @@ -8,6 +8,114 @@ resolution-markers = [ "python_full_version < '3.11'", ] +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.12.14" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "async-timeout", marker = "python_full_version < '3.11'" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e6/0b/e39ad954107ebf213a2325038a3e7a506be3d98e1435e1f82086eec4cde2/aiohttp-3.12.14.tar.gz", hash = "sha256:6e06e120e34d93100de448fd941522e11dafa78ef1a893c179901b7d66aa29f2", size = 7822921, upload-time = "2025-07-10T13:05:33.968Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/88/f161f429f9de391eee6a5c2cffa54e2ecd5b7122ae99df247f7734dfefcb/aiohttp-3.12.14-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:906d5075b5ba0dd1c66fcaaf60eb09926a9fef3ca92d912d2a0bbdbecf8b1248", size = 702641, upload-time = "2025-07-10T13:02:38.98Z" }, + { url = "https://files.pythonhosted.org/packages/fe/b5/24fa382a69a25d242e2baa3e56d5ea5227d1b68784521aaf3a1a8b34c9a4/aiohttp-3.12.14-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c875bf6fc2fd1a572aba0e02ef4e7a63694778c5646cdbda346ee24e630d30fb", size = 479005, upload-time = "2025-07-10T13:02:42.714Z" }, + { url = "https://files.pythonhosted.org/packages/09/67/fda1bc34adbfaa950d98d934a23900918f9d63594928c70e55045838c943/aiohttp-3.12.14-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbb284d15c6a45fab030740049d03c0ecd60edad9cd23b211d7e11d3be8d56fd", size = 466781, upload-time = "2025-07-10T13:02:44.639Z" }, + { url = "https://files.pythonhosted.org/packages/36/96/3ce1ea96d3cf6928b87cfb8cdd94650367f5c2f36e686a1f5568f0f13754/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38e360381e02e1a05d36b223ecab7bc4a6e7b5ab15760022dc92589ee1d4238c", size = 1648841, upload-time = "2025-07-10T13:02:46.356Z" }, + { url = "https://files.pythonhosted.org/packages/be/04/ddea06cb4bc7d8db3745cf95e2c42f310aad485ca075bd685f0e4f0f6b65/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:aaf90137b5e5d84a53632ad95ebee5c9e3e7468f0aab92ba3f608adcb914fa95", size = 1622896, upload-time = "2025-07-10T13:02:48.422Z" }, + { url = "https://files.pythonhosted.org/packages/73/66/63942f104d33ce6ca7871ac6c1e2ebab48b88f78b2b7680c37de60f5e8cd/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e532a25e4a0a2685fa295a31acf65e027fbe2bea7a4b02cdfbbba8a064577663", size = 1695302, upload-time = "2025-07-10T13:02:50.078Z" }, + { url = "https://files.pythonhosted.org/packages/20/00/aab615742b953f04b48cb378ee72ada88555b47b860b98c21c458c030a23/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:eab9762c4d1b08ae04a6c77474e6136da722e34fdc0e6d6eab5ee93ac29f35d1", size = 1737617, upload-time = "2025-07-10T13:02:52.123Z" }, + { url = "https://files.pythonhosted.org/packages/d6/4f/ef6d9f77225cf27747368c37b3d69fac1f8d6f9d3d5de2d410d155639524/aiohttp-3.12.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abe53c3812b2899889a7fca763cdfaeee725f5be68ea89905e4275476ffd7e61", size = 1642282, upload-time = "2025-07-10T13:02:53.899Z" }, + { url = "https://files.pythonhosted.org/packages/37/e1/e98a43c15aa52e9219a842f18c59cbae8bbe2d50c08d298f17e9e8bafa38/aiohttp-3.12.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5760909b7080aa2ec1d320baee90d03b21745573780a072b66ce633eb77a8656", size = 1582406, upload-time = "2025-07-10T13:02:55.515Z" }, + { url = "https://files.pythonhosted.org/packages/71/5c/29c6dfb49323bcdb0239bf3fc97ffcf0eaf86d3a60426a3287ec75d67721/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:02fcd3f69051467bbaa7f84d7ec3267478c7df18d68b2e28279116e29d18d4f3", size = 1626255, upload-time = "2025-07-10T13:02:57.343Z" }, + { url = "https://files.pythonhosted.org/packages/79/60/ec90782084090c4a6b459790cfd8d17be2c5662c9c4b2d21408b2f2dc36c/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4dcd1172cd6794884c33e504d3da3c35648b8be9bfa946942d353b939d5f1288", size = 1637041, upload-time = "2025-07-10T13:02:59.008Z" }, + { url = "https://files.pythonhosted.org/packages/22/89/205d3ad30865c32bc472ac13f94374210745b05bd0f2856996cb34d53396/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:224d0da41355b942b43ad08101b1b41ce633a654128ee07e36d75133443adcda", size = 1612494, upload-time = "2025-07-10T13:03:00.618Z" }, + { url = "https://files.pythonhosted.org/packages/48/ae/2f66edaa8bd6db2a4cba0386881eb92002cdc70834e2a93d1d5607132c7e/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e387668724f4d734e865c1776d841ed75b300ee61059aca0b05bce67061dcacc", size = 1692081, upload-time = "2025-07-10T13:03:02.154Z" }, + { url = "https://files.pythonhosted.org/packages/08/3a/fa73bfc6e21407ea57f7906a816f0dc73663d9549da703be05dbd76d2dc3/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:dec9cde5b5a24171e0b0a4ca064b1414950904053fb77c707efd876a2da525d8", size = 1715318, upload-time = "2025-07-10T13:03:04.322Z" }, + { url = "https://files.pythonhosted.org/packages/e3/b3/751124b8ceb0831c17960d06ee31a4732cb4a6a006fdbfa1153d07c52226/aiohttp-3.12.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bbad68a2af4877cc103cd94af9160e45676fc6f0c14abb88e6e092b945c2c8e3", size = 1643660, upload-time = "2025-07-10T13:03:06.406Z" }, + { url = "https://files.pythonhosted.org/packages/81/3c/72477a1d34edb8ab8ce8013086a41526d48b64f77e381c8908d24e1c18f5/aiohttp-3.12.14-cp310-cp310-win32.whl", hash = "sha256:ee580cb7c00bd857b3039ebca03c4448e84700dc1322f860cf7a500a6f62630c", size = 428289, upload-time = "2025-07-10T13:03:08.274Z" }, + { url = "https://files.pythonhosted.org/packages/a2/c4/8aec4ccf1b822ec78e7982bd5cf971113ecce5f773f04039c76a083116fc/aiohttp-3.12.14-cp310-cp310-win_amd64.whl", hash = "sha256:cf4f05b8cea571e2ccc3ca744e35ead24992d90a72ca2cf7ab7a2efbac6716db", size = 451328, upload-time = "2025-07-10T13:03:10.146Z" }, + { url = "https://files.pythonhosted.org/packages/53/e1/8029b29316971c5fa89cec170274582619a01b3d82dd1036872acc9bc7e8/aiohttp-3.12.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f4552ff7b18bcec18b60a90c6982049cdb9dac1dba48cf00b97934a06ce2e597", size = 709960, upload-time = "2025-07-10T13:03:11.936Z" }, + { url = "https://files.pythonhosted.org/packages/96/bd/4f204cf1e282041f7b7e8155f846583b19149e0872752711d0da5e9cc023/aiohttp-3.12.14-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8283f42181ff6ccbcf25acaae4e8ab2ff7e92b3ca4a4ced73b2c12d8cd971393", size = 482235, upload-time = "2025-07-10T13:03:14.118Z" }, + { 
url = "https://files.pythonhosted.org/packages/d6/0f/2a580fcdd113fe2197a3b9df30230c7e85bb10bf56f7915457c60e9addd9/aiohttp-3.12.14-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:040afa180ea514495aaff7ad34ec3d27826eaa5d19812730fe9e529b04bb2179", size = 470501, upload-time = "2025-07-10T13:03:16.153Z" }, + { url = "https://files.pythonhosted.org/packages/38/78/2c1089f6adca90c3dd74915bafed6d6d8a87df5e3da74200f6b3a8b8906f/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b413c12f14c1149f0ffd890f4141a7471ba4b41234fe4fd4a0ff82b1dc299dbb", size = 1740696, upload-time = "2025-07-10T13:03:18.4Z" }, + { url = "https://files.pythonhosted.org/packages/4a/c8/ce6c7a34d9c589f007cfe064da2d943b3dee5aabc64eaecd21faf927ab11/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1d6f607ce2e1a93315414e3d448b831238f1874b9968e1195b06efaa5c87e245", size = 1689365, upload-time = "2025-07-10T13:03:20.629Z" }, + { url = "https://files.pythonhosted.org/packages/18/10/431cd3d089de700756a56aa896faf3ea82bee39d22f89db7ddc957580308/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:565e70d03e924333004ed101599902bba09ebb14843c8ea39d657f037115201b", size = 1788157, upload-time = "2025-07-10T13:03:22.44Z" }, + { url = "https://files.pythonhosted.org/packages/fa/b2/26f4524184e0f7ba46671c512d4b03022633bcf7d32fa0c6f1ef49d55800/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4699979560728b168d5ab63c668a093c9570af2c7a78ea24ca5212c6cdc2b641", size = 1827203, upload-time = "2025-07-10T13:03:24.628Z" }, + { url = "https://files.pythonhosted.org/packages/e0/30/aadcdf71b510a718e3d98a7bfeaea2396ac847f218b7e8edb241b09bd99a/aiohttp-3.12.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad5fdf6af93ec6c99bf800eba3af9a43d8bfd66dce920ac905c817ef4a712afe", size = 1729664, upload-time = 
"2025-07-10T13:03:26.412Z" }, + { url = "https://files.pythonhosted.org/packages/67/7f/7ccf11756ae498fdedc3d689a0c36ace8fc82f9d52d3517da24adf6e9a74/aiohttp-3.12.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ac76627c0b7ee0e80e871bde0d376a057916cb008a8f3ffc889570a838f5cc7", size = 1666741, upload-time = "2025-07-10T13:03:28.167Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4d/35ebc170b1856dd020c92376dbfe4297217625ef4004d56587024dc2289c/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:798204af1180885651b77bf03adc903743a86a39c7392c472891649610844635", size = 1715013, upload-time = "2025-07-10T13:03:30.018Z" }, + { url = "https://files.pythonhosted.org/packages/7b/24/46dc0380146f33e2e4aa088b92374b598f5bdcde1718c77e8d1a0094f1a4/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:4f1205f97de92c37dd71cf2d5bcfb65fdaed3c255d246172cce729a8d849b4da", size = 1710172, upload-time = "2025-07-10T13:03:31.821Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0a/46599d7d19b64f4d0fe1b57bdf96a9a40b5c125f0ae0d8899bc22e91fdce/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:76ae6f1dd041f85065d9df77c6bc9c9703da9b5c018479d20262acc3df97d419", size = 1690355, upload-time = "2025-07-10T13:03:34.754Z" }, + { url = "https://files.pythonhosted.org/packages/08/86/b21b682e33d5ca317ef96bd21294984f72379454e689d7da584df1512a19/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a194ace7bc43ce765338ca2dfb5661489317db216ea7ea700b0332878b392cab", size = 1783958, upload-time = "2025-07-10T13:03:36.53Z" }, + { url = "https://files.pythonhosted.org/packages/4f/45/f639482530b1396c365f23c5e3b1ae51c9bc02ba2b2248ca0c855a730059/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:16260e8e03744a6fe3fcb05259eeab8e08342c4c33decf96a9dad9f1187275d0", size = 1804423, upload-time = "2025-07-10T13:03:38.504Z" }, + { url = 
"https://files.pythonhosted.org/packages/7e/e5/39635a9e06eed1d73671bd4079a3caf9cf09a49df08490686f45a710b80e/aiohttp-3.12.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:8c779e5ebbf0e2e15334ea404fcce54009dc069210164a244d2eac8352a44b28", size = 1717479, upload-time = "2025-07-10T13:03:40.158Z" }, + { url = "https://files.pythonhosted.org/packages/51/e1/7f1c77515d369b7419c5b501196526dad3e72800946c0099594c1f0c20b4/aiohttp-3.12.14-cp311-cp311-win32.whl", hash = "sha256:a289f50bf1bd5be227376c067927f78079a7bdeccf8daa6a9e65c38bae14324b", size = 427907, upload-time = "2025-07-10T13:03:41.801Z" }, + { url = "https://files.pythonhosted.org/packages/06/24/a6bf915c85b7a5b07beba3d42b3282936b51e4578b64a51e8e875643c276/aiohttp-3.12.14-cp311-cp311-win_amd64.whl", hash = "sha256:0b8a69acaf06b17e9c54151a6c956339cf46db4ff72b3ac28516d0f7068f4ced", size = 452334, upload-time = "2025-07-10T13:03:43.485Z" }, + { url = "https://files.pythonhosted.org/packages/c3/0d/29026524e9336e33d9767a1e593ae2b24c2b8b09af7c2bd8193762f76b3e/aiohttp-3.12.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a0ecbb32fc3e69bc25efcda7d28d38e987d007096cbbeed04f14a6662d0eee22", size = 701055, upload-time = "2025-07-10T13:03:45.59Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b8/a5e8e583e6c8c1056f4b012b50a03c77a669c2e9bf012b7cf33d6bc4b141/aiohttp-3.12.14-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0400f0ca9bb3e0b02f6466421f253797f6384e9845820c8b05e976398ac1d81a", size = 475670, upload-time = "2025-07-10T13:03:47.249Z" }, + { url = "https://files.pythonhosted.org/packages/29/e8/5202890c9e81a4ec2c2808dd90ffe024952e72c061729e1d49917677952f/aiohttp-3.12.14-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a56809fed4c8a830b5cae18454b7464e1529dbf66f71c4772e3cfa9cbec0a1ff", size = 468513, upload-time = "2025-07-10T13:03:49.377Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/e5/d11db8c23d8923d3484a27468a40737d50f05b05eebbb6288bafcb467356/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27f2e373276e4755691a963e5d11756d093e346119f0627c2d6518208483fb6d", size = 1715309, upload-time = "2025-07-10T13:03:51.556Z" }, + { url = "https://files.pythonhosted.org/packages/53/44/af6879ca0eff7a16b1b650b7ea4a827301737a350a464239e58aa7c387ef/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ca39e433630e9a16281125ef57ece6817afd1d54c9f1bf32e901f38f16035869", size = 1697961, upload-time = "2025-07-10T13:03:53.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/94/18457f043399e1ec0e59ad8674c0372f925363059c276a45a1459e17f423/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c748b3f8b14c77720132b2510a7d9907a03c20ba80f469e58d5dfd90c079a1c", size = 1753055, upload-time = "2025-07-10T13:03:55.368Z" }, + { url = "https://files.pythonhosted.org/packages/26/d9/1d3744dc588fafb50ff8a6226d58f484a2242b5dd93d8038882f55474d41/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a568abe1b15ce69d4cc37e23020720423f0728e3cb1f9bcd3f53420ec3bfe7", size = 1799211, upload-time = "2025-07-10T13:03:57.216Z" }, + { url = "https://files.pythonhosted.org/packages/73/12/2530fb2b08773f717ab2d249ca7a982ac66e32187c62d49e2c86c9bba9b4/aiohttp-3.12.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9888e60c2c54eaf56704b17feb558c7ed6b7439bca1e07d4818ab878f2083660", size = 1718649, upload-time = "2025-07-10T13:03:59.469Z" }, + { url = "https://files.pythonhosted.org/packages/b9/34/8d6015a729f6571341a311061b578e8b8072ea3656b3d72329fa0faa2c7c/aiohttp-3.12.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3006a1dc579b9156de01e7916d38c63dc1ea0679b14627a37edf6151bc530088", size = 1634452, upload-time = "2025-07-10T13:04:01.698Z" }, + { url = "https://files.pythonhosted.org/packages/ff/4b/08b83ea02595a582447aeb0c1986792d0de35fe7a22fb2125d65091cbaf3/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:aa8ec5c15ab80e5501a26719eb48a55f3c567da45c6ea5bb78c52c036b2655c7", size = 1695511, upload-time = "2025-07-10T13:04:04.165Z" }, + { url = "https://files.pythonhosted.org/packages/b5/66/9c7c31037a063eec13ecf1976185c65d1394ded4a5120dd5965e3473cb21/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:39b94e50959aa07844c7fe2206b9f75d63cc3ad1c648aaa755aa257f6f2498a9", size = 1716967, upload-time = "2025-07-10T13:04:06.132Z" }, + { url = "https://files.pythonhosted.org/packages/ba/02/84406e0ad1acb0fb61fd617651ab6de760b2d6a31700904bc0b33bd0894d/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:04c11907492f416dad9885d503fbfc5dcb6768d90cad8639a771922d584609d3", size = 1657620, upload-time = "2025-07-10T13:04:07.944Z" }, + { url = "https://files.pythonhosted.org/packages/07/53/da018f4013a7a179017b9a274b46b9a12cbeb387570f116964f498a6f211/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:88167bd9ab69bb46cee91bd9761db6dfd45b6e76a0438c7e884c3f8160ff21eb", size = 1737179, upload-time = "2025-07-10T13:04:10.182Z" }, + { url = "https://files.pythonhosted.org/packages/49/e8/ca01c5ccfeaafb026d85fa4f43ceb23eb80ea9c1385688db0ef322c751e9/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:791504763f25e8f9f251e4688195e8b455f8820274320204f7eafc467e609425", size = 1765156, upload-time = "2025-07-10T13:04:12.029Z" }, + { url = "https://files.pythonhosted.org/packages/22/32/5501ab525a47ba23c20613e568174d6c63aa09e2caa22cded5c6ea8e3ada/aiohttp-3.12.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2785b112346e435dd3a1a67f67713a3fe692d288542f1347ad255683f066d8e0", size = 1724766, upload-time = 
"2025-07-10T13:04:13.961Z" }, + { url = "https://files.pythonhosted.org/packages/06/af/28e24574801fcf1657945347ee10df3892311c2829b41232be6089e461e7/aiohttp-3.12.14-cp312-cp312-win32.whl", hash = "sha256:15f5f4792c9c999a31d8decf444e79fcfd98497bf98e94284bf390a7bb8c1729", size = 422641, upload-time = "2025-07-10T13:04:16.018Z" }, + { url = "https://files.pythonhosted.org/packages/98/d5/7ac2464aebd2eecac38dbe96148c9eb487679c512449ba5215d233755582/aiohttp-3.12.14-cp312-cp312-win_amd64.whl", hash = "sha256:3b66e1a182879f579b105a80d5c4bd448b91a57e8933564bf41665064796a338", size = 449316, upload-time = "2025-07-10T13:04:18.289Z" }, + { url = "https://files.pythonhosted.org/packages/06/48/e0d2fa8ac778008071e7b79b93ab31ef14ab88804d7ba71b5c964a7c844e/aiohttp-3.12.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:3143a7893d94dc82bc409f7308bc10d60285a3cd831a68faf1aa0836c5c3c767", size = 695471, upload-time = "2025-07-10T13:04:20.124Z" }, + { url = "https://files.pythonhosted.org/packages/8d/e7/f73206afa33100804f790b71092888f47df65fd9a4cd0e6800d7c6826441/aiohttp-3.12.14-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3d62ac3d506cef54b355bd34c2a7c230eb693880001dfcda0bf88b38f5d7af7e", size = 473128, upload-time = "2025-07-10T13:04:21.928Z" }, + { url = "https://files.pythonhosted.org/packages/df/e2/4dd00180be551a6e7ee979c20fc7c32727f4889ee3fd5b0586e0d47f30e1/aiohttp-3.12.14-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48e43e075c6a438937c4de48ec30fa8ad8e6dfef122a038847456bfe7b947b63", size = 465426, upload-time = "2025-07-10T13:04:24.071Z" }, + { url = "https://files.pythonhosted.org/packages/de/dd/525ed198a0bb674a323e93e4d928443a680860802c44fa7922d39436b48b/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:077b4488411a9724cecc436cbc8c133e0d61e694995b8de51aaf351c7578949d", size = 1704252, upload-time = "2025-07-10T13:04:26.049Z" }, + { url = 
"https://files.pythonhosted.org/packages/d8/b1/01e542aed560a968f692ab4fc4323286e8bc4daae83348cd63588e4f33e3/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d8c35632575653f297dcbc9546305b2c1133391089ab925a6a3706dfa775ccab", size = 1685514, upload-time = "2025-07-10T13:04:28.186Z" }, + { url = "https://files.pythonhosted.org/packages/b3/06/93669694dc5fdabdc01338791e70452d60ce21ea0946a878715688d5a191/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b8ce87963f0035c6834b28f061df90cf525ff7c9b6283a8ac23acee6502afd4", size = 1737586, upload-time = "2025-07-10T13:04:30.195Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3a/18991048ffc1407ca51efb49ba8bcc1645961f97f563a6c480cdf0286310/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f0a2cf66e32a2563bb0766eb24eae7e9a269ac0dc48db0aae90b575dc9583026", size = 1786958, upload-time = "2025-07-10T13:04:32.482Z" }, + { url = "https://files.pythonhosted.org/packages/30/a8/81e237f89a32029f9b4a805af6dffc378f8459c7b9942712c809ff9e76e5/aiohttp-3.12.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdea089caf6d5cde975084a884c72d901e36ef9c2fd972c9f51efbbc64e96fbd", size = 1709287, upload-time = "2025-07-10T13:04:34.493Z" }, + { url = "https://files.pythonhosted.org/packages/8c/e3/bd67a11b0fe7fc12c6030473afd9e44223d456f500f7cf526dbaa259ae46/aiohttp-3.12.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7865f27db67d49e81d463da64a59365ebd6b826e0e4847aa111056dcb9dc88", size = 1622990, upload-time = "2025-07-10T13:04:36.433Z" }, + { url = "https://files.pythonhosted.org/packages/83/ba/e0cc8e0f0d9ce0904e3cf2d6fa41904e379e718a013c721b781d53dcbcca/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0ab5b38a6a39781d77713ad930cb5e7feea6f253de656a5f9f281a8f5931b086", size = 1676015, 
upload-time = "2025-07-10T13:04:38.958Z" }, + { url = "https://files.pythonhosted.org/packages/d8/b3/1e6c960520bda094c48b56de29a3d978254637ace7168dd97ddc273d0d6c/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:9b3b15acee5c17e8848d90a4ebc27853f37077ba6aec4d8cb4dbbea56d156933", size = 1707678, upload-time = "2025-07-10T13:04:41.275Z" }, + { url = "https://files.pythonhosted.org/packages/0a/19/929a3eb8c35b7f9f076a462eaa9830b32c7f27d3395397665caa5e975614/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e4c972b0bdaac167c1e53e16a16101b17c6d0ed7eac178e653a07b9f7fad7151", size = 1650274, upload-time = "2025-07-10T13:04:43.483Z" }, + { url = "https://files.pythonhosted.org/packages/22/e5/81682a6f20dd1b18ce3d747de8eba11cbef9b270f567426ff7880b096b48/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7442488b0039257a3bdbc55f7209587911f143fca11df9869578db6c26feeeb8", size = 1726408, upload-time = "2025-07-10T13:04:45.577Z" }, + { url = "https://files.pythonhosted.org/packages/8c/17/884938dffaa4048302985483f77dfce5ac18339aad9b04ad4aaa5e32b028/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f68d3067eecb64c5e9bab4a26aa11bd676f4c70eea9ef6536b0a4e490639add3", size = 1759879, upload-time = "2025-07-10T13:04:47.663Z" }, + { url = "https://files.pythonhosted.org/packages/95/78/53b081980f50b5cf874359bde707a6eacd6c4be3f5f5c93937e48c9d0025/aiohttp-3.12.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f88d3704c8b3d598a08ad17d06006cb1ca52a1182291f04979e305c8be6c9758", size = 1708770, upload-time = "2025-07-10T13:04:49.944Z" }, + { url = "https://files.pythonhosted.org/packages/ed/91/228eeddb008ecbe3ffa6c77b440597fdf640307162f0c6488e72c5a2d112/aiohttp-3.12.14-cp313-cp313-win32.whl", hash = "sha256:a3c99ab19c7bf375c4ae3debd91ca5d394b98b6089a03231d4c580ef3c2ae4c5", size = 421688, upload-time = "2025-07-10T13:04:51.993Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/5f/8427618903343402fdafe2850738f735fd1d9409d2a8f9bcaae5e630d3ba/aiohttp-3.12.14-cp313-cp313-win_amd64.whl", hash = "sha256:3f8aad695e12edc9d571f878c62bedc91adf30c760c8632f09663e5f564f4baa", size = 448098, upload-time = "2025-07-10T13:04:53.999Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = "sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + [[package]] name = "annotated-types" version = "0.7.0" @@ -41,6 +149,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3b/00/2344469e2084fb287c2e0b57b72910309874c3245463acd6cf5e3db69324/appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128", size = 9566, upload-time = "2020-05-11T07:59:49.499Z" }, ] +[[package]] +name = "async-timeout" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/ae/136395dfbfe00dfc94da3f3e136d0b13f394cba8f4841120e34226265780/async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3", size = 9274, upload-time = "2024-11-06T16:41:39.6Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fe/ba/e2081de779ca30d473f21f5b30e0e737c438205440784c7dfc81efc2b029/async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c", size = 6233, upload-time = "2024-11-06T16:41:37.9Z" }, +] + [[package]] name = "attrs" version = "25.3.0" @@ -62,6 +179,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/84/29/587c189bbab1ccc8c86a03a5d0e13873df916380ef1be461ebe6acebf48d/authlib-1.6.0-py2.py3-none-any.whl", hash = "sha256:91685589498f79e8655e8a8947431ad6288831d643f11c55c2143ffcc738048d", size = 239981, upload-time = "2025-05-23T00:21:43.075Z" }, ] +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "beartype" +version = "0.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/f9/21e5a9c731e14f08addd53c71fea2e70794e009de5b98e6a2c3d2f3015d6/beartype-0.21.0.tar.gz", hash = "sha256:f9a5078f5ce87261c2d22851d19b050b64f6a805439e8793aecf01ce660d3244", size = 1437066, upload-time = "2025-05-22T05:09:27.116Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/31/87045d1c66ee10a52486c9d2047bc69f00f2689f69401bb1e998afb4b205/beartype-0.21.0-py3-none-any.whl", hash = 
"sha256:b6a1bd56c72f31b0a496a36cc55df6e2f475db166ad07fa4acc7e74f4c7f34c0", size = 1191340, upload-time = "2025-05-22T05:09:24.606Z" }, +] + [[package]] name = "beautifulsoup4" version = "4.13.4" @@ -86,11 +221,11 @@ wheels = [ [[package]] name = "certifi" -version = "2025.6.15" +version = "2025.7.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/73/f7/f14b46d4bcd21092d7d3ccef689615220d8a08fb25e564b65d20738e672e/certifi-2025.6.15.tar.gz", hash = "sha256:d747aa5a8b9bbbb1bb8c22bb13e22bd1f18e9796defa16bab421f7f7a317323b", size = 158753, upload-time = "2025-06-15T02:45:51.329Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/76/52c535bcebe74590f296d6c77c86dabf761c41980e1347a2422e4aa2ae41/certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995", size = 163981, upload-time = "2025-07-14T03:29:28.449Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/84/ae/320161bd181fc06471eed047ecce67b693fd7515b16d495d8932db763426/certifi-2025.6.15-py3-none-any.whl", hash = "sha256:2e0c7ce7cb5d8f8634ca55d2ba7e6ec2689a2fd6537d8dec1296a477a4910057", size = 157650, upload-time = "2025-06-15T02:45:49.977Z" }, + { url = "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", size = 162722, upload-time = "2025-07-14T03:29:26.863Z" }, ] [[package]] @@ -150,6 +285,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" }, ] +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, +] + [[package]] name = "charset-normalizer" version = "3.4.2" @@ -213,14 +357,14 @@ wheels = [ [[package]] name = "click" -version = "8.2.1" +version = "8.1.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593, upload-time = "2024-12-21T18:38:44.339Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188, upload-time = 
"2024-12-21T18:38:41.666Z" }, ] [[package]] @@ -232,6 +376,75 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] +[[package]] +name = "coverage" +version = "7.9.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/b7/c0465ca253df10a9e8dae0692a4ae6e9726d245390aaef92360e1d6d3832/coverage-7.9.2.tar.gz", hash = "sha256:997024fa51e3290264ffd7492ec97d0690293ccd2b45a6cd7d82d945a4a80c8b", size = 813556, upload-time = "2025-07-03T10:54:15.101Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/0d/5c2114fd776c207bd55068ae8dc1bef63ecd1b767b3389984a8e58f2b926/coverage-7.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:66283a192a14a3854b2e7f3418d7db05cdf411012ab7ff5db98ff3b181e1f912", size = 212039, upload-time = "2025-07-03T10:52:38.955Z" }, + { url = "https://files.pythonhosted.org/packages/cf/ad/dc51f40492dc2d5fcd31bb44577bc0cc8920757d6bc5d3e4293146524ef9/coverage-7.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4e01d138540ef34fcf35c1aa24d06c3de2a4cffa349e29a10056544f35cca15f", size = 212428, upload-time = "2025-07-03T10:52:41.36Z" }, + { url = "https://files.pythonhosted.org/packages/a2/a3/55cb3ff1b36f00df04439c3993d8529193cdf165a2467bf1402539070f16/coverage-7.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f22627c1fe2745ee98d3ab87679ca73a97e75ca75eb5faee48660d060875465f", size = 241534, upload-time = "2025-07-03T10:52:42.956Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c9/a8410b91b6be4f6e9c2e9f0dce93749b6b40b751d7065b4410bf89cb654b/coverage-7.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4b1c2d8363247b46bd51f393f86c94096e64a1cf6906803fa8d5a9d03784bdbf", size = 239408, upload-time = "2025-07-03T10:52:44.199Z" }, + { url = "https://files.pythonhosted.org/packages/ff/c4/6f3e56d467c612b9070ae71d5d3b114c0b899b5788e1ca3c93068ccb7018/coverage-7.9.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c10c882b114faf82dbd33e876d0cbd5e1d1ebc0d2a74ceef642c6152f3f4d547", size = 240552, upload-time = "2025-07-03T10:52:45.477Z" }, + { url = "https://files.pythonhosted.org/packages/fd/20/04eda789d15af1ce79bce5cc5fd64057c3a0ac08fd0576377a3096c24663/coverage-7.9.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:de3c0378bdf7066c3988d66cd5232d161e933b87103b014ab1b0b4676098fa45", size = 240464, upload-time = "2025-07-03T10:52:46.809Z" }, + { url = "https://files.pythonhosted.org/packages/a9/5a/217b32c94cc1a0b90f253514815332d08ec0812194a1ce9cca97dda1cd20/coverage-7.9.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:1e2f097eae0e5991e7623958a24ced3282676c93c013dde41399ff63e230fcf2", size = 239134, upload-time = "2025-07-03T10:52:48.149Z" }, + { url = "https://files.pythonhosted.org/packages/34/73/1d019c48f413465eb5d3b6898b6279e87141c80049f7dbf73fd020138549/coverage-7.9.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:28dc1f67e83a14e7079b6cea4d314bc8b24d1aed42d3582ff89c0295f09b181e", size = 239405, upload-time = "2025-07-03T10:52:49.687Z" }, + { url = "https://files.pythonhosted.org/packages/49/6c/a2beca7aa2595dad0c0d3f350382c381c92400efe5261e2631f734a0e3fe/coverage-7.9.2-cp310-cp310-win32.whl", hash = "sha256:bf7d773da6af9e10dbddacbf4e5cab13d06d0ed93561d44dae0188a42c65be7e", size = 214519, upload-time = "2025-07-03T10:52:51.036Z" }, + { url = "https://files.pythonhosted.org/packages/fc/c8/91e5e4a21f9a51e2c7cdd86e587ae01a4fcff06fc3fa8cde4d6f7cf68df4/coverage-7.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:0c0378ba787681ab1897f7c89b415bd56b0b2d9a47e5a3d8dc0ea55aac118d6c", size = 
215400, upload-time = "2025-07-03T10:52:52.313Z" }, + { url = "https://files.pythonhosted.org/packages/39/40/916786453bcfafa4c788abee4ccd6f592b5b5eca0cd61a32a4e5a7ef6e02/coverage-7.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a7a56a2964a9687b6aba5b5ced6971af308ef6f79a91043c05dd4ee3ebc3e9ba", size = 212152, upload-time = "2025-07-03T10:52:53.562Z" }, + { url = "https://files.pythonhosted.org/packages/9f/66/cc13bae303284b546a030762957322bbbff1ee6b6cb8dc70a40f8a78512f/coverage-7.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123d589f32c11d9be7fe2e66d823a236fe759b0096f5db3fb1b75b2fa414a4fa", size = 212540, upload-time = "2025-07-03T10:52:55.196Z" }, + { url = "https://files.pythonhosted.org/packages/0f/3c/d56a764b2e5a3d43257c36af4a62c379df44636817bb5f89265de4bf8bd7/coverage-7.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:333b2e0ca576a7dbd66e85ab402e35c03b0b22f525eed82681c4b866e2e2653a", size = 245097, upload-time = "2025-07-03T10:52:56.509Z" }, + { url = "https://files.pythonhosted.org/packages/b1/46/bd064ea8b3c94eb4ca5d90e34d15b806cba091ffb2b8e89a0d7066c45791/coverage-7.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:326802760da234baf9f2f85a39e4a4b5861b94f6c8d95251f699e4f73b1835dc", size = 242812, upload-time = "2025-07-03T10:52:57.842Z" }, + { url = "https://files.pythonhosted.org/packages/43/02/d91992c2b29bc7afb729463bc918ebe5f361be7f1daae93375a5759d1e28/coverage-7.9.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19e7be4cfec248df38ce40968c95d3952fbffd57b400d4b9bb580f28179556d2", size = 244617, upload-time = "2025-07-03T10:52:59.239Z" }, + { url = "https://files.pythonhosted.org/packages/b7/4f/8fadff6bf56595a16d2d6e33415841b0163ac660873ed9a4e9046194f779/coverage-7.9.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:0b4a4cb73b9f2b891c1788711408ef9707666501ba23684387277ededab1097c", size = 244263, upload-time = "2025-07-03T10:53:00.601Z" }, + { url = "https://files.pythonhosted.org/packages/9b/d2/e0be7446a2bba11739edb9f9ba4eff30b30d8257370e237418eb44a14d11/coverage-7.9.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2c8937fa16c8c9fbbd9f118588756e7bcdc7e16a470766a9aef912dd3f117dbd", size = 242314, upload-time = "2025-07-03T10:53:01.932Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7d/dcbac9345000121b8b57a3094c2dfcf1ccc52d8a14a40c1d4bc89f936f80/coverage-7.9.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42da2280c4d30c57a9b578bafd1d4494fa6c056d4c419d9689e66d775539be74", size = 242904, upload-time = "2025-07-03T10:53:03.478Z" }, + { url = "https://files.pythonhosted.org/packages/41/58/11e8db0a0c0510cf31bbbdc8caf5d74a358b696302a45948d7c768dfd1cf/coverage-7.9.2-cp311-cp311-win32.whl", hash = "sha256:14fa8d3da147f5fdf9d298cacc18791818f3f1a9f542c8958b80c228320e90c6", size = 214553, upload-time = "2025-07-03T10:53:05.174Z" }, + { url = "https://files.pythonhosted.org/packages/3a/7d/751794ec8907a15e257136e48dc1021b1f671220ecccfd6c4eaf30802714/coverage-7.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:549cab4892fc82004f9739963163fd3aac7a7b0df430669b75b86d293d2df2a7", size = 215441, upload-time = "2025-07-03T10:53:06.472Z" }, + { url = "https://files.pythonhosted.org/packages/62/5b/34abcedf7b946c1c9e15b44f326cb5b0da852885312b30e916f674913428/coverage-7.9.2-cp311-cp311-win_arm64.whl", hash = "sha256:c2667a2b913e307f06aa4e5677f01a9746cd08e4b35e14ebcde6420a9ebb4c62", size = 213873, upload-time = "2025-07-03T10:53:07.699Z" }, + { url = "https://files.pythonhosted.org/packages/53/d7/7deefc6fd4f0f1d4c58051f4004e366afc9e7ab60217ac393f247a1de70a/coverage-7.9.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ae9eb07f1cfacd9cfe8eaee6f4ff4b8a289a668c39c165cd0c8548484920ffc0", size = 212344, upload-time = "2025-07-03T10:53:09.3Z" }, + { url = 
"https://files.pythonhosted.org/packages/95/0c/ee03c95d32be4d519e6a02e601267769ce2e9a91fc8faa1b540e3626c680/coverage-7.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9ce85551f9a1119f02adc46d3014b5ee3f765deac166acf20dbb851ceb79b6f3", size = 212580, upload-time = "2025-07-03T10:53:11.52Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9f/826fa4b544b27620086211b87a52ca67592622e1f3af9e0a62c87aea153a/coverage-7.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8f6389ac977c5fb322e0e38885fbbf901743f79d47f50db706e7644dcdcb6e1", size = 246383, upload-time = "2025-07-03T10:53:13.134Z" }, + { url = "https://files.pythonhosted.org/packages/7f/b3/4477aafe2a546427b58b9c540665feff874f4db651f4d3cb21b308b3a6d2/coverage-7.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff0d9eae8cdfcd58fe7893b88993723583a6ce4dfbfd9f29e001922544f95615", size = 243400, upload-time = "2025-07-03T10:53:14.614Z" }, + { url = "https://files.pythonhosted.org/packages/f8/c2/efffa43778490c226d9d434827702f2dfbc8041d79101a795f11cbb2cf1e/coverage-7.9.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fae939811e14e53ed8a9818dad51d434a41ee09df9305663735f2e2d2d7d959b", size = 245591, upload-time = "2025-07-03T10:53:15.872Z" }, + { url = "https://files.pythonhosted.org/packages/c6/e7/a59888e882c9a5f0192d8627a30ae57910d5d449c80229b55e7643c078c4/coverage-7.9.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:31991156251ec202c798501e0a42bbdf2169dcb0f137b1f5c0f4267f3fc68ef9", size = 245402, upload-time = "2025-07-03T10:53:17.124Z" }, + { url = "https://files.pythonhosted.org/packages/92/a5/72fcd653ae3d214927edc100ce67440ed8a0a1e3576b8d5e6d066ed239db/coverage-7.9.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d0d67963f9cbfc7c7f96d4ac74ed60ecbebd2ea6eeb51887af0f8dce205e545f", size = 243583, upload-time = "2025-07-03T10:53:18.781Z" 
}, + { url = "https://files.pythonhosted.org/packages/5c/f5/84e70e4df28f4a131d580d7d510aa1ffd95037293da66fd20d446090a13b/coverage-7.9.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:49b752a2858b10580969ec6af6f090a9a440a64a301ac1528d7ca5f7ed497f4d", size = 244815, upload-time = "2025-07-03T10:53:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/39/e7/d73d7cbdbd09fdcf4642655ae843ad403d9cbda55d725721965f3580a314/coverage-7.9.2-cp312-cp312-win32.whl", hash = "sha256:88d7598b8ee130f32f8a43198ee02edd16d7f77692fa056cb779616bbea1b355", size = 214719, upload-time = "2025-07-03T10:53:21.521Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d6/7486dcc3474e2e6ad26a2af2db7e7c162ccd889c4c68fa14ea8ec189c9e9/coverage-7.9.2-cp312-cp312-win_amd64.whl", hash = "sha256:9dfb070f830739ee49d7c83e4941cc767e503e4394fdecb3b54bfdac1d7662c0", size = 215509, upload-time = "2025-07-03T10:53:22.853Z" }, + { url = "https://files.pythonhosted.org/packages/b7/34/0439f1ae2593b0346164d907cdf96a529b40b7721a45fdcf8b03c95fcd90/coverage-7.9.2-cp312-cp312-win_arm64.whl", hash = "sha256:4e2c058aef613e79df00e86b6d42a641c877211384ce5bd07585ed7ba71ab31b", size = 213910, upload-time = "2025-07-03T10:53:24.472Z" }, + { url = "https://files.pythonhosted.org/packages/94/9d/7a8edf7acbcaa5e5c489a646226bed9591ee1c5e6a84733c0140e9ce1ae1/coverage-7.9.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:985abe7f242e0d7bba228ab01070fde1d6c8fa12f142e43debe9ed1dde686038", size = 212367, upload-time = "2025-07-03T10:53:25.811Z" }, + { url = "https://files.pythonhosted.org/packages/e8/9e/5cd6f130150712301f7e40fb5865c1bc27b97689ec57297e568d972eec3c/coverage-7.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82c3939264a76d44fde7f213924021ed31f55ef28111a19649fec90c0f109e6d", size = 212632, upload-time = "2025-07-03T10:53:27.075Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/de/6287a2c2036f9fd991c61cefa8c64e57390e30c894ad3aa52fac4c1e14a8/coverage-7.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae5d563e970dbe04382f736ec214ef48103d1b875967c89d83c6e3f21706d5b3", size = 245793, upload-time = "2025-07-03T10:53:28.408Z" }, + { url = "https://files.pythonhosted.org/packages/06/cc/9b5a9961d8160e3cb0b558c71f8051fe08aa2dd4b502ee937225da564ed1/coverage-7.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdd612e59baed2a93c8843c9a7cb902260f181370f1d772f4842987535071d14", size = 243006, upload-time = "2025-07-03T10:53:29.754Z" }, + { url = "https://files.pythonhosted.org/packages/49/d9/4616b787d9f597d6443f5588619c1c9f659e1f5fc9eebf63699eb6d34b78/coverage-7.9.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:256ea87cb2a1ed992bcdfc349d8042dcea1b80436f4ddf6e246d6bee4b5d73b6", size = 244990, upload-time = "2025-07-03T10:53:31.098Z" }, + { url = "https://files.pythonhosted.org/packages/48/83/801cdc10f137b2d02b005a761661649ffa60eb173dcdaeb77f571e4dc192/coverage-7.9.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f44ae036b63c8ea432f610534a2668b0c3aee810e7037ab9d8ff6883de480f5b", size = 245157, upload-time = "2025-07-03T10:53:32.717Z" }, + { url = "https://files.pythonhosted.org/packages/c8/a4/41911ed7e9d3ceb0ffb019e7635468df7499f5cc3edca5f7dfc078e9c5ec/coverage-7.9.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82d76ad87c932935417a19b10cfe7abb15fd3f923cfe47dbdaa74ef4e503752d", size = 243128, upload-time = "2025-07-03T10:53:34.009Z" }, + { url = "https://files.pythonhosted.org/packages/10/41/344543b71d31ac9cb00a664d5d0c9ef134a0fe87cb7d8430003b20fa0b7d/coverage-7.9.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:619317bb86de4193debc712b9e59d5cffd91dc1d178627ab2a77b9870deb2868", size = 244511, upload-time = 
"2025-07-03T10:53:35.434Z" }, + { url = "https://files.pythonhosted.org/packages/d5/81/3b68c77e4812105e2a060f6946ba9e6f898ddcdc0d2bfc8b4b152a9ae522/coverage-7.9.2-cp313-cp313-win32.whl", hash = "sha256:0a07757de9feb1dfafd16ab651e0f628fd7ce551604d1bf23e47e1ddca93f08a", size = 214765, upload-time = "2025-07-03T10:53:36.787Z" }, + { url = "https://files.pythonhosted.org/packages/06/a2/7fac400f6a346bb1a4004eb2a76fbff0e242cd48926a2ce37a22a6a1d917/coverage-7.9.2-cp313-cp313-win_amd64.whl", hash = "sha256:115db3d1f4d3f35f5bb021e270edd85011934ff97c8797216b62f461dd69374b", size = 215536, upload-time = "2025-07-03T10:53:38.188Z" }, + { url = "https://files.pythonhosted.org/packages/08/47/2c6c215452b4f90d87017e61ea0fd9e0486bb734cb515e3de56e2c32075f/coverage-7.9.2-cp313-cp313-win_arm64.whl", hash = "sha256:48f82f889c80af8b2a7bb6e158d95a3fbec6a3453a1004d04e4f3b5945a02694", size = 213943, upload-time = "2025-07-03T10:53:39.492Z" }, + { url = "https://files.pythonhosted.org/packages/a3/46/e211e942b22d6af5e0f323faa8a9bc7c447a1cf1923b64c47523f36ed488/coverage-7.9.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:55a28954545f9d2f96870b40f6c3386a59ba8ed50caf2d949676dac3ecab99f5", size = 213088, upload-time = "2025-07-03T10:53:40.874Z" }, + { url = "https://files.pythonhosted.org/packages/d2/2f/762551f97e124442eccd907bf8b0de54348635b8866a73567eb4e6417acf/coverage-7.9.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cdef6504637731a63c133bb2e6f0f0214e2748495ec15fe42d1e219d1b133f0b", size = 213298, upload-time = "2025-07-03T10:53:42.218Z" }, + { url = "https://files.pythonhosted.org/packages/7a/b7/76d2d132b7baf7360ed69be0bcab968f151fa31abe6d067f0384439d9edb/coverage-7.9.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bcd5ebe66c7a97273d5d2ddd4ad0ed2e706b39630ed4b53e713d360626c3dbb3", size = 256541, upload-time = "2025-07-03T10:53:43.823Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/17/392b219837d7ad47d8e5974ce5f8dc3deb9f99a53b3bd4d123602f960c81/coverage-7.9.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9303aed20872d7a3c9cb39c5d2b9bdbe44e3a9a1aecb52920f7e7495410dfab8", size = 252761, upload-time = "2025-07-03T10:53:45.19Z" }, + { url = "https://files.pythonhosted.org/packages/d5/77/4256d3577fe1b0daa8d3836a1ebe68eaa07dd2cbaf20cf5ab1115d6949d4/coverage-7.9.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc18ea9e417a04d1920a9a76fe9ebd2f43ca505b81994598482f938d5c315f46", size = 254917, upload-time = "2025-07-03T10:53:46.931Z" }, + { url = "https://files.pythonhosted.org/packages/53/99/fc1a008eef1805e1ddb123cf17af864743354479ea5129a8f838c433cc2c/coverage-7.9.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6406cff19880aaaadc932152242523e892faff224da29e241ce2fca329866584", size = 256147, upload-time = "2025-07-03T10:53:48.289Z" }, + { url = "https://files.pythonhosted.org/packages/92/c0/f63bf667e18b7f88c2bdb3160870e277c4874ced87e21426128d70aa741f/coverage-7.9.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2d0d4f6ecdf37fcc19c88fec3e2277d5dee740fb51ffdd69b9579b8c31e4232e", size = 254261, upload-time = "2025-07-03T10:53:49.99Z" }, + { url = "https://files.pythonhosted.org/packages/8c/32/37dd1c42ce3016ff8ec9e4b607650d2e34845c0585d3518b2a93b4830c1a/coverage-7.9.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c33624f50cf8de418ab2b4d6ca9eda96dc45b2c4231336bac91454520e8d1fac", size = 255099, upload-time = "2025-07-03T10:53:51.354Z" }, + { url = "https://files.pythonhosted.org/packages/da/2e/af6b86f7c95441ce82f035b3affe1cd147f727bbd92f563be35e2d585683/coverage-7.9.2-cp313-cp313t-win32.whl", hash = "sha256:1df6b76e737c6a92210eebcb2390af59a141f9e9430210595251fbaf02d46926", size = 215440, upload-time = "2025-07-03T10:53:52.808Z" }, + { url = 
"https://files.pythonhosted.org/packages/4d/bb/8a785d91b308867f6b2e36e41c569b367c00b70c17f54b13ac29bcd2d8c8/coverage-7.9.2-cp313-cp313t-win_amd64.whl", hash = "sha256:f5fd54310b92741ebe00d9c0d1d7b2b27463952c022da6d47c175d246a98d1bd", size = 216537, upload-time = "2025-07-03T10:53:54.273Z" }, + { url = "https://files.pythonhosted.org/packages/1d/a0/a6bffb5e0f41a47279fd45a8f3155bf193f77990ae1c30f9c224b61cacb0/coverage-7.9.2-cp313-cp313t-win_arm64.whl", hash = "sha256:c48c2375287108c887ee87d13b4070a381c6537d30e8487b24ec721bf2a781cb", size = 214398, upload-time = "2025-07-03T10:53:56.715Z" }, + { url = "https://files.pythonhosted.org/packages/d7/85/f8bbefac27d286386961c25515431482a425967e23d3698b75a250872924/coverage-7.9.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:8a1166db2fb62473285bcb092f586e081e92656c7dfa8e9f62b4d39d7e6b5050", size = 204013, upload-time = "2025-07-03T10:54:12.084Z" }, + { url = "https://files.pythonhosted.org/packages/3c/38/bbe2e63902847cf79036ecc75550d0698af31c91c7575352eb25190d0fb3/coverage-7.9.2-py3-none-any.whl", hash = "sha256:e425cd5b00f6fc0ed7cdbd766c70be8baab4b7839e4d4fe5fac48581dd968ea4", size = 204005, upload-time = "2025-07-03T10:54:13.491Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + [[package]] name = "cryptography" version = "45.0.5" @@ -279,6 +492,22 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f6/34/31a1604c9a9ade0fdab61eb48570e09a796f4d9836121266447b0eaf7feb/cryptography-45.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:e357286c1b76403dd384d938f93c46b2b058ed4dfcdce64a770f0537ed3feb6f", size = 3331106, upload-time = "2025-07-02T13:06:18.058Z" }, ] +[[package]] +name = "cyclopts" +version = "3.22.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "docstring-parser", marker = "python_full_version < '4.0'" }, + { name = "rich" }, + { name = "rich-rst" }, + { name = 
"typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cc/2e/8c45ef5b00bd48d7cabbf6f90b7f12df4c232755cd46e6dbc6690f9ac0c5/cyclopts-3.22.2.tar.gz", hash = "sha256:d3495231af6ae86479579777d212ddf77b113200f828badeaf401162ed87227d", size = 74520, upload-time = "2025-07-09T12:21:46.866Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/5b/5939e05d87def1612c494429bee705d6b852fad1d21dd2dee1e3ce39997e/cyclopts-3.22.2-py3-none-any.whl", hash = "sha256:6681b0815fa2de2bccc364468fd25b15aa9617cb505c0b16ca62e2b18a57619e", size = 84578, upload-time = "2025-07-09T12:21:44.878Z" }, +] + [[package]] name = "db-dtypes" version = "1.4.3" @@ -295,6 +524,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/db/91/66065d933b4814295fd0ddc16a66ef193dff14bf8d15895723f38640a3ab/db_dtypes-1.4.3-py3-none-any.whl", hash = "sha256:a1c92b819af947fae1701d80a71f2a0eac08f825ca52cf0c68aeba80577ae966", size = 18110, upload-time = "2025-05-12T13:54:20.146Z" }, ] +[[package]] +name = "distlib" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/8e/709914eb2b5749865801041647dc7f4e6d00b549cfe88b65ca192995f07c/distlib-0.4.0.tar.gz", hash = "sha256:feec40075be03a04501a973d81f633735b4b69f98b05450592310c0f401a4e0d", size = 614605, upload-time = "2025-07-17T16:52:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" }, +] + [[package]] name = "dnspython" version = "2.7.0" @@ -304,6 +542,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = 
"sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" }, ] +[[package]] +name = "docstring-parser" +version = "0.16" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/08/12/9c22a58c0b1e29271051222d8906257616da84135af9ed167c9e28f85cb3/docstring_parser-0.16.tar.gz", hash = "sha256:538beabd0af1e2db0146b6bd3caa526c35a34d61af9fd2887f3a8a27a739aa6e", size = 26565, upload-time = "2024-03-15T10:39:44.419Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/7c/e9fcff7623954d86bdc17782036cbf715ecab1bec4847c008557affe1ca8/docstring_parser-0.16-py3-none-any.whl", hash = "sha256:bf0a1387354d3691d102edef7ec124f219ef639982d096e26e3b60aeffa90637", size = 36533, upload-time = "2024-03-15T10:39:41.527Z" }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, +] + [[package]] name = "ecdsa" version = "0.19.1" @@ -343,22 +599,126 @@ wheels = [ [[package]] name = "fastmcp" -version = "2.10.2" +version = "2.10.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "authlib" }, + { name = "cyclopts" }, { name = "exceptiongroup" }, { name = "httpx" }, { name = "mcp" }, { name = "openapi-pydantic" }, { name = "pydantic", extra = ["email"] }, + { name = "pyperclip" }, { name = 
"python-dotenv" }, { name = "rich" }, - { name = "typer" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/ac/4906336669a643b634b7ecde539c79126dff9f47b1e9e4b312dd170c1715/fastmcp-2.10.2.tar.gz", hash = "sha256:1ad519fda8bfde1de7f54a7ed4be0b6353eb66230cc1e35eef29b47666a0027b", size = 2738203, upload-time = "2025-07-05T17:30:59.437Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/44/f6/df91b178740108bcee4c6df732369dfa9250578a979caac4cbd1e5d8b42c/fastmcp-2.10.2-py3-none-any.whl", hash = "sha256:3e5929772d5d22bad03581c2c4db40e008926309180168d48f3a7eac678ae645", size = 185331, upload-time = "2025-07-05T17:30:58.21Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/ec/08/7e9c8dc9c2712ccc6393383ef6d7999b84f658ee37cabc42f853e72f86e1/fastmcp-2.10.5.tar.gz", hash = "sha256:f829e0b11c4d136db1d81e20e8acb19cf5108f64059482d1853f3c940326cf04", size = 1618410, upload-time = "2025-07-11T22:23:32.968Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/74/453a1e6d7673b831a04ac0167d34a3c21cf2a17d55b4d242f262474fff1f/fastmcp-2.10.5-py3-none-any.whl", hash = "sha256:ab218f6a66b61f6f83c413d37aa18f5c30882c44c8925f39ecd02dd855826540", size = 201275, upload-time = "2025-07-11T22:23:31.314Z" }, +] + +[[package]] +name = "filelock" +version = "3.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0a/10/c23352565a6544bdc5353e0b15fc1c563352101f30e24bf500207a54df9a/filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2", size = 18075, upload-time = "2025-03-14T07:11:40.47Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/36/2a115987e2d8c300a974597416d9de88f2444426de9571f4b59b2cca3acc/filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de", size = 16215, upload-time = "2025-03-14T07:11:39.145Z" }, +] + +[[package]] +name = "frozenlist" +version 
= "1.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/79/b1/b64018016eeb087db503b038296fd782586432b9c077fc5c7839e9cb6ef6/frozenlist-1.7.0.tar.gz", hash = "sha256:2e310d81923c2437ea8670467121cc3e9b0f76d3043cc1d2331d56c7fb7a3a8f", size = 45078, upload-time = "2025-06-09T23:02:35.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/36/0da0a49409f6b47cc2d060dc8c9040b897b5902a8a4e37d9bc1deb11f680/frozenlist-1.7.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cc4df77d638aa2ed703b878dd093725b72a824c3c546c076e8fdf276f78ee84a", size = 81304, upload-time = "2025-06-09T22:59:46.226Z" }, + { url = "https://files.pythonhosted.org/packages/77/f0/77c11d13d39513b298e267b22eb6cb559c103d56f155aa9a49097221f0b6/frozenlist-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:716a9973a2cc963160394f701964fe25012600f3d311f60c790400b00e568b61", size = 47735, upload-time = "2025-06-09T22:59:48.133Z" }, + { url = "https://files.pythonhosted.org/packages/37/12/9d07fa18971a44150593de56b2f2947c46604819976784bcf6ea0d5db43b/frozenlist-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a0fd1bad056a3600047fb9462cff4c5322cebc59ebf5d0a3725e0ee78955001d", size = 46775, upload-time = "2025-06-09T22:59:49.564Z" }, + { url = "https://files.pythonhosted.org/packages/70/34/f73539227e06288fcd1f8a76853e755b2b48bca6747e99e283111c18bcd4/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3789ebc19cb811163e70fe2bd354cea097254ce6e707ae42e56f45e31e96cb8e", size = 224644, upload-time = "2025-06-09T22:59:51.35Z" }, + { url = "https://files.pythonhosted.org/packages/fb/68/c1d9c2f4a6e438e14613bad0f2973567586610cc22dcb1e1241da71de9d3/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af369aa35ee34f132fcfad5be45fbfcde0e3a5f6a1ec0712857f286b7d20cca9", size = 222125, upload-time = "2025-06-09T22:59:52.884Z" }, 
+ { url = "https://files.pythonhosted.org/packages/b9/d0/98e8f9a515228d708344d7c6986752be3e3192d1795f748c24bcf154ad99/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac64b6478722eeb7a3313d494f8342ef3478dff539d17002f849101b212ef97c", size = 233455, upload-time = "2025-06-09T22:59:54.74Z" }, + { url = "https://files.pythonhosted.org/packages/79/df/8a11bcec5600557f40338407d3e5bea80376ed1c01a6c0910fcfdc4b8993/frozenlist-1.7.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f89f65d85774f1797239693cef07ad4c97fdd0639544bad9ac4b869782eb1981", size = 227339, upload-time = "2025-06-09T22:59:56.187Z" }, + { url = "https://files.pythonhosted.org/packages/50/82/41cb97d9c9a5ff94438c63cc343eb7980dac4187eb625a51bdfdb7707314/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1073557c941395fdfcfac13eb2456cb8aad89f9de27bae29fabca8e563b12615", size = 212969, upload-time = "2025-06-09T22:59:57.604Z" }, + { url = "https://files.pythonhosted.org/packages/13/47/f9179ee5ee4f55629e4f28c660b3fdf2775c8bfde8f9c53f2de2d93f52a9/frozenlist-1.7.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed8d2fa095aae4bdc7fdd80351009a48d286635edffee66bf865e37a9125c50", size = 222862, upload-time = "2025-06-09T22:59:59.498Z" }, + { url = "https://files.pythonhosted.org/packages/1a/52/df81e41ec6b953902c8b7e3a83bee48b195cb0e5ec2eabae5d8330c78038/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:24c34bea555fe42d9f928ba0a740c553088500377448febecaa82cc3e88aa1fa", size = 222492, upload-time = "2025-06-09T23:00:01.026Z" }, + { url = "https://files.pythonhosted.org/packages/84/17/30d6ea87fa95a9408245a948604b82c1a4b8b3e153cea596421a2aef2754/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:69cac419ac6a6baad202c85aaf467b65ac860ac2e7f2ac1686dc40dbb52f6577", size = 
238250, upload-time = "2025-06-09T23:00:03.401Z" }, + { url = "https://files.pythonhosted.org/packages/8f/00/ecbeb51669e3c3df76cf2ddd66ae3e48345ec213a55e3887d216eb4fbab3/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:960d67d0611f4c87da7e2ae2eacf7ea81a5be967861e0c63cf205215afbfac59", size = 218720, upload-time = "2025-06-09T23:00:05.282Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c0/c224ce0e0eb31cc57f67742071bb470ba8246623c1823a7530be0e76164c/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:41be2964bd4b15bf575e5daee5a5ce7ed3115320fb3c2b71fca05582ffa4dc9e", size = 232585, upload-time = "2025-06-09T23:00:07.962Z" }, + { url = "https://files.pythonhosted.org/packages/55/3c/34cb694abf532f31f365106deebdeac9e45c19304d83cf7d51ebbb4ca4d1/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:46d84d49e00c9429238a7ce02dc0be8f6d7cd0cd405abd1bebdc991bf27c15bd", size = 234248, upload-time = "2025-06-09T23:00:09.428Z" }, + { url = "https://files.pythonhosted.org/packages/98/c0/2052d8b6cecda2e70bd81299e3512fa332abb6dcd2969b9c80dfcdddbf75/frozenlist-1.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:15900082e886edb37480335d9d518cec978afc69ccbc30bd18610b7c1b22a718", size = 221621, upload-time = "2025-06-09T23:00:11.32Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bf/7dcebae315436903b1d98ffb791a09d674c88480c158aa171958a3ac07f0/frozenlist-1.7.0-cp310-cp310-win32.whl", hash = "sha256:400ddd24ab4e55014bba442d917203c73b2846391dd42ca5e38ff52bb18c3c5e", size = 39578, upload-time = "2025-06-09T23:00:13.526Z" }, + { url = "https://files.pythonhosted.org/packages/8f/5f/f69818f017fa9a3d24d1ae39763e29b7f60a59e46d5f91b9c6b21622f4cd/frozenlist-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:6eb93efb8101ef39d32d50bce242c84bcbddb4f7e9febfa7b524532a239b4464", size = 43830, upload-time = "2025-06-09T23:00:14.98Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/7e/803dde33760128acd393a27eb002f2020ddb8d99d30a44bfbaab31c5f08a/frozenlist-1.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:aa51e147a66b2d74de1e6e2cf5921890de6b0f4820b257465101d7f37b49fb5a", size = 82251, upload-time = "2025-06-09T23:00:16.279Z" }, + { url = "https://files.pythonhosted.org/packages/75/a9/9c2c5760b6ba45eae11334db454c189d43d34a4c0b489feb2175e5e64277/frozenlist-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b35db7ce1cd71d36ba24f80f0c9e7cff73a28d7a74e91fe83e23d27c7828750", size = 48183, upload-time = "2025-06-09T23:00:17.698Z" }, + { url = "https://files.pythonhosted.org/packages/47/be/4038e2d869f8a2da165f35a6befb9158c259819be22eeaf9c9a8f6a87771/frozenlist-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34a69a85e34ff37791e94542065c8416c1afbf820b68f720452f636d5fb990cd", size = 47107, upload-time = "2025-06-09T23:00:18.952Z" }, + { url = "https://files.pythonhosted.org/packages/79/26/85314b8a83187c76a37183ceed886381a5f992975786f883472fcb6dc5f2/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a646531fa8d82c87fe4bb2e596f23173caec9185bfbca5d583b4ccfb95183e2", size = 237333, upload-time = "2025-06-09T23:00:20.275Z" }, + { url = "https://files.pythonhosted.org/packages/1f/fd/e5b64f7d2c92a41639ffb2ad44a6a82f347787abc0c7df5f49057cf11770/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:79b2ffbba483f4ed36a0f236ccb85fbb16e670c9238313709638167670ba235f", size = 231724, upload-time = "2025-06-09T23:00:21.705Z" }, + { url = "https://files.pythonhosted.org/packages/20/fb/03395c0a43a5976af4bf7534759d214405fbbb4c114683f434dfdd3128ef/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a26f205c9ca5829cbf82bb2a84b5c36f7184c4316617d7ef1b271a56720d6b30", size = 245842, upload-time = "2025-06-09T23:00:23.148Z" }, + { url = 
"https://files.pythonhosted.org/packages/d0/15/c01c8e1dffdac5d9803507d824f27aed2ba76b6ed0026fab4d9866e82f1f/frozenlist-1.7.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bcacfad3185a623fa11ea0e0634aac7b691aa925d50a440f39b458e41c561d98", size = 239767, upload-time = "2025-06-09T23:00:25.103Z" }, + { url = "https://files.pythonhosted.org/packages/14/99/3f4c6fe882c1f5514b6848aa0a69b20cb5e5d8e8f51a339d48c0e9305ed0/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72c1b0fe8fe451b34f12dce46445ddf14bd2a5bcad7e324987194dc8e3a74c86", size = 224130, upload-time = "2025-06-09T23:00:27.061Z" }, + { url = "https://files.pythonhosted.org/packages/4d/83/220a374bd7b2aeba9d0725130665afe11de347d95c3620b9b82cc2fcab97/frozenlist-1.7.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61d1a5baeaac6c0798ff6edfaeaa00e0e412d49946c53fae8d4b8e8b3566c4ae", size = 235301, upload-time = "2025-06-09T23:00:29.02Z" }, + { url = "https://files.pythonhosted.org/packages/03/3c/3e3390d75334a063181625343e8daab61b77e1b8214802cc4e8a1bb678fc/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7edf5c043c062462f09b6820de9854bf28cc6cc5b6714b383149745e287181a8", size = 234606, upload-time = "2025-06-09T23:00:30.514Z" }, + { url = "https://files.pythonhosted.org/packages/23/1e/58232c19608b7a549d72d9903005e2d82488f12554a32de2d5fb59b9b1ba/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:d50ac7627b3a1bd2dcef6f9da89a772694ec04d9a61b66cf87f7d9446b4a0c31", size = 248372, upload-time = "2025-06-09T23:00:31.966Z" }, + { url = "https://files.pythonhosted.org/packages/c0/a4/e4a567e01702a88a74ce8a324691e62a629bf47d4f8607f24bf1c7216e7f/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce48b2fece5aeb45265bb7a58259f45027db0abff478e3077e12b05b17fb9da7", size = 229860, upload-time = 
"2025-06-09T23:00:33.375Z" }, + { url = "https://files.pythonhosted.org/packages/73/a6/63b3374f7d22268b41a9db73d68a8233afa30ed164c46107b33c4d18ecdd/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:fe2365ae915a1fafd982c146754e1de6ab3478def8a59c86e1f7242d794f97d5", size = 245893, upload-time = "2025-06-09T23:00:35.002Z" }, + { url = "https://files.pythonhosted.org/packages/6d/eb/d18b3f6e64799a79673c4ba0b45e4cfbe49c240edfd03a68be20002eaeaa/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:45a6f2fdbd10e074e8814eb98b05292f27bad7d1883afbe009d96abdcf3bc898", size = 246323, upload-time = "2025-06-09T23:00:36.468Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f5/720f3812e3d06cd89a1d5db9ff6450088b8f5c449dae8ffb2971a44da506/frozenlist-1.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:21884e23cffabb157a9dd7e353779077bf5b8f9a58e9b262c6caad2ef5f80a56", size = 233149, upload-time = "2025-06-09T23:00:37.963Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/03efbf545e217d5db8446acfd4c447c15b7c8cf4dbd4a58403111df9322d/frozenlist-1.7.0-cp311-cp311-win32.whl", hash = "sha256:284d233a8953d7b24f9159b8a3496fc1ddc00f4db99c324bd5fb5f22d8698ea7", size = 39565, upload-time = "2025-06-09T23:00:39.753Z" }, + { url = "https://files.pythonhosted.org/packages/58/17/fe61124c5c333ae87f09bb67186d65038834a47d974fc10a5fadb4cc5ae1/frozenlist-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:387cbfdcde2f2353f19c2f66bbb52406d06ed77519ac7ee21be0232147c2592d", size = 44019, upload-time = "2025-06-09T23:00:40.988Z" }, + { url = "https://files.pythonhosted.org/packages/ef/a2/c8131383f1e66adad5f6ecfcce383d584ca94055a34d683bbb24ac5f2f1c/frozenlist-1.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3dbf9952c4bb0e90e98aec1bd992b3318685005702656bc6f67c1a32b76787f2", size = 81424, upload-time = "2025-06-09T23:00:42.24Z" }, + { url = 
"https://files.pythonhosted.org/packages/4c/9d/02754159955088cb52567337d1113f945b9e444c4960771ea90eb73de8db/frozenlist-1.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1f5906d3359300b8a9bb194239491122e6cf1444c2efb88865426f170c262cdb", size = 47952, upload-time = "2025-06-09T23:00:43.481Z" }, + { url = "https://files.pythonhosted.org/packages/01/7a/0046ef1bd6699b40acd2067ed6d6670b4db2f425c56980fa21c982c2a9db/frozenlist-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3dabd5a8f84573c8d10d8859a50ea2dec01eea372031929871368c09fa103478", size = 46688, upload-time = "2025-06-09T23:00:44.793Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a2/a910bafe29c86997363fb4c02069df4ff0b5bc39d33c5198b4e9dd42d8f8/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa57daa5917f1738064f302bf2626281a1cb01920c32f711fbc7bc36111058a8", size = 243084, upload-time = "2025-06-09T23:00:46.125Z" }, + { url = "https://files.pythonhosted.org/packages/64/3e/5036af9d5031374c64c387469bfcc3af537fc0f5b1187d83a1cf6fab1639/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c193dda2b6d49f4c4398962810fa7d7c78f032bf45572b3e04dd5249dff27e08", size = 233524, upload-time = "2025-06-09T23:00:47.73Z" }, + { url = "https://files.pythonhosted.org/packages/06/39/6a17b7c107a2887e781a48ecf20ad20f1c39d94b2a548c83615b5b879f28/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bfe2b675cf0aaa6d61bf8fbffd3c274b3c9b7b1623beb3809df8a81399a4a9c4", size = 248493, upload-time = "2025-06-09T23:00:49.742Z" }, + { url = "https://files.pythonhosted.org/packages/be/00/711d1337c7327d88c44d91dd0f556a1c47fb99afc060ae0ef66b4d24793d/frozenlist-1.7.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8fc5d5cda37f62b262405cf9652cf0856839c4be8ee41be0afe8858f17f4c94b", size = 244116, upload-time = "2025-06-09T23:00:51.352Z" }, + { url = 
"https://files.pythonhosted.org/packages/24/fe/74e6ec0639c115df13d5850e75722750adabdc7de24e37e05a40527ca539/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0d5ce521d1dd7d620198829b87ea002956e4319002ef0bc8d3e6d045cb4646e", size = 224557, upload-time = "2025-06-09T23:00:52.855Z" }, + { url = "https://files.pythonhosted.org/packages/8d/db/48421f62a6f77c553575201e89048e97198046b793f4a089c79a6e3268bd/frozenlist-1.7.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:488d0a7d6a0008ca0db273c542098a0fa9e7dfaa7e57f70acef43f32b3f69dca", size = 241820, upload-time = "2025-06-09T23:00:54.43Z" }, + { url = "https://files.pythonhosted.org/packages/1d/fa/cb4a76bea23047c8462976ea7b7a2bf53997a0ca171302deae9d6dd12096/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:15a7eaba63983d22c54d255b854e8108e7e5f3e89f647fc854bd77a237e767df", size = 236542, upload-time = "2025-06-09T23:00:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/5d/32/476a4b5cfaa0ec94d3f808f193301debff2ea42288a099afe60757ef6282/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1eaa7e9c6d15df825bf255649e05bd8a74b04a4d2baa1ae46d9c2d00b2ca2cb5", size = 249350, upload-time = "2025-06-09T23:00:58.468Z" }, + { url = "https://files.pythonhosted.org/packages/8d/ba/9a28042f84a6bf8ea5dbc81cfff8eaef18d78b2a1ad9d51c7bc5b029ad16/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e4389e06714cfa9d47ab87f784a7c5be91d3934cd6e9a7b85beef808297cc025", size = 225093, upload-time = "2025-06-09T23:01:00.015Z" }, + { url = "https://files.pythonhosted.org/packages/bc/29/3a32959e68f9cf000b04e79ba574527c17e8842e38c91d68214a37455786/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:73bd45e1488c40b63fe5a7df892baf9e2a4d4bb6409a2b3b78ac1c6236178e01", size = 245482, upload-time = "2025-06-09T23:01:01.474Z" }, + { 
url = "https://files.pythonhosted.org/packages/80/e8/edf2f9e00da553f07f5fa165325cfc302dead715cab6ac8336a5f3d0adc2/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:99886d98e1643269760e5fe0df31e5ae7050788dd288947f7f007209b8c33f08", size = 249590, upload-time = "2025-06-09T23:01:02.961Z" }, + { url = "https://files.pythonhosted.org/packages/1c/80/9a0eb48b944050f94cc51ee1c413eb14a39543cc4f760ed12657a5a3c45a/frozenlist-1.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:290a172aae5a4c278c6da8a96222e6337744cd9c77313efe33d5670b9f65fc43", size = 237785, upload-time = "2025-06-09T23:01:05.095Z" }, + { url = "https://files.pythonhosted.org/packages/f3/74/87601e0fb0369b7a2baf404ea921769c53b7ae00dee7dcfe5162c8c6dbf0/frozenlist-1.7.0-cp312-cp312-win32.whl", hash = "sha256:426c7bc70e07cfebc178bc4c2bf2d861d720c4fff172181eeb4a4c41d4ca2ad3", size = 39487, upload-time = "2025-06-09T23:01:06.54Z" }, + { url = "https://files.pythonhosted.org/packages/0b/15/c026e9a9fc17585a9d461f65d8593d281fedf55fbf7eb53f16c6df2392f9/frozenlist-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:563b72efe5da92e02eb68c59cb37205457c977aa7a449ed1b37e6939e5c47c6a", size = 43874, upload-time = "2025-06-09T23:01:07.752Z" }, + { url = "https://files.pythonhosted.org/packages/24/90/6b2cebdabdbd50367273c20ff6b57a3dfa89bd0762de02c3a1eb42cb6462/frozenlist-1.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee80eeda5e2a4e660651370ebffd1286542b67e268aa1ac8d6dbe973120ef7ee", size = 79791, upload-time = "2025-06-09T23:01:09.368Z" }, + { url = "https://files.pythonhosted.org/packages/83/2e/5b70b6a3325363293fe5fc3ae74cdcbc3e996c2a11dde2fd9f1fb0776d19/frozenlist-1.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d1a81c85417b914139e3a9b995d4a1c84559afc839a93cf2cb7f15e6e5f6ed2d", size = 47165, upload-time = "2025-06-09T23:01:10.653Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/25/a0895c99270ca6966110f4ad98e87e5662eab416a17e7fd53c364bf8b954/frozenlist-1.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cbb65198a9132ebc334f237d7b0df163e4de83fb4f2bdfe46c1e654bdb0c5d43", size = 45881, upload-time = "2025-06-09T23:01:12.296Z" }, + { url = "https://files.pythonhosted.org/packages/19/7c/71bb0bbe0832793c601fff68cd0cf6143753d0c667f9aec93d3c323f4b55/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dab46c723eeb2c255a64f9dc05b8dd601fde66d6b19cdb82b2e09cc6ff8d8b5d", size = 232409, upload-time = "2025-06-09T23:01:13.641Z" }, + { url = "https://files.pythonhosted.org/packages/c0/45/ed2798718910fe6eb3ba574082aaceff4528e6323f9a8570be0f7028d8e9/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6aeac207a759d0dedd2e40745575ae32ab30926ff4fa49b1635def65806fddee", size = 225132, upload-time = "2025-06-09T23:01:15.264Z" }, + { url = "https://files.pythonhosted.org/packages/ba/e2/8417ae0f8eacb1d071d4950f32f229aa6bf68ab69aab797b72a07ea68d4f/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bd8c4e58ad14b4fa7802b8be49d47993182fdd4023393899632c88fd8cd994eb", size = 237638, upload-time = "2025-06-09T23:01:16.752Z" }, + { url = "https://files.pythonhosted.org/packages/f8/b7/2ace5450ce85f2af05a871b8c8719b341294775a0a6c5585d5e6170f2ce7/frozenlist-1.7.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:04fb24d104f425da3540ed83cbfc31388a586a7696142004c577fa61c6298c3f", size = 233539, upload-time = "2025-06-09T23:01:18.202Z" }, + { url = "https://files.pythonhosted.org/packages/46/b9/6989292c5539553dba63f3c83dc4598186ab2888f67c0dc1d917e6887db6/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6a5c505156368e4ea6b53b5ac23c92d7edc864537ff911d2fb24c140bb175e60", size = 215646, 
upload-time = "2025-06-09T23:01:19.649Z" }, + { url = "https://files.pythonhosted.org/packages/72/31/bc8c5c99c7818293458fe745dab4fd5730ff49697ccc82b554eb69f16a24/frozenlist-1.7.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bd7eb96a675f18aa5c553eb7ddc24a43c8c18f22e1f9925528128c052cdbe00", size = 232233, upload-time = "2025-06-09T23:01:21.175Z" }, + { url = "https://files.pythonhosted.org/packages/59/52/460db4d7ba0811b9ccb85af996019f5d70831f2f5f255f7cc61f86199795/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:05579bf020096fe05a764f1f84cd104a12f78eaab68842d036772dc6d4870b4b", size = 227996, upload-time = "2025-06-09T23:01:23.098Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c9/f4b39e904c03927b7ecf891804fd3b4df3db29b9e487c6418e37988d6e9d/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:376b6222d114e97eeec13d46c486facd41d4f43bab626b7c3f6a8b4e81a5192c", size = 242280, upload-time = "2025-06-09T23:01:24.808Z" }, + { url = "https://files.pythonhosted.org/packages/b8/33/3f8d6ced42f162d743e3517781566b8481322be321b486d9d262adf70bfb/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0aa7e176ebe115379b5b1c95b4096fb1c17cce0847402e227e712c27bdb5a949", size = 217717, upload-time = "2025-06-09T23:01:26.28Z" }, + { url = "https://files.pythonhosted.org/packages/3e/e8/ad683e75da6ccef50d0ab0c2b2324b32f84fc88ceee778ed79b8e2d2fe2e/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:3fbba20e662b9c2130dc771e332a99eff5da078b2b2648153a40669a6d0e36ca", size = 236644, upload-time = "2025-06-09T23:01:27.887Z" }, + { url = "https://files.pythonhosted.org/packages/b2/14/8d19ccdd3799310722195a72ac94ddc677541fb4bef4091d8e7775752360/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:f3f4410a0a601d349dd406b5713fec59b4cee7e71678d5b17edda7f4655a940b", size = 238879, upload-time = "2025-06-09T23:01:29.524Z" }, + { 
url = "https://files.pythonhosted.org/packages/ce/13/c12bf657494c2fd1079a48b2db49fa4196325909249a52d8f09bc9123fd7/frozenlist-1.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e2cdfaaec6a2f9327bf43c933c0319a7c429058e8537c508964a133dffee412e", size = 232502, upload-time = "2025-06-09T23:01:31.287Z" }, + { url = "https://files.pythonhosted.org/packages/d7/8b/e7f9dfde869825489382bc0d512c15e96d3964180c9499efcec72e85db7e/frozenlist-1.7.0-cp313-cp313-win32.whl", hash = "sha256:5fc4df05a6591c7768459caba1b342d9ec23fa16195e744939ba5914596ae3e1", size = 39169, upload-time = "2025-06-09T23:01:35.503Z" }, + { url = "https://files.pythonhosted.org/packages/35/89/a487a98d94205d85745080a37860ff5744b9820a2c9acbcdd9440bfddf98/frozenlist-1.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:52109052b9791a3e6b5d1b65f4b909703984b770694d3eb64fad124c835d7cba", size = 43219, upload-time = "2025-06-09T23:01:36.784Z" }, + { url = "https://files.pythonhosted.org/packages/56/d5/5c4cf2319a49eddd9dd7145e66c4866bdc6f3dbc67ca3d59685149c11e0d/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a6f86e4193bb0e235ef6ce3dde5cbabed887e0b11f516ce8a0f4d3b33078ec2d", size = 84345, upload-time = "2025-06-09T23:01:38.295Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7d/ec2c1e1dc16b85bc9d526009961953df9cec8481b6886debb36ec9107799/frozenlist-1.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:82d664628865abeb32d90ae497fb93df398a69bb3434463d172b80fc25b0dd7d", size = 48880, upload-time = "2025-06-09T23:01:39.887Z" }, + { url = "https://files.pythonhosted.org/packages/69/86/f9596807b03de126e11e7d42ac91e3d0b19a6599c714a1989a4e85eeefc4/frozenlist-1.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:912a7e8375a1c9a68325a902f3953191b7b292aa3c3fb0d71a216221deca460b", size = 48498, upload-time = "2025-06-09T23:01:41.318Z" }, + { url = 
"https://files.pythonhosted.org/packages/5e/cb/df6de220f5036001005f2d726b789b2c0b65f2363b104bbc16f5be8084f8/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9537c2777167488d539bc5de2ad262efc44388230e5118868e172dd4a552b146", size = 292296, upload-time = "2025-06-09T23:01:42.685Z" }, + { url = "https://files.pythonhosted.org/packages/83/1f/de84c642f17c8f851a2905cee2dae401e5e0daca9b5ef121e120e19aa825/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f34560fb1b4c3e30ba35fa9a13894ba39e5acfc5f60f57d8accde65f46cc5e74", size = 273103, upload-time = "2025-06-09T23:01:44.166Z" }, + { url = "https://files.pythonhosted.org/packages/88/3c/c840bfa474ba3fa13c772b93070893c6e9d5c0350885760376cbe3b6c1b3/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acd03d224b0175f5a850edc104ac19040d35419eddad04e7cf2d5986d98427f1", size = 292869, upload-time = "2025-06-09T23:01:45.681Z" }, + { url = "https://files.pythonhosted.org/packages/a6/1c/3efa6e7d5a39a1d5ef0abeb51c48fb657765794a46cf124e5aca2c7a592c/frozenlist-1.7.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2038310bc582f3d6a09b3816ab01737d60bf7b1ec70f5356b09e84fb7408ab1", size = 291467, upload-time = "2025-06-09T23:01:47.234Z" }, + { url = "https://files.pythonhosted.org/packages/4f/00/d5c5e09d4922c395e2f2f6b79b9a20dab4b67daaf78ab92e7729341f61f6/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8c05e4c8e5f36e5e088caa1bf78a687528f83c043706640a92cb76cd6999384", size = 266028, upload-time = "2025-06-09T23:01:48.819Z" }, + { url = "https://files.pythonhosted.org/packages/4e/27/72765be905619dfde25a7f33813ac0341eb6b076abede17a2e3fbfade0cb/frozenlist-1.7.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:765bb588c86e47d0b68f23c1bee323d4b703218037765dcf3f25c838c6fecceb", size = 284294, upload-time = "2025-06-09T23:01:50.394Z" }, + { url = "https://files.pythonhosted.org/packages/88/67/c94103a23001b17808eb7dd1200c156bb69fb68e63fcf0693dde4cd6228c/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:32dc2e08c67d86d0969714dd484fd60ff08ff81d1a1e40a77dd34a387e6ebc0c", size = 281898, upload-time = "2025-06-09T23:01:52.234Z" }, + { url = "https://files.pythonhosted.org/packages/42/34/a3e2c00c00f9e2a9db5653bca3fec306349e71aff14ae45ecc6d0951dd24/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c0303e597eb5a5321b4de9c68e9845ac8f290d2ab3f3e2c864437d3c5a30cd65", size = 290465, upload-time = "2025-06-09T23:01:53.788Z" }, + { url = "https://files.pythonhosted.org/packages/bb/73/f89b7fbce8b0b0c095d82b008afd0590f71ccb3dee6eee41791cf8cd25fd/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:a47f2abb4e29b3a8d0b530f7c3598badc6b134562b1a5caee867f7c62fee51e3", size = 266385, upload-time = "2025-06-09T23:01:55.769Z" }, + { url = "https://files.pythonhosted.org/packages/cd/45/e365fdb554159462ca12df54bc59bfa7a9a273ecc21e99e72e597564d1ae/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3d688126c242a6fabbd92e02633414d40f50bb6002fa4cf995a1d18051525657", size = 288771, upload-time = "2025-06-09T23:01:57.4Z" }, + { url = "https://files.pythonhosted.org/packages/00/11/47b6117002a0e904f004d70ec5194fe9144f117c33c851e3d51c765962d0/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:4e7e9652b3d367c7bd449a727dc79d5043f48b88d0cbfd4f9f1060cf2b414104", size = 288206, upload-time = "2025-06-09T23:01:58.936Z" }, + { url = "https://files.pythonhosted.org/packages/40/37/5f9f3c3fd7f7746082ec67bcdc204db72dad081f4f83a503d33220a92973/frozenlist-1.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1a85e345b4c43db8b842cab1feb41be5cc0b10a1830e6295b69d7310f99becaf", size = 282620, upload-time = 
"2025-06-09T23:02:00.493Z" }, + { url = "https://files.pythonhosted.org/packages/0b/31/8fbc5af2d183bff20f21aa743b4088eac4445d2bb1cdece449ae80e4e2d1/frozenlist-1.7.0-cp313-cp313t-win32.whl", hash = "sha256:3a14027124ddb70dfcee5148979998066897e79f89f64b13328595c4bdf77c81", size = 43059, upload-time = "2025-06-09T23:02:02.072Z" }, + { url = "https://files.pythonhosted.org/packages/bb/ed/41956f52105b8dbc26e457c5705340c67c8cc2b79f394b79bffc09d0e938/frozenlist-1.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3bf8010d71d4507775f658e9823210b7427be36625b387221642725b515dcf3e", size = 47516, upload-time = "2025-06-09T23:02:03.779Z" }, + { url = "https://files.pythonhosted.org/packages/ee/45/b82e3c16be2182bff01179db177fe144d58b5dc787a7d4492c6ed8b9317f/frozenlist-1.7.0-py3-none-any.whl", hash = "sha256:9a5af342e34f7e97caf8c995864c7a396418ae2859cc6fdf1b1073020d516a7e", size = 13106, upload-time = "2025-06-09T23:02:34.204Z" }, ] [[package]] @@ -399,7 +759,7 @@ wheels = [ [[package]] name = "google-cloud-bigquery" -version = "3.34.0" +version = "3.35.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core", extra = ["grpc"] }, @@ -410,9 +770,9 @@ dependencies = [ { name = "python-dateutil" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/24/f9/e9da2d56d7028f05c0e2f5edf6ce43c773220c3172666c3dd925791d763d/google_cloud_bigquery-3.34.0.tar.gz", hash = "sha256:5ee1a78ba5c2ccb9f9a8b2bf3ed76b378ea68f49b6cac0544dc55cc97ff7c1ce", size = 489091, upload-time = "2025-05-29T17:18:06.03Z" } +sdist = { url = "https://files.pythonhosted.org/packages/15/ee/fc5e651899abd7b7c631afc270fc668c4d757d27403c8ec2c11f0588f226/google_cloud_bigquery-3.35.0.tar.gz", hash = "sha256:b3db627355303ac52e07548d448d6c6cb87e52d80c88e57599cdd64185f40664", size = 496456, upload-time = "2025-07-16T00:36:44.83Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/b1/7e/7115c4f67ca0bc678f25bff1eab56cc37d06eb9a3978940b2ebd0705aa0a/google_cloud_bigquery-3.34.0-py3-none-any.whl", hash = "sha256:de20ded0680f8136d92ff5256270b5920dfe4fae479f5d0f73e90e5df30b1cf7", size = 253555, upload-time = "2025-05-29T17:18:02.904Z" }, + { url = "https://files.pythonhosted.org/packages/95/2c/663be60fe7c4090d84267a17204fceaa4efd541000325d4f9690f6c6fcdc/google_cloud_bigquery-3.35.0-py3-none-any.whl", hash = "sha256:8c98e304d47c82f1fbba77b2f4c1e6c458474842d713ee117d9c58e61b74a70d", size = 256874, upload-time = "2025-07-16T00:36:43.292Z" }, ] [[package]] @@ -646,6 +1006,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/25/0a/6269e3473b09aed2dab8aa1a600c70f31f00ae1349bee30658f7e358a159/httpx_sse-0.4.1-py3-none-any.whl", hash = "sha256:cba42174344c3a5b06f255ce65b350880f962d99ead85e776f23c6618a377a37", size = 8054, upload-time = "2025-06-24T13:21:04.772Z" }, ] +[[package]] +name = "identify" +version = "2.6.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = "2025-05-23T20:37:51.495Z" }, +] + [[package]] name = "idna" version = "3.10" @@ -655,9 +1024,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = 
"2024-09-15T18:07:37.964Z" }, ] +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + [[package]] name = "jsonschema" -version = "4.24.0" +version = "4.24.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "attrs" }, @@ -665,9 +1043,9 @@ dependencies = [ { name = "referencing" }, { name = "rpds-py" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/bf/d3/1cf5326b923a53515d8f3a2cd442e6d7e94fcc444716e879ea70a0ce3177/jsonschema-4.24.0.tar.gz", hash = "sha256:0b4e8069eb12aedfa881333004bccaec24ecef5a8a6a4b6df142b2cc9599d196", size = 353480, upload-time = "2025-05-26T18:48:10.459Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/6e/35174c1d3f30560848c82d3c233c01420e047d70925c897a4d6e932b4898/jsonschema-4.24.1.tar.gz", hash = "sha256:fe45a130cc7f67cd0d67640b4e7e3e2e666919462ae355eda238296eafeb4b5d", size = 356635, upload-time = "2025-07-17T14:40:01.05Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a2/3d/023389198f69c722d039351050738d6755376c8fd343e91dc493ea485905/jsonschema-4.24.0-py3-none-any.whl", hash = "sha256:a462455f19f5faf404a7902952b6f0e3ce868f3ee09a359b05eca6673bd8412d", size = 88709, upload-time = "2025-05-26T18:48:08.417Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/7f/ea48ffb58f9791f9d97ccb35e42fea1ebc81c67ce36dc4b8b2eee60e8661/jsonschema-4.24.1-py3-none-any.whl", hash = "sha256:6b916866aa0b61437785f1277aa2cbd63512e8d4b47151072ef13292049b4627", size = 89060, upload-time = "2025-07-17T14:39:59.471Z" }, ] [[package]] @@ -687,7 +1065,9 @@ name = "m3-mcp" source = { editable = "." } dependencies = [ { name = "appdirs" }, + { name = "beartype" }, { name = "beautifulsoup4" }, + { name = "click" }, { name = "cryptography" }, { name = "db-dtypes" }, { name = "fastmcp" }, @@ -695,19 +1075,35 @@ dependencies = [ { name = "httpx" }, { name = "pandas" }, { name = "polars", extra = ["pyarrow"] }, + { name = "pyaml" }, { name = "pyjwt", extra = ["crypto"] }, { name = "python-jose", extra = ["cryptography"] }, { name = "requests" }, { name = "rich" }, + { name = "rich-pyfiglet" }, { name = "sqlalchemy" }, { name = "sqlparse" }, + { name = "thefuzz" }, { name = "typer" }, ] +[package.dev-dependencies] +dev = [ + { name = "aiohttp" }, + { name = "pre-commit" }, + { name = "pytest" }, + { name = "pytest-asyncio" }, + { name = "pytest-cov" }, + { name = "pytest-mock" }, + { name = "ruff" }, +] + [package.metadata] requires-dist = [ { name = "appdirs", specifier = ">=1.4.0" }, + { name = "beartype", specifier = ">=0.21.0" }, { name = "beautifulsoup4", specifier = ">=4.12.0" }, + { name = "click", specifier = "==8.1.8" }, { name = "cryptography", specifier = ">=41.0.0" }, { name = "db-dtypes", specifier = ">=1.0.0" }, { name = "fastmcp", specifier = ">=0.1.0" }, @@ -715,15 +1111,29 @@ requires-dist = [ { name = "httpx", specifier = ">=0.24.0" }, { name = "pandas", specifier = ">=2.0.0" }, { name = "polars", extras = ["pyarrow"], specifier = ">=0.20.10" }, + { name = "pyaml", specifier = ">=25.7.0" }, { name = "pyjwt", extras = ["crypto"], specifier = ">=2.8.0" }, { name = "python-jose", extras = ["cryptography"], specifier = ">=3.3.0" }, { name = "requests", specifier = ">=2.30.0" }, { name = "rich", 
specifier = ">=13.0.0" }, + { name = "rich-pyfiglet", specifier = ">=0.1.4" }, { name = "sqlalchemy", specifier = ">=2.0.0" }, { name = "sqlparse", specifier = ">=0.4.0" }, + { name = "thefuzz", specifier = ">=0.22.1" }, { name = "typer", specifier = ">=0.9.0" }, ] +[package.metadata.requires-dev] +dev = [ + { name = "aiohttp", specifier = ">=3.8.0" }, + { name = "pre-commit", specifier = ">=3.0.0" }, + { name = "pytest", specifier = ">=7.4.0" }, + { name = "pytest-asyncio", specifier = ">=0.23.0" }, + { name = "pytest-cov", specifier = ">=6.2.1" }, + { name = "pytest-mock", specifier = ">=3.10.0" }, + { name = "ruff", specifier = ">=0.4.0" }, +] + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -738,7 +1148,7 @@ wheels = [ [[package]] name = "mcp" -version = "1.10.1" +version = "1.12.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -748,13 +1158,14 @@ dependencies = [ { name = "pydantic" }, { name = "pydantic-settings" }, { name = "python-multipart" }, + { name = "pywin32", marker = "sys_platform == 'win32'" }, { name = "sse-starlette" }, { name = "starlette" }, { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/7c/68/63045305f29ff680a9cd5be360c755270109e6b76f696ea6824547ddbc30/mcp-1.10.1.tar.gz", hash = "sha256:aaa0957d8307feeff180da2d9d359f2b801f35c0c67f1882136239055ef034c2", size = 392969, upload-time = "2025-06-27T12:03:08.982Z" } +sdist = { url = "https://files.pythonhosted.org/packages/45/94/caa0f4754e2437f7033068989f13fee784856f95870c786b0b5c2c0f511e/mcp-1.12.0.tar.gz", hash = "sha256:853f6b17a3f31ea6e2f278c2ec7d3b38457bc80c7c2c675260dd7f04a6fd0e70", size = 424678, upload-time = "2025-07-17T19:46:35.522Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/3f/435a5b3d10ae242a9d6c2b33175551173c3c61fe637dc893be05c4ed0aaf/mcp-1.10.1-py3-none-any.whl", hash = 
"sha256:4d08301aefe906dce0fa482289db55ce1db831e3e67212e65b5e23ad8454b3c5", size = 150878, upload-time = "2025-06-27T12:03:07.328Z" }, + { url = "https://files.pythonhosted.org/packages/ed/da/c7eaab6a58f1034de115b7902141ad8f81b4f3bbf7dc0cc267594947a4d7/mcp-1.12.0-py3-none-any.whl", hash = "sha256:19a498b2bf273283e463b4dd1ed83f791fbba5c25bfa16b8b34cfd5571673e7f", size = 158470, upload-time = "2025-07-17T19:46:34.166Z" }, ] [[package]] @@ -766,6 +1177,117 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, ] +[[package]] +name = "multidict" +version = "6.6.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/2c/5dad12e82fbdf7470f29bff2171484bf07cb3b16ada60a6589af8f376440/multidict-6.6.3.tar.gz", hash = "sha256:798a9eb12dab0a6c2e29c1de6f3468af5cb2da6053a20dfa3344907eed0937cc", size = 101006, upload-time = "2025-06-30T15:53:46.929Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/67/414933982bce2efce7cbcb3169eaaf901e0f25baec69432b4874dfb1f297/multidict-6.6.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a2be5b7b35271f7fff1397204ba6708365e3d773579fe2a30625e16c4b4ce817", size = 77017, upload-time = "2025-06-30T15:50:58.931Z" }, + { url = "https://files.pythonhosted.org/packages/8a/fe/d8a3ee1fad37dc2ef4f75488b0d9d4f25bf204aad8306cbab63d97bff64a/multidict-6.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12f4581d2930840295c461764b9a65732ec01250b46c6b2c510d7ee68872b140", size = 44897, upload-time = "2025-06-30T15:51:00.999Z" }, + { url = 
"https://files.pythonhosted.org/packages/1f/e0/265d89af8c98240265d82b8cbcf35897f83b76cd59ee3ab3879050fd8c45/multidict-6.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dd7793bab517e706c9ed9d7310b06c8672fd0aeee5781bfad612f56b8e0f7d14", size = 44574, upload-time = "2025-06-30T15:51:02.449Z" }, + { url = "https://files.pythonhosted.org/packages/e6/05/6b759379f7e8e04ccc97cfb2a5dcc5cdbd44a97f072b2272dc51281e6a40/multidict-6.6.3-cp310-cp310-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:72d8815f2cd3cf3df0f83cac3f3ef801d908b2d90409ae28102e0553af85545a", size = 225729, upload-time = "2025-06-30T15:51:03.794Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f5/8d5a15488edd9a91fa4aad97228d785df208ed6298580883aa3d9def1959/multidict-6.6.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:531e331a2ee53543ab32b16334e2deb26f4e6b9b28e41f8e0c87e99a6c8e2d69", size = 242515, upload-time = "2025-06-30T15:51:05.002Z" }, + { url = "https://files.pythonhosted.org/packages/6e/b5/a8f317d47d0ac5bb746d6d8325885c8967c2a8ce0bb57be5399e3642cccb/multidict-6.6.3-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:42ca5aa9329a63be8dc49040f63817d1ac980e02eeddba763a9ae5b4027b9c9c", size = 222224, upload-time = "2025-06-30T15:51:06.148Z" }, + { url = "https://files.pythonhosted.org/packages/76/88/18b2a0d5e80515fa22716556061189c2853ecf2aa2133081ebbe85ebea38/multidict-6.6.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:208b9b9757060b9faa6f11ab4bc52846e4f3c2fb8b14d5680c8aac80af3dc751", size = 253124, upload-time = "2025-06-30T15:51:07.375Z" }, + { url = "https://files.pythonhosted.org/packages/62/bf/ebfcfd6b55a1b05ef16d0775ae34c0fe15e8dab570d69ca9941073b969e7/multidict-6.6.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:acf6b97bd0884891af6a8b43d0f586ab2fcf8e717cbd47ab4bdddc09e20652d8", size = 251529, upload-time = "2025-06-30T15:51:08.691Z" }, + { url = "https://files.pythonhosted.org/packages/44/11/780615a98fd3775fc309d0234d563941af69ade2df0bb82c91dda6ddaea1/multidict-6.6.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:68e9e12ed00e2089725669bdc88602b0b6f8d23c0c95e52b95f0bc69f7fe9b55", size = 241627, upload-time = "2025-06-30T15:51:10.605Z" }, + { url = "https://files.pythonhosted.org/packages/28/3d/35f33045e21034b388686213752cabc3a1b9d03e20969e6fa8f1b1d82db1/multidict-6.6.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:05db2f66c9addb10cfa226e1acb363450fab2ff8a6df73c622fefe2f5af6d4e7", size = 239351, upload-time = "2025-06-30T15:51:12.18Z" }, + { url = "https://files.pythonhosted.org/packages/6e/cc/ff84c03b95b430015d2166d9aae775a3985d757b94f6635010d0038d9241/multidict-6.6.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:0db58da8eafb514db832a1b44f8fa7906fdd102f7d982025f816a93ba45e3dcb", size = 233429, upload-time = "2025-06-30T15:51:13.533Z" }, + { url = "https://files.pythonhosted.org/packages/2e/f0/8cd49a0b37bdea673a4b793c2093f2f4ba8e7c9d6d7c9bd672fd6d38cd11/multidict-6.6.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:14117a41c8fdb3ee19c743b1c027da0736fdb79584d61a766da53d399b71176c", size = 243094, upload-time = "2025-06-30T15:51:14.815Z" }, + { url = "https://files.pythonhosted.org/packages/96/19/5d9a0cfdafe65d82b616a45ae950975820289069f885328e8185e64283c2/multidict-6.6.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:877443eaaabcd0b74ff32ebeed6f6176c71850feb7d6a1d2db65945256ea535c", size = 248957, upload-time = "2025-06-30T15:51:16.076Z" }, + { url = "https://files.pythonhosted.org/packages/e6/dc/c90066151da87d1e489f147b9b4327927241e65f1876702fafec6729c014/multidict-6.6.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = 
"sha256:70b72e749a4f6e7ed8fb334fa8d8496384840319512746a5f42fa0aec79f4d61", size = 243590, upload-time = "2025-06-30T15:51:17.413Z" }, + { url = "https://files.pythonhosted.org/packages/ec/39/458afb0cccbb0ee9164365273be3e039efddcfcb94ef35924b7dbdb05db0/multidict-6.6.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:43571f785b86afd02b3855c5ac8e86ec921b760298d6f82ff2a61daf5a35330b", size = 237487, upload-time = "2025-06-30T15:51:19.039Z" }, + { url = "https://files.pythonhosted.org/packages/35/38/0016adac3990426610a081787011177e661875546b434f50a26319dc8372/multidict-6.6.3-cp310-cp310-win32.whl", hash = "sha256:20c5a0c3c13a15fd5ea86c42311859f970070e4e24de5a550e99d7c271d76318", size = 41390, upload-time = "2025-06-30T15:51:20.362Z" }, + { url = "https://files.pythonhosted.org/packages/f3/d2/17897a8f3f2c5363d969b4c635aa40375fe1f09168dc09a7826780bfb2a4/multidict-6.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab0a34a007704c625e25a9116c6770b4d3617a071c8a7c30cd338dfbadfe6485", size = 45954, upload-time = "2025-06-30T15:51:21.383Z" }, + { url = "https://files.pythonhosted.org/packages/2d/5f/d4a717c1e457fe44072e33fa400d2b93eb0f2819c4d669381f925b7cba1f/multidict-6.6.3-cp310-cp310-win_arm64.whl", hash = "sha256:769841d70ca8bdd140a715746199fc6473414bd02efd678d75681d2d6a8986c5", size = 42981, upload-time = "2025-06-30T15:51:22.809Z" }, + { url = "https://files.pythonhosted.org/packages/08/f0/1a39863ced51f639c81a5463fbfa9eb4df59c20d1a8769ab9ef4ca57ae04/multidict-6.6.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:18f4eba0cbac3546b8ae31e0bbc55b02c801ae3cbaf80c247fcdd89b456ff58c", size = 76445, upload-time = "2025-06-30T15:51:24.01Z" }, + { url = "https://files.pythonhosted.org/packages/c9/0e/a7cfa451c7b0365cd844e90b41e21fab32edaa1e42fc0c9f68461ce44ed7/multidict-6.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef43b5dd842382329e4797c46f10748d8c2b6e0614f46b4afe4aee9ac33159df", size = 44610, upload-time = "2025-06-30T15:51:25.158Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/bb/a14a4efc5ee748cc1904b0748be278c31b9295ce5f4d2ef66526f410b94d/multidict-6.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf9bd1fd5eec01494e0f2e8e446a74a85d5e49afb63d75a9934e4a5423dba21d", size = 44267, upload-time = "2025-06-30T15:51:26.326Z" }, + { url = "https://files.pythonhosted.org/packages/c2/f8/410677d563c2d55e063ef74fe578f9d53fe6b0a51649597a5861f83ffa15/multidict-6.6.3-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:5bd8d6f793a787153956cd35e24f60485bf0651c238e207b9a54f7458b16d539", size = 230004, upload-time = "2025-06-30T15:51:27.491Z" }, + { url = "https://files.pythonhosted.org/packages/fd/df/2b787f80059314a98e1ec6a4cc7576244986df3e56b3c755e6fc7c99e038/multidict-6.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bf99b4daf908c73856bd87ee0a2499c3c9a3d19bb04b9c6025e66af3fd07462", size = 247196, upload-time = "2025-06-30T15:51:28.762Z" }, + { url = "https://files.pythonhosted.org/packages/05/f2/f9117089151b9a8ab39f9019620d10d9718eec2ac89e7ca9d30f3ec78e96/multidict-6.6.3-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b9e59946b49dafaf990fd9c17ceafa62976e8471a14952163d10a7a630413a9", size = 225337, upload-time = "2025-06-30T15:51:30.025Z" }, + { url = "https://files.pythonhosted.org/packages/93/2d/7115300ec5b699faa152c56799b089a53ed69e399c3c2d528251f0aeda1a/multidict-6.6.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e2db616467070d0533832d204c54eea6836a5e628f2cb1e6dfd8cd6ba7277cb7", size = 257079, upload-time = "2025-06-30T15:51:31.716Z" }, + { url = "https://files.pythonhosted.org/packages/15/ea/ff4bab367623e39c20d3b07637225c7688d79e4f3cc1f3b9f89867677f9a/multidict-6.6.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:7394888236621f61dcdd25189b2768ae5cc280f041029a5bcf1122ac63df79f9", size = 255461, upload-time = "2025-06-30T15:51:33.029Z" }, + { url = "https://files.pythonhosted.org/packages/74/07/2c9246cda322dfe08be85f1b8739646f2c4c5113a1422d7a407763422ec4/multidict-6.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f114d8478733ca7388e7c7e0ab34b72547476b97009d643644ac33d4d3fe1821", size = 246611, upload-time = "2025-06-30T15:51:34.47Z" }, + { url = "https://files.pythonhosted.org/packages/a8/62/279c13d584207d5697a752a66ffc9bb19355a95f7659140cb1b3cf82180e/multidict-6.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:cdf22e4db76d323bcdc733514bf732e9fb349707c98d341d40ebcc6e9318ef3d", size = 243102, upload-time = "2025-06-30T15:51:36.525Z" }, + { url = "https://files.pythonhosted.org/packages/69/cc/e06636f48c6d51e724a8bc8d9e1db5f136fe1df066d7cafe37ef4000f86a/multidict-6.6.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e995a34c3d44ab511bfc11aa26869b9d66c2d8c799fa0e74b28a473a692532d6", size = 238693, upload-time = "2025-06-30T15:51:38.278Z" }, + { url = "https://files.pythonhosted.org/packages/89/a4/66c9d8fb9acf3b226cdd468ed009537ac65b520aebdc1703dd6908b19d33/multidict-6.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:766a4a5996f54361d8d5a9050140aa5362fe48ce51c755a50c0bc3706460c430", size = 246582, upload-time = "2025-06-30T15:51:39.709Z" }, + { url = "https://files.pythonhosted.org/packages/cf/01/c69e0317be556e46257826d5449feb4e6aa0d18573e567a48a2c14156f1f/multidict-6.6.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3893a0d7d28a7fe6ca7a1f760593bc13038d1d35daf52199d431b61d2660602b", size = 253355, upload-time = "2025-06-30T15:51:41.013Z" }, + { url = "https://files.pythonhosted.org/packages/c0/da/9cc1da0299762d20e626fe0042e71b5694f9f72d7d3f9678397cbaa71b2b/multidict-6.6.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:934796c81ea996e61914ba58064920d6cad5d99140ac3167901eb932150e2e56", size = 247774, upload-time = "2025-06-30T15:51:42.291Z" }, + { url = "https://files.pythonhosted.org/packages/e6/91/b22756afec99cc31105ddd4a52f95ab32b1a4a58f4d417979c570c4a922e/multidict-6.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9ed948328aec2072bc00f05d961ceadfd3e9bfc2966c1319aeaf7b7c21219183", size = 242275, upload-time = "2025-06-30T15:51:43.642Z" }, + { url = "https://files.pythonhosted.org/packages/be/f1/adcc185b878036a20399d5be5228f3cbe7f823d78985d101d425af35c800/multidict-6.6.3-cp311-cp311-win32.whl", hash = "sha256:9f5b28c074c76afc3e4c610c488e3493976fe0e596dd3db6c8ddfbb0134dcac5", size = 41290, upload-time = "2025-06-30T15:51:45.264Z" }, + { url = "https://files.pythonhosted.org/packages/e0/d4/27652c1c6526ea6b4f5ddd397e93f4232ff5de42bea71d339bc6a6cc497f/multidict-6.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:bc7f6fbc61b1c16050a389c630da0b32fc6d4a3d191394ab78972bf5edc568c2", size = 45942, upload-time = "2025-06-30T15:51:46.377Z" }, + { url = "https://files.pythonhosted.org/packages/16/18/23f4932019804e56d3c2413e237f866444b774b0263bcb81df2fdecaf593/multidict-6.6.3-cp311-cp311-win_arm64.whl", hash = "sha256:d4e47d8faffaae822fb5cba20937c048d4f734f43572e7079298a6c39fb172cb", size = 42880, upload-time = "2025-06-30T15:51:47.561Z" }, + { url = "https://files.pythonhosted.org/packages/0e/a0/6b57988ea102da0623ea814160ed78d45a2645e4bbb499c2896d12833a70/multidict-6.6.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:056bebbeda16b2e38642d75e9e5310c484b7c24e3841dc0fb943206a72ec89d6", size = 76514, upload-time = "2025-06-30T15:51:48.728Z" }, + { url = "https://files.pythonhosted.org/packages/07/7a/d1e92665b0850c6c0508f101f9cf0410c1afa24973e1115fe9c6a185ebf7/multidict-6.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e5f481cccb3c5c5e5de5d00b5141dc589c1047e60d07e85bbd7dea3d4580d63f", size = 45394, upload-time = "2025-06-30T15:51:49.986Z" }, + { url = 
"https://files.pythonhosted.org/packages/52/6f/dd104490e01be6ef8bf9573705d8572f8c2d2c561f06e3826b081d9e6591/multidict-6.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:10bea2ee839a759ee368b5a6e47787f399b41e70cf0c20d90dfaf4158dfb4e55", size = 43590, upload-time = "2025-06-30T15:51:51.331Z" }, + { url = "https://files.pythonhosted.org/packages/44/fe/06e0e01b1b0611e6581b7fd5a85b43dacc08b6cea3034f902f383b0873e5/multidict-6.6.3-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:2334cfb0fa9549d6ce2c21af2bfbcd3ac4ec3646b1b1581c88e3e2b1779ec92b", size = 237292, upload-time = "2025-06-30T15:51:52.584Z" }, + { url = "https://files.pythonhosted.org/packages/ce/71/4f0e558fb77696b89c233c1ee2d92f3e1d5459070a0e89153c9e9e804186/multidict-6.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8fee016722550a2276ca2cb5bb624480e0ed2bd49125b2b73b7010b9090e888", size = 258385, upload-time = "2025-06-30T15:51:53.913Z" }, + { url = "https://files.pythonhosted.org/packages/e3/25/cca0e68228addad24903801ed1ab42e21307a1b4b6dd2cf63da5d3ae082a/multidict-6.6.3-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5511cb35f5c50a2db21047c875eb42f308c5583edf96bd8ebf7d770a9d68f6d", size = 242328, upload-time = "2025-06-30T15:51:55.672Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a3/46f2d420d86bbcb8fe660b26a10a219871a0fbf4d43cb846a4031533f3e0/multidict-6.6.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:712b348f7f449948e0a6c4564a21c7db965af900973a67db432d724619b3c680", size = 268057, upload-time = "2025-06-30T15:51:57.037Z" }, + { url = "https://files.pythonhosted.org/packages/9e/73/1c743542fe00794a2ec7466abd3f312ccb8fad8dff9f36d42e18fb1ec33e/multidict-6.6.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:e4e15d2138ee2694e038e33b7c3da70e6b0ad8868b9f8094a72e1414aeda9c1a", size = 269341, upload-time = "2025-06-30T15:51:59.111Z" }, + { url = "https://files.pythonhosted.org/packages/a4/11/6ec9dcbe2264b92778eeb85407d1df18812248bf3506a5a1754bc035db0c/multidict-6.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8df25594989aebff8a130f7899fa03cbfcc5d2b5f4a461cf2518236fe6f15961", size = 256081, upload-time = "2025-06-30T15:52:00.533Z" }, + { url = "https://files.pythonhosted.org/packages/9b/2b/631b1e2afeb5f1696846d747d36cda075bfdc0bc7245d6ba5c319278d6c4/multidict-6.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:159ca68bfd284a8860f8d8112cf0521113bffd9c17568579e4d13d1f1dc76b65", size = 253581, upload-time = "2025-06-30T15:52:02.43Z" }, + { url = "https://files.pythonhosted.org/packages/bf/0e/7e3b93f79efeb6111d3bf9a1a69e555ba1d07ad1c11bceb56b7310d0d7ee/multidict-6.6.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e098c17856a8c9ade81b4810888c5ad1914099657226283cab3062c0540b0643", size = 250750, upload-time = "2025-06-30T15:52:04.26Z" }, + { url = "https://files.pythonhosted.org/packages/ad/9e/086846c1d6601948e7de556ee464a2d4c85e33883e749f46b9547d7b0704/multidict-6.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:67c92ed673049dec52d7ed39f8cf9ebbadf5032c774058b4406d18c8f8fe7063", size = 251548, upload-time = "2025-06-30T15:52:06.002Z" }, + { url = "https://files.pythonhosted.org/packages/8c/7b/86ec260118e522f1a31550e87b23542294880c97cfbf6fb18cc67b044c66/multidict-6.6.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:bd0578596e3a835ef451784053cfd327d607fc39ea1a14812139339a18a0dbc3", size = 262718, upload-time = "2025-06-30T15:52:07.707Z" }, + { url = "https://files.pythonhosted.org/packages/8c/bd/22ce8f47abb0be04692c9fc4638508b8340987b18691aa7775d927b73f72/multidict-6.6.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:346055630a2df2115cd23ae271910b4cae40f4e336773550dca4889b12916e75", 
size = 259603, upload-time = "2025-06-30T15:52:09.58Z" }, + { url = "https://files.pythonhosted.org/packages/07/9c/91b7ac1691be95cd1f4a26e36a74b97cda6aa9820632d31aab4410f46ebd/multidict-6.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:555ff55a359302b79de97e0468e9ee80637b0de1fce77721639f7cd9440b3a10", size = 251351, upload-time = "2025-06-30T15:52:10.947Z" }, + { url = "https://files.pythonhosted.org/packages/6f/5c/4d7adc739884f7a9fbe00d1eac8c034023ef8bad71f2ebe12823ca2e3649/multidict-6.6.3-cp312-cp312-win32.whl", hash = "sha256:73ab034fb8d58ff85c2bcbadc470efc3fafeea8affcf8722855fb94557f14cc5", size = 41860, upload-time = "2025-06-30T15:52:12.334Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a3/0fbc7afdf7cb1aa12a086b02959307848eb6bcc8f66fcb66c0cb57e2a2c1/multidict-6.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:04cbcce84f63b9af41bad04a54d4cc4e60e90c35b9e6ccb130be2d75b71f8c17", size = 45982, upload-time = "2025-06-30T15:52:13.6Z" }, + { url = "https://files.pythonhosted.org/packages/b8/95/8c825bd70ff9b02462dc18d1295dd08d3e9e4eb66856d292ffa62cfe1920/multidict-6.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:0f1130b896ecb52d2a1e615260f3ea2af55fa7dc3d7c3003ba0c3121a759b18b", size = 43210, upload-time = "2025-06-30T15:52:14.893Z" }, + { url = "https://files.pythonhosted.org/packages/52/1d/0bebcbbb4f000751fbd09957257903d6e002943fc668d841a4cf2fb7f872/multidict-6.6.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:540d3c06d48507357a7d57721e5094b4f7093399a0106c211f33540fdc374d55", size = 75843, upload-time = "2025-06-30T15:52:16.155Z" }, + { url = "https://files.pythonhosted.org/packages/07/8f/cbe241b0434cfe257f65c2b1bcf9e8d5fb52bc708c5061fb29b0fed22bdf/multidict-6.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9c19cea2a690f04247d43f366d03e4eb110a0dc4cd1bbeee4d445435428ed35b", size = 45053, upload-time = "2025-06-30T15:52:17.429Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/d2/0b3b23f9dbad5b270b22a3ac3ea73ed0a50ef2d9a390447061178ed6bdb8/multidict-6.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7af039820cfd00effec86bda5d8debef711a3e86a1d3772e85bea0f243a4bd65", size = 43273, upload-time = "2025-06-30T15:52:19.346Z" }, + { url = "https://files.pythonhosted.org/packages/fd/fe/6eb68927e823999e3683bc49678eb20374ba9615097d085298fd5b386564/multidict-6.6.3-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:500b84f51654fdc3944e936f2922114349bf8fdcac77c3092b03449f0e5bc2b3", size = 237124, upload-time = "2025-06-30T15:52:20.773Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/320d8507e7726c460cb77117848b3834ea0d59e769f36fdae495f7669929/multidict-6.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f3fc723ab8a5c5ed6c50418e9bfcd8e6dceba6c271cee6728a10a4ed8561520c", size = 256892, upload-time = "2025-06-30T15:52:22.242Z" }, + { url = "https://files.pythonhosted.org/packages/76/60/38ee422db515ac69834e60142a1a69111ac96026e76e8e9aa347fd2e4591/multidict-6.6.3-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:94c47ea3ade005b5976789baaed66d4de4480d0a0bf31cef6edaa41c1e7b56a6", size = 240547, upload-time = "2025-06-30T15:52:23.736Z" }, + { url = "https://files.pythonhosted.org/packages/27/fb/905224fde2dff042b030c27ad95a7ae744325cf54b890b443d30a789b80e/multidict-6.6.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:dbc7cf464cc6d67e83e136c9f55726da3a30176f020a36ead246eceed87f1cd8", size = 266223, upload-time = "2025-06-30T15:52:25.185Z" }, + { url = "https://files.pythonhosted.org/packages/76/35/dc38ab361051beae08d1a53965e3e1a418752fc5be4d3fb983c5582d8784/multidict-6.6.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:900eb9f9da25ada070f8ee4a23f884e0ee66fe4e1a38c3af644256a508ad81ca", size = 267262, upload-time = "2025-06-30T15:52:26.969Z" }, + { url = "https://files.pythonhosted.org/packages/1f/a3/0a485b7f36e422421b17e2bbb5a81c1af10eac1d4476f2ff92927c730479/multidict-6.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7c6df517cf177da5d47ab15407143a89cd1a23f8b335f3a28d57e8b0a3dbb884", size = 254345, upload-time = "2025-06-30T15:52:28.467Z" }, + { url = "https://files.pythonhosted.org/packages/b4/59/bcdd52c1dab7c0e0d75ff19cac751fbd5f850d1fc39172ce809a74aa9ea4/multidict-6.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4ef421045f13879e21c994b36e728d8e7d126c91a64b9185810ab51d474f27e7", size = 252248, upload-time = "2025-06-30T15:52:29.938Z" }, + { url = "https://files.pythonhosted.org/packages/bb/a4/2d96aaa6eae8067ce108d4acee6f45ced5728beda55c0f02ae1072c730d1/multidict-6.6.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:6c1e61bb4f80895c081790b6b09fa49e13566df8fbff817da3f85b3a8192e36b", size = 250115, upload-time = "2025-06-30T15:52:31.416Z" }, + { url = "https://files.pythonhosted.org/packages/25/d2/ed9f847fa5c7d0677d4f02ea2c163d5e48573de3f57bacf5670e43a5ffaa/multidict-6.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e5e8523bb12d7623cd8300dbd91b9e439a46a028cd078ca695eb66ba31adee3c", size = 249649, upload-time = "2025-06-30T15:52:32.996Z" }, + { url = "https://files.pythonhosted.org/packages/1f/af/9155850372563fc550803d3f25373308aa70f59b52cff25854086ecb4a79/multidict-6.6.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:ef58340cc896219e4e653dade08fea5c55c6df41bcc68122e3be3e9d873d9a7b", size = 261203, upload-time = "2025-06-30T15:52:34.521Z" }, + { url = "https://files.pythonhosted.org/packages/36/2f/c6a728f699896252cf309769089568a33c6439626648843f78743660709d/multidict-6.6.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:fc9dc435ec8699e7b602b94fe0cd4703e69273a01cbc34409af29e7820f777f1", size = 258051, upload-time = "2025-06-30T15:52:35.999Z" }, + { url = "https://files.pythonhosted.org/packages/d0/60/689880776d6b18fa2b70f6cc74ff87dd6c6b9b47bd9cf74c16fecfaa6ad9/multidict-6.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9e864486ef4ab07db5e9cb997bad2b681514158d6954dd1958dfb163b83d53e6", size = 249601, upload-time = "2025-06-30T15:52:37.473Z" }, + { url = "https://files.pythonhosted.org/packages/75/5e/325b11f2222a549019cf2ef879c1f81f94a0d40ace3ef55cf529915ba6cc/multidict-6.6.3-cp313-cp313-win32.whl", hash = "sha256:5633a82fba8e841bc5c5c06b16e21529573cd654f67fd833650a215520a6210e", size = 41683, upload-time = "2025-06-30T15:52:38.927Z" }, + { url = "https://files.pythonhosted.org/packages/b1/ad/cf46e73f5d6e3c775cabd2a05976547f3f18b39bee06260369a42501f053/multidict-6.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:e93089c1570a4ad54c3714a12c2cef549dc9d58e97bcded193d928649cab78e9", size = 45811, upload-time = "2025-06-30T15:52:40.207Z" }, + { url = "https://files.pythonhosted.org/packages/c5/c9/2e3fe950db28fb7c62e1a5f46e1e38759b072e2089209bc033c2798bb5ec/multidict-6.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:c60b401f192e79caec61f166da9c924e9f8bc65548d4246842df91651e83d600", size = 43056, upload-time = "2025-06-30T15:52:41.575Z" }, + { url = "https://files.pythonhosted.org/packages/3a/58/aaf8114cf34966e084a8cc9517771288adb53465188843d5a19862cb6dc3/multidict-6.6.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:02fd8f32d403a6ff13864b0851f1f523d4c988051eea0471d4f1fd8010f11134", size = 82811, upload-time = "2025-06-30T15:52:43.281Z" }, + { url = "https://files.pythonhosted.org/packages/71/af/5402e7b58a1f5b987a07ad98f2501fdba2a4f4b4c30cf114e3ce8db64c87/multidict-6.6.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f3aa090106b1543f3f87b2041eef3c156c8da2aed90c63a2fbed62d875c49c37", size = 48304, upload-time = "2025-06-30T15:52:45.026Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/65/ab3c8cafe21adb45b24a50266fd747147dec7847425bc2a0f6934b3ae9ce/multidict-6.6.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e924fb978615a5e33ff644cc42e6aa241effcf4f3322c09d4f8cebde95aff5f8", size = 46775, upload-time = "2025-06-30T15:52:46.459Z" }, + { url = "https://files.pythonhosted.org/packages/49/ba/9fcc1b332f67cc0c0c8079e263bfab6660f87fe4e28a35921771ff3eea0d/multidict-6.6.3-cp313-cp313t-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:b9fe5a0e57c6dbd0e2ce81ca66272282c32cd11d31658ee9553849d91289e1c1", size = 229773, upload-time = "2025-06-30T15:52:47.88Z" }, + { url = "https://files.pythonhosted.org/packages/a4/14/0145a251f555f7c754ce2dcbcd012939bbd1f34f066fa5d28a50e722a054/multidict-6.6.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b24576f208793ebae00280c59927c3b7c2a3b1655e443a25f753c4611bc1c373", size = 250083, upload-time = "2025-06-30T15:52:49.366Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d4/d5c0bd2bbb173b586c249a151a26d2fb3ec7d53c96e42091c9fef4e1f10c/multidict-6.6.3-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:135631cb6c58eac37d7ac0df380294fecdc026b28837fa07c02e459c7fb9c54e", size = 228980, upload-time = "2025-06-30T15:52:50.903Z" }, + { url = "https://files.pythonhosted.org/packages/21/32/c9a2d8444a50ec48c4733ccc67254100c10e1c8ae8e40c7a2d2183b59b97/multidict-6.6.3-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:274d416b0df887aef98f19f21578653982cfb8a05b4e187d4a17103322eeaf8f", size = 257776, upload-time = "2025-06-30T15:52:52.764Z" }, + { url = "https://files.pythonhosted.org/packages/68/d0/14fa1699f4ef629eae08ad6201c6b476098f5efb051b296f4c26be7a9fdf/multidict-6.6.3-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:e252017a817fad7ce05cafbe5711ed40faeb580e63b16755a3a24e66fa1d87c0", size = 256882, upload-time = "2025-06-30T15:52:54.596Z" }, + { url = "https://files.pythonhosted.org/packages/da/88/84a27570fbe303c65607d517a5f147cd2fc046c2d1da02b84b17b9bdc2aa/multidict-6.6.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2e4cc8d848cd4fe1cdee28c13ea79ab0ed37fc2e89dd77bac86a2e7959a8c3bc", size = 247816, upload-time = "2025-06-30T15:52:56.175Z" }, + { url = "https://files.pythonhosted.org/packages/1c/60/dca352a0c999ce96a5d8b8ee0b2b9f729dcad2e0b0c195f8286269a2074c/multidict-6.6.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9e236a7094b9c4c1b7585f6b9cca34b9d833cf079f7e4c49e6a4a6ec9bfdc68f", size = 245341, upload-time = "2025-06-30T15:52:57.752Z" }, + { url = "https://files.pythonhosted.org/packages/50/ef/433fa3ed06028f03946f3993223dada70fb700f763f70c00079533c34578/multidict-6.6.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e0cb0ab69915c55627c933f0b555a943d98ba71b4d1c57bc0d0a66e2567c7471", size = 235854, upload-time = "2025-06-30T15:52:59.74Z" }, + { url = "https://files.pythonhosted.org/packages/1b/1f/487612ab56fbe35715320905215a57fede20de7db40a261759690dc80471/multidict-6.6.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:81ef2f64593aba09c5212a3d0f8c906a0d38d710a011f2f42759704d4557d3f2", size = 243432, upload-time = "2025-06-30T15:53:01.602Z" }, + { url = "https://files.pythonhosted.org/packages/da/6f/ce8b79de16cd885c6f9052c96a3671373d00c59b3ee635ea93e6e81b8ccf/multidict-6.6.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:b9cbc60010de3562545fa198bfc6d3825df430ea96d2cc509c39bd71e2e7d648", size = 252731, upload-time = "2025-06-30T15:53:03.517Z" }, + { url = "https://files.pythonhosted.org/packages/bb/fe/a2514a6aba78e5abefa1624ca85ae18f542d95ac5cde2e3815a9fbf369aa/multidict-6.6.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:70d974eaaa37211390cd02ef93b7e938de564bbffa866f0b08d07e5e65da783d", size = 247086, upload-time = "2025-06-30T15:53:05.48Z" }, + { url = "https://files.pythonhosted.org/packages/8c/22/b788718d63bb3cce752d107a57c85fcd1a212c6c778628567c9713f9345a/multidict-6.6.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3713303e4a6663c6d01d648a68f2848701001f3390a030edaaf3fc949c90bf7c", size = 243338, upload-time = "2025-06-30T15:53:07.522Z" }, + { url = "https://files.pythonhosted.org/packages/22/d6/fdb3d0670819f2228f3f7d9af613d5e652c15d170c83e5f1c94fbc55a25b/multidict-6.6.3-cp313-cp313t-win32.whl", hash = "sha256:639ecc9fe7cd73f2495f62c213e964843826f44505a3e5d82805aa85cac6f89e", size = 47812, upload-time = "2025-06-30T15:53:09.263Z" }, + { url = "https://files.pythonhosted.org/packages/b6/d6/a9d2c808f2c489ad199723197419207ecbfbc1776f6e155e1ecea9c883aa/multidict-6.6.3-cp313-cp313t-win_amd64.whl", hash = "sha256:9f97e181f344a0ef3881b573d31de8542cc0dbc559ec68c8f8b5ce2c2e91646d", size = 53011, upload-time = "2025-06-30T15:53:11.038Z" }, + { url = "https://files.pythonhosted.org/packages/f2/40/b68001cba8188dd267590a111f9661b6256debc327137667e832bf5d66e8/multidict-6.6.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ce8b7693da41a3c4fde5871c738a81490cea5496c671d74374c8ab889e1834fb", size = 45254, upload-time = "2025-06-30T15:53:12.421Z" }, + { url = "https://files.pythonhosted.org/packages/d8/30/9aec301e9772b098c1f5c0ca0279237c9766d94b97802e9888010c64b0ed/multidict-6.6.3-py3-none-any.whl", hash = "sha256:8db10f29c7541fc5da4defd8cd697e1ca429db743fa716325f236079b96f775a", size = 12313, upload-time = "2025-06-30T15:53:45.437Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, 
upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + [[package]] name = "numpy" version = "2.2.6" @@ -964,6 +1486,24 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d5/f9/07086f5b0f2a19872554abeea7658200824f5835c58a106fa8f2ae96a46c/pandas-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5db9637dbc24b631ff3707269ae4559bce4b7fd75c1c4d7e13f40edc42df4444", size = 13189044, upload-time = "2025-07-07T19:19:39.999Z" }, ] +[[package]] +name = "platformdirs" +version = "4.3.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + [[package]] name = "polars" version = "1.31.0" @@ -983,6 +1523,111 @@ pyarrow = [ { name = "pyarrow" }, ] +[[package]] +name = "pre-commit" +version = "4.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/39/679ca9b26c7bb2999ff122d50faa301e49af82ca9c066ec061cfbc0c6784/pre_commit-4.2.0.tar.gz", hash = "sha256:601283b9757afd87d40c4c4a9b2b5de9637a8ea02eaff7adc2d0fb4e04841146", size = 193424, upload-time = "2025-03-18T21:35:20.987Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/74/a88bf1b1efeae488a0c0b7bdf71429c313722d1fc0f377537fbe554e6180/pre_commit-4.2.0-py2.py3-none-any.whl", hash = "sha256:a009ca7205f1eb497d10b845e52c838a98b6cdd2102a6c8e4540e94ee75c58bd", size = 220707, upload-time = "2025-03-18T21:35:19.343Z" }, +] + +[[package]] +name = "propcache" +version = "0.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a6/16/43264e4a779dd8588c21a70f0709665ee8f611211bdd2c87d952cfa7c776/propcache-0.3.2.tar.gz", hash = "sha256:20d7d62e4e7ef05f221e0db2856b979540686342e7dd9973b815599c7057e168", size = 44139, upload-time = "2025-06-09T22:56:06.081Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/14/510deed325e262afeb8b360043c5d7c960da7d3ecd6d6f9496c9c56dc7f4/propcache-0.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:22d9962a358aedbb7a2e36187ff273adeaab9743373a272976d2e348d08c7770", size = 73178, upload-time = "2025-06-09T22:53:40.126Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/4e/ad52a7925ff01c1325653a730c7ec3175a23f948f08626a534133427dcff/propcache-0.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d0fda578d1dc3f77b6b5a5dce3b9ad69a8250a891760a548df850a5e8da87f3", size = 43133, upload-time = "2025-06-09T22:53:41.965Z" }, + { url = "https://files.pythonhosted.org/packages/63/7c/e9399ba5da7780871db4eac178e9c2e204c23dd3e7d32df202092a1ed400/propcache-0.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3def3da3ac3ce41562d85db655d18ebac740cb3fa4367f11a52b3da9d03a5cc3", size = 43039, upload-time = "2025-06-09T22:53:43.268Z" }, + { url = "https://files.pythonhosted.org/packages/22/e1/58da211eb8fdc6fc854002387d38f415a6ca5f5c67c1315b204a5d3e9d7a/propcache-0.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bec58347a5a6cebf239daba9bda37dffec5b8d2ce004d9fe4edef3d2815137e", size = 201903, upload-time = "2025-06-09T22:53:44.872Z" }, + { url = "https://files.pythonhosted.org/packages/c4/0a/550ea0f52aac455cb90111c8bab995208443e46d925e51e2f6ebdf869525/propcache-0.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55ffda449a507e9fbd4aca1a7d9aa6753b07d6166140e5a18d2ac9bc49eac220", size = 213362, upload-time = "2025-06-09T22:53:46.707Z" }, + { url = "https://files.pythonhosted.org/packages/5a/af/9893b7d878deda9bb69fcf54600b247fba7317761b7db11fede6e0f28bd0/propcache-0.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a67fb39229a8a8491dd42f864e5e263155e729c2e7ff723d6e25f596b1e8cb", size = 210525, upload-time = "2025-06-09T22:53:48.547Z" }, + { url = "https://files.pythonhosted.org/packages/7c/bb/38fd08b278ca85cde36d848091ad2b45954bc5f15cce494bb300b9285831/propcache-0.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da1cf97b92b51253d5b68cf5a2b9e0dafca095e36b7f2da335e27dc6172a614", size = 198283, upload-time = "2025-06-09T22:53:50.067Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/8c/9fe55bd01d362bafb413dfe508c48753111a1e269737fa143ba85693592c/propcache-0.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f559e127134b07425134b4065be45b166183fdcb433cb6c24c8e4149056ad50", size = 191872, upload-time = "2025-06-09T22:53:51.438Z" }, + { url = "https://files.pythonhosted.org/packages/54/14/4701c33852937a22584e08abb531d654c8bcf7948a8f87ad0a4822394147/propcache-0.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:aff2e4e06435d61f11a428360a932138d0ec288b0a31dd9bd78d200bd4a2b339", size = 199452, upload-time = "2025-06-09T22:53:53.229Z" }, + { url = "https://files.pythonhosted.org/packages/16/44/447f2253d859602095356007657ee535e0093215ea0b3d1d6a41d16e5201/propcache-0.3.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:4927842833830942a5d0a56e6f4839bc484785b8e1ce8d287359794818633ba0", size = 191567, upload-time = "2025-06-09T22:53:54.541Z" }, + { url = "https://files.pythonhosted.org/packages/f2/b3/e4756258749bb2d3b46defcff606a2f47410bab82be5824a67e84015b267/propcache-0.3.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6107ddd08b02654a30fb8ad7a132021759d750a82578b94cd55ee2772b6ebea2", size = 193015, upload-time = "2025-06-09T22:53:56.44Z" }, + { url = "https://files.pythonhosted.org/packages/1e/df/e6d3c7574233164b6330b9fd697beeac402afd367280e6dc377bb99b43d9/propcache-0.3.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:70bd8b9cd6b519e12859c99f3fc9a93f375ebd22a50296c3a295028bea73b9e7", size = 204660, upload-time = "2025-06-09T22:53:57.839Z" }, + { url = "https://files.pythonhosted.org/packages/b2/53/e4d31dd5170b4a0e2e6b730f2385a96410633b4833dc25fe5dffd1f73294/propcache-0.3.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2183111651d710d3097338dd1893fcf09c9f54e27ff1a8795495a16a469cc90b", size = 206105, upload-time = "2025-06-09T22:53:59.638Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/fe/74d54cf9fbe2a20ff786e5f7afcfde446588f0cf15fb2daacfbc267b866c/propcache-0.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:fb075ad271405dcad8e2a7ffc9a750a3bf70e533bd86e89f0603e607b93aa64c", size = 196980, upload-time = "2025-06-09T22:54:01.071Z" }, + { url = "https://files.pythonhosted.org/packages/22/ec/c469c9d59dada8a7679625e0440b544fe72e99311a4679c279562051f6fc/propcache-0.3.2-cp310-cp310-win32.whl", hash = "sha256:404d70768080d3d3bdb41d0771037da19d8340d50b08e104ca0e7f9ce55fce70", size = 37679, upload-time = "2025-06-09T22:54:03.003Z" }, + { url = "https://files.pythonhosted.org/packages/38/35/07a471371ac89d418f8d0b699c75ea6dca2041fbda360823de21f6a9ce0a/propcache-0.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:7435d766f978b4ede777002e6b3b6641dd229cd1da8d3d3106a45770365f9ad9", size = 41459, upload-time = "2025-06-09T22:54:04.134Z" }, + { url = "https://files.pythonhosted.org/packages/80/8d/e8b436717ab9c2cfc23b116d2c297305aa4cd8339172a456d61ebf5669b8/propcache-0.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0b8d2f607bd8f80ddc04088bc2a037fdd17884a6fcadc47a96e334d72f3717be", size = 74207, upload-time = "2025-06-09T22:54:05.399Z" }, + { url = "https://files.pythonhosted.org/packages/d6/29/1e34000e9766d112171764b9fa3226fa0153ab565d0c242c70e9945318a7/propcache-0.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06766d8f34733416e2e34f46fea488ad5d60726bb9481d3cddf89a6fa2d9603f", size = 43648, upload-time = "2025-06-09T22:54:08.023Z" }, + { url = "https://files.pythonhosted.org/packages/46/92/1ad5af0df781e76988897da39b5f086c2bf0f028b7f9bd1f409bb05b6874/propcache-0.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a2dc1f4a1df4fecf4e6f68013575ff4af84ef6f478fe5344317a65d38a8e6dc9", size = 43496, upload-time = "2025-06-09T22:54:09.228Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/ce/e96392460f9fb68461fabab3e095cb00c8ddf901205be4eae5ce246e5b7e/propcache-0.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be29c4f4810c5789cf10ddf6af80b041c724e629fa51e308a7a0fb19ed1ef7bf", size = 217288, upload-time = "2025-06-09T22:54:10.466Z" }, + { url = "https://files.pythonhosted.org/packages/c5/2a/866726ea345299f7ceefc861a5e782b045545ae6940851930a6adaf1fca6/propcache-0.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59d61f6970ecbd8ff2e9360304d5c8876a6abd4530cb752c06586849ac8a9dc9", size = 227456, upload-time = "2025-06-09T22:54:11.828Z" }, + { url = "https://files.pythonhosted.org/packages/de/03/07d992ccb6d930398689187e1b3c718339a1c06b8b145a8d9650e4726166/propcache-0.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62180e0b8dbb6b004baec00a7983e4cc52f5ada9cd11f48c3528d8cfa7b96a66", size = 225429, upload-time = "2025-06-09T22:54:13.823Z" }, + { url = "https://files.pythonhosted.org/packages/5d/e6/116ba39448753b1330f48ab8ba927dcd6cf0baea8a0ccbc512dfb49ba670/propcache-0.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c144ca294a204c470f18cf4c9d78887810d04a3e2fbb30eea903575a779159df", size = 213472, upload-time = "2025-06-09T22:54:15.232Z" }, + { url = "https://files.pythonhosted.org/packages/a6/85/f01f5d97e54e428885a5497ccf7f54404cbb4f906688a1690cd51bf597dc/propcache-0.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5c2a784234c28854878d68978265617aa6dc0780e53d44b4d67f3651a17a9a2", size = 204480, upload-time = "2025-06-09T22:54:17.104Z" }, + { url = "https://files.pythonhosted.org/packages/e3/79/7bf5ab9033b8b8194cc3f7cf1aaa0e9c3256320726f64a3e1f113a812dce/propcache-0.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5745bc7acdafa978ca1642891b82c19238eadc78ba2aaa293c6863b304e552d7", size = 214530, upload-time = 
"2025-06-09T22:54:18.512Z" }, + { url = "https://files.pythonhosted.org/packages/31/0b/bd3e0c00509b609317df4a18e6b05a450ef2d9a963e1d8bc9c9415d86f30/propcache-0.3.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:c0075bf773d66fa8c9d41f66cc132ecc75e5bb9dd7cce3cfd14adc5ca184cb95", size = 205230, upload-time = "2025-06-09T22:54:19.947Z" }, + { url = "https://files.pythonhosted.org/packages/7a/23/fae0ff9b54b0de4e819bbe559508da132d5683c32d84d0dc2ccce3563ed4/propcache-0.3.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5f57aa0847730daceff0497f417c9de353c575d8da3579162cc74ac294c5369e", size = 206754, upload-time = "2025-06-09T22:54:21.716Z" }, + { url = "https://files.pythonhosted.org/packages/b7/7f/ad6a3c22630aaa5f618b4dc3c3598974a72abb4c18e45a50b3cdd091eb2f/propcache-0.3.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:eef914c014bf72d18efb55619447e0aecd5fb7c2e3fa7441e2e5d6099bddff7e", size = 218430, upload-time = "2025-06-09T22:54:23.17Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2c/ba4f1c0e8a4b4c75910742f0d333759d441f65a1c7f34683b4a74c0ee015/propcache-0.3.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:2a4092e8549031e82facf3decdbc0883755d5bbcc62d3aea9d9e185549936dcf", size = 223884, upload-time = "2025-06-09T22:54:25.539Z" }, + { url = "https://files.pythonhosted.org/packages/88/e4/ebe30fc399e98572019eee82ad0caf512401661985cbd3da5e3140ffa1b0/propcache-0.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:85871b050f174bc0bfb437efbdb68aaf860611953ed12418e4361bc9c392749e", size = 211480, upload-time = "2025-06-09T22:54:26.892Z" }, + { url = "https://files.pythonhosted.org/packages/96/0a/7d5260b914e01d1d0906f7f38af101f8d8ed0dc47426219eeaf05e8ea7c2/propcache-0.3.2-cp311-cp311-win32.whl", hash = "sha256:36c8d9b673ec57900c3554264e630d45980fd302458e4ac801802a7fd2ef7897", size = 37757, upload-time = "2025-06-09T22:54:28.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/2d/89fe4489a884bc0da0c3278c552bd4ffe06a1ace559db5ef02ef24ab446b/propcache-0.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53af8cb6a781b02d2ea079b5b853ba9430fcbe18a8e3ce647d5982a3ff69f39", size = 41500, upload-time = "2025-06-09T22:54:29.4Z" }, + { url = "https://files.pythonhosted.org/packages/a8/42/9ca01b0a6f48e81615dca4765a8f1dd2c057e0540f6116a27dc5ee01dfb6/propcache-0.3.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8de106b6c84506b31c27168582cd3cb3000a6412c16df14a8628e5871ff83c10", size = 73674, upload-time = "2025-06-09T22:54:30.551Z" }, + { url = "https://files.pythonhosted.org/packages/af/6e/21293133beb550f9c901bbece755d582bfaf2176bee4774000bd4dd41884/propcache-0.3.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:28710b0d3975117239c76600ea351934ac7b5ff56e60953474342608dbbb6154", size = 43570, upload-time = "2025-06-09T22:54:32.296Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c8/0393a0a3a2b8760eb3bde3c147f62b20044f0ddac81e9d6ed7318ec0d852/propcache-0.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce26862344bdf836650ed2487c3d724b00fbfec4233a1013f597b78c1cb73615", size = 43094, upload-time = "2025-06-09T22:54:33.929Z" }, + { url = "https://files.pythonhosted.org/packages/37/2c/489afe311a690399d04a3e03b069225670c1d489eb7b044a566511c1c498/propcache-0.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bca54bd347a253af2cf4544bbec232ab982f4868de0dd684246b67a51bc6b1db", size = 226958, upload-time = "2025-06-09T22:54:35.186Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ca/63b520d2f3d418c968bf596839ae26cf7f87bead026b6192d4da6a08c467/propcache-0.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55780d5e9a2ddc59711d727226bb1ba83a22dd32f64ee15594b9392b1f544eb1", size = 234894, upload-time = "2025-06-09T22:54:36.708Z" }, + { url = 
"https://files.pythonhosted.org/packages/11/60/1d0ed6fff455a028d678df30cc28dcee7af77fa2b0e6962ce1df95c9a2a9/propcache-0.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:035e631be25d6975ed87ab23153db6a73426a48db688070d925aa27e996fe93c", size = 233672, upload-time = "2025-06-09T22:54:38.062Z" }, + { url = "https://files.pythonhosted.org/packages/37/7c/54fd5301ef38505ab235d98827207176a5c9b2aa61939b10a460ca53e123/propcache-0.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee6f22b6eaa39297c751d0e80c0d3a454f112f5c6481214fcf4c092074cecd67", size = 224395, upload-time = "2025-06-09T22:54:39.634Z" }, + { url = "https://files.pythonhosted.org/packages/ee/1a/89a40e0846f5de05fdc6779883bf46ba980e6df4d2ff8fb02643de126592/propcache-0.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ca3aee1aa955438c4dba34fc20a9f390e4c79967257d830f137bd5a8a32ed3b", size = 212510, upload-time = "2025-06-09T22:54:41.565Z" }, + { url = "https://files.pythonhosted.org/packages/5e/33/ca98368586c9566a6b8d5ef66e30484f8da84c0aac3f2d9aec6d31a11bd5/propcache-0.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7a4f30862869fa2b68380d677cc1c5fcf1e0f2b9ea0cf665812895c75d0ca3b8", size = 222949, upload-time = "2025-06-09T22:54:43.038Z" }, + { url = "https://files.pythonhosted.org/packages/ba/11/ace870d0aafe443b33b2f0b7efdb872b7c3abd505bfb4890716ad7865e9d/propcache-0.3.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b77ec3c257d7816d9f3700013639db7491a434644c906a2578a11daf13176251", size = 217258, upload-time = "2025-06-09T22:54:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d2/86fd6f7adffcfc74b42c10a6b7db721d1d9ca1055c45d39a1a8f2a740a21/propcache-0.3.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cab90ac9d3f14b2d5050928483d3d3b8fb6b4018893fc75710e6aa361ecb2474", size = 213036, upload-time = "2025-06-09T22:54:46.243Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/94/2d7d1e328f45ff34a0a284cf5a2847013701e24c2a53117e7c280a4316b3/propcache-0.3.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0b504d29f3c47cf6b9e936c1852246c83d450e8e063d50562115a6be6d3a2535", size = 227684, upload-time = "2025-06-09T22:54:47.63Z" }, + { url = "https://files.pythonhosted.org/packages/b7/05/37ae63a0087677e90b1d14710e532ff104d44bc1efa3b3970fff99b891dc/propcache-0.3.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:ce2ac2675a6aa41ddb2a0c9cbff53780a617ac3d43e620f8fd77ba1c84dcfc06", size = 234562, upload-time = "2025-06-09T22:54:48.982Z" }, + { url = "https://files.pythonhosted.org/packages/a4/7c/3f539fcae630408d0bd8bf3208b9a647ccad10976eda62402a80adf8fc34/propcache-0.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b4239611205294cc433845b914131b2a1f03500ff3c1ed093ed216b82621e1", size = 222142, upload-time = "2025-06-09T22:54:50.424Z" }, + { url = "https://files.pythonhosted.org/packages/7c/d2/34b9eac8c35f79f8a962546b3e97e9d4b990c420ee66ac8255d5d9611648/propcache-0.3.2-cp312-cp312-win32.whl", hash = "sha256:df4a81b9b53449ebc90cc4deefb052c1dd934ba85012aa912c7ea7b7e38b60c1", size = 37711, upload-time = "2025-06-09T22:54:52.072Z" }, + { url = "https://files.pythonhosted.org/packages/19/61/d582be5d226cf79071681d1b46b848d6cb03d7b70af7063e33a2787eaa03/propcache-0.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7046e79b989d7fe457bb755844019e10f693752d169076138abf17f31380800c", size = 41479, upload-time = "2025-06-09T22:54:53.234Z" }, + { url = "https://files.pythonhosted.org/packages/dc/d1/8c747fafa558c603c4ca19d8e20b288aa0c7cda74e9402f50f31eb65267e/propcache-0.3.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ca592ed634a73ca002967458187109265e980422116c0a107cf93d81f95af945", size = 71286, upload-time = "2025-06-09T22:54:54.369Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/99/d606cb7986b60d89c36de8a85d58764323b3a5ff07770a99d8e993b3fa73/propcache-0.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9ecb0aad4020e275652ba3975740f241bd12a61f1a784df044cf7477a02bc252", size = 42425, upload-time = "2025-06-09T22:54:55.642Z" }, + { url = "https://files.pythonhosted.org/packages/8c/96/ef98f91bbb42b79e9bb82bdd348b255eb9d65f14dbbe3b1594644c4073f7/propcache-0.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7f08f1cc28bd2eade7a8a3d2954ccc673bb02062e3e7da09bc75d843386b342f", size = 41846, upload-time = "2025-06-09T22:54:57.246Z" }, + { url = "https://files.pythonhosted.org/packages/5b/ad/3f0f9a705fb630d175146cd7b1d2bf5555c9beaed54e94132b21aac098a6/propcache-0.3.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1a342c834734edb4be5ecb1e9fb48cb64b1e2320fccbd8c54bf8da8f2a84c33", size = 208871, upload-time = "2025-06-09T22:54:58.975Z" }, + { url = "https://files.pythonhosted.org/packages/3a/38/2085cda93d2c8b6ec3e92af2c89489a36a5886b712a34ab25de9fbca7992/propcache-0.3.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a544caaae1ac73f1fecfae70ded3e93728831affebd017d53449e3ac052ac1e", size = 215720, upload-time = "2025-06-09T22:55:00.471Z" }, + { url = "https://files.pythonhosted.org/packages/61/c1/d72ea2dc83ac7f2c8e182786ab0fc2c7bd123a1ff9b7975bee671866fe5f/propcache-0.3.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:310d11aa44635298397db47a3ebce7db99a4cc4b9bbdfcf6c98a60c8d5261cf1", size = 215203, upload-time = "2025-06-09T22:55:01.834Z" }, + { url = "https://files.pythonhosted.org/packages/af/81/b324c44ae60c56ef12007105f1460d5c304b0626ab0cc6b07c8f2a9aa0b8/propcache-0.3.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1396592321ac83157ac03a2023aa6cc4a3cc3cfdecb71090054c09e5a7cce3", size = 206365, upload-time = "2025-06-09T22:55:03.199Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/73/88549128bb89e66d2aff242488f62869014ae092db63ccea53c1cc75a81d/propcache-0.3.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cabf5b5902272565e78197edb682017d21cf3b550ba0460ee473753f28d23c1", size = 196016, upload-time = "2025-06-09T22:55:04.518Z" }, + { url = "https://files.pythonhosted.org/packages/b9/3f/3bdd14e737d145114a5eb83cb172903afba7242f67c5877f9909a20d948d/propcache-0.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0a2f2235ac46a7aa25bdeb03a9e7060f6ecbd213b1f9101c43b3090ffb971ef6", size = 205596, upload-time = "2025-06-09T22:55:05.942Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ca/2f4aa819c357d3107c3763d7ef42c03980f9ed5c48c82e01e25945d437c1/propcache-0.3.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:92b69e12e34869a6970fd2f3da91669899994b47c98f5d430b781c26f1d9f387", size = 200977, upload-time = "2025-06-09T22:55:07.792Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4a/e65276c7477533c59085251ae88505caf6831c0e85ff8b2e31ebcbb949b1/propcache-0.3.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:54e02207c79968ebbdffc169591009f4474dde3b4679e16634d34c9363ff56b4", size = 197220, upload-time = "2025-06-09T22:55:09.173Z" }, + { url = "https://files.pythonhosted.org/packages/7c/54/fc7152e517cf5578278b242396ce4d4b36795423988ef39bb8cd5bf274c8/propcache-0.3.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4adfb44cb588001f68c5466579d3f1157ca07f7504fc91ec87862e2b8e556b88", size = 210642, upload-time = "2025-06-09T22:55:10.62Z" }, + { url = "https://files.pythonhosted.org/packages/b9/80/abeb4a896d2767bf5f1ea7b92eb7be6a5330645bd7fb844049c0e4045d9d/propcache-0.3.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fd3e6019dc1261cd0291ee8919dd91fbab7b169bb76aeef6c716833a3f65d206", size = 212789, upload-time = "2025-06-09T22:55:12.029Z" }, + { url = 
"https://files.pythonhosted.org/packages/b3/db/ea12a49aa7b2b6d68a5da8293dcf50068d48d088100ac016ad92a6a780e6/propcache-0.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4c181cad81158d71c41a2bce88edce078458e2dd5ffee7eddd6b05da85079f43", size = 205880, upload-time = "2025-06-09T22:55:13.45Z" }, + { url = "https://files.pythonhosted.org/packages/d1/e5/9076a0bbbfb65d1198007059c65639dfd56266cf8e477a9707e4b1999ff4/propcache-0.3.2-cp313-cp313-win32.whl", hash = "sha256:8a08154613f2249519e549de2330cf8e2071c2887309a7b07fb56098f5170a02", size = 37220, upload-time = "2025-06-09T22:55:15.284Z" }, + { url = "https://files.pythonhosted.org/packages/d3/f5/b369e026b09a26cd77aa88d8fffd69141d2ae00a2abaaf5380d2603f4b7f/propcache-0.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:e41671f1594fc4ab0a6dec1351864713cb3a279910ae8b58f884a88a0a632c05", size = 40678, upload-time = "2025-06-09T22:55:16.445Z" }, + { url = "https://files.pythonhosted.org/packages/a4/3a/6ece377b55544941a08d03581c7bc400a3c8cd3c2865900a68d5de79e21f/propcache-0.3.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9a3cf035bbaf035f109987d9d55dc90e4b0e36e04bbbb95af3055ef17194057b", size = 76560, upload-time = "2025-06-09T22:55:17.598Z" }, + { url = "https://files.pythonhosted.org/packages/0c/da/64a2bb16418740fa634b0e9c3d29edff1db07f56d3546ca2d86ddf0305e1/propcache-0.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:156c03d07dc1323d8dacaa221fbe028c5c70d16709cdd63502778e6c3ccca1b0", size = 44676, upload-time = "2025-06-09T22:55:18.922Z" }, + { url = "https://files.pythonhosted.org/packages/36/7b/f025e06ea51cb72c52fb87e9b395cced02786610b60a3ed51da8af017170/propcache-0.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:74413c0ba02ba86f55cf60d18daab219f7e531620c15f1e23d95563f505efe7e", size = 44701, upload-time = "2025-06-09T22:55:20.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/00/faa1b1b7c3b74fc277f8642f32a4c72ba1d7b2de36d7cdfb676db7f4303e/propcache-0.3.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f066b437bb3fa39c58ff97ab2ca351db465157d68ed0440abecb21715eb24b28", size = 276934, upload-time = "2025-06-09T22:55:21.5Z" }, + { url = "https://files.pythonhosted.org/packages/74/ab/935beb6f1756e0476a4d5938ff44bf0d13a055fed880caf93859b4f1baf4/propcache-0.3.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f1304b085c83067914721e7e9d9917d41ad87696bf70f0bc7dee450e9c71ad0a", size = 278316, upload-time = "2025-06-09T22:55:22.918Z" }, + { url = "https://files.pythonhosted.org/packages/f8/9d/994a5c1ce4389610838d1caec74bdf0e98b306c70314d46dbe4fcf21a3e2/propcache-0.3.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ab50cef01b372763a13333b4e54021bdcb291fc9a8e2ccb9c2df98be51bcde6c", size = 282619, upload-time = "2025-06-09T22:55:24.651Z" }, + { url = "https://files.pythonhosted.org/packages/2b/00/a10afce3d1ed0287cef2e09506d3be9822513f2c1e96457ee369adb9a6cd/propcache-0.3.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fad3b2a085ec259ad2c2842666b2a0a49dea8463579c606426128925af1ed725", size = 265896, upload-time = "2025-06-09T22:55:26.049Z" }, + { url = "https://files.pythonhosted.org/packages/2e/a8/2aa6716ffa566ca57c749edb909ad27884680887d68517e4be41b02299f3/propcache-0.3.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:261fa020c1c14deafd54c76b014956e2f86991af198c51139faf41c4d5e83892", size = 252111, upload-time = "2025-06-09T22:55:27.381Z" }, + { url = "https://files.pythonhosted.org/packages/36/4f/345ca9183b85ac29c8694b0941f7484bf419c7f0fea2d1e386b4f7893eed/propcache-0.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:46d7f8aa79c927e5f987ee3a80205c987717d3659f035c85cf0c3680526bdb44", size = 268334, upload-time = 
"2025-06-09T22:55:28.747Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ca/fcd54f78b59e3f97b3b9715501e3147f5340167733d27db423aa321e7148/propcache-0.3.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:6d8f3f0eebf73e3c0ff0e7853f68be638b4043c65a70517bb575eff54edd8dbe", size = 255026, upload-time = "2025-06-09T22:55:30.184Z" }, + { url = "https://files.pythonhosted.org/packages/8b/95/8e6a6bbbd78ac89c30c225210a5c687790e532ba4088afb8c0445b77ef37/propcache-0.3.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:03c89c1b14a5452cf15403e291c0ccd7751d5b9736ecb2c5bab977ad6c5bcd81", size = 250724, upload-time = "2025-06-09T22:55:31.646Z" }, + { url = "https://files.pythonhosted.org/packages/ee/b0/0dd03616142baba28e8b2d14ce5df6631b4673850a3d4f9c0f9dd714a404/propcache-0.3.2-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:0cc17efde71e12bbaad086d679ce575268d70bc123a5a71ea7ad76f70ba30bba", size = 268868, upload-time = "2025-06-09T22:55:33.209Z" }, + { url = "https://files.pythonhosted.org/packages/c5/98/2c12407a7e4fbacd94ddd32f3b1e3d5231e77c30ef7162b12a60e2dd5ce3/propcache-0.3.2-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:acdf05d00696bc0447e278bb53cb04ca72354e562cf88ea6f9107df8e7fd9770", size = 271322, upload-time = "2025-06-09T22:55:35.065Z" }, + { url = "https://files.pythonhosted.org/packages/35/91/9cb56efbb428b006bb85db28591e40b7736847b8331d43fe335acf95f6c8/propcache-0.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4445542398bd0b5d32df908031cb1b30d43ac848e20470a878b770ec2dcc6330", size = 265778, upload-time = "2025-06-09T22:55:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4c/b0fe775a2bdd01e176b14b574be679d84fc83958335790f7c9a686c1f468/propcache-0.3.2-cp313-cp313t-win32.whl", hash = "sha256:f86e5d7cd03afb3a1db8e9f9f6eff15794e79e791350ac48a8c924e6f439f394", size = 41175, upload-time = "2025-06-09T22:55:38.436Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/ff/47f08595e3d9b5e149c150f88d9714574f1a7cbd89fe2817158a952674bf/propcache-0.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:9704bedf6e7cbe3c65eca4379a9b53ee6a83749f047808cbb5044d40d7d72198", size = 44857, upload-time = "2025-06-09T22:55:39.687Z" }, + { url = "https://files.pythonhosted.org/packages/cc/35/cc0aaecf278bb4575b8555f2b137de5ab821595ddae9da9d3cd1da4072c7/propcache-0.3.2-py3-none-any.whl", hash = "sha256:98f1ec44fb675f5052cccc8e609c46ed23a35a1cfd18545ad4e29002d858a43f", size = 12663, upload-time = "2025-06-09T22:56:04.484Z" }, +] + [[package]] name = "proto-plus" version = "1.26.1" @@ -1009,57 +1654,59 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f7/af/ab3c51ab7507a7325e98ffe691d9495ee3d3aa5f589afad65ec920d39821/protobuf-6.31.1-py3-none-any.whl", hash = "sha256:720a6c7e6b77288b85063569baae8536671b39f15cc22037ec7045658d80489e", size = 168724, upload-time = "2025-05-28T19:25:53.926Z" }, ] +[[package]] +name = "pyaml" +version = "25.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/01/41f63d66a801a561c9e335523516bd5f761bc43cc61f8b75918306bf2da8/pyaml-25.7.0.tar.gz", hash = "sha256:e113a64ec16881bf2b092e2beb84b7dcf1bd98096ad17f5f14e8fb782a75d99b", size = 29814, upload-time = "2025-07-10T18:44:51.824Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/ee/a878f2ad010cbccb311f947f0f2f09d38f613938ee28c34e60fceecc75a1/pyaml-25.7.0-py3-none-any.whl", hash = "sha256:ce5d7867cc2b455efdb9b0448324ff7b9f74d99f64650f12ca570102db6b985f", size = 26418, upload-time = "2025-07-10T18:44:50.679Z" }, +] + [[package]] name = "pyarrow" -version = "20.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/ee/a7810cb9f3d6e9238e61d312076a9859bf3668fd21c69744de9532383912/pyarrow-20.0.0.tar.gz", hash = 
"sha256:febc4a913592573c8d5805091a6c2b5064c8bd6e002131f01061797d91c783c1", size = 1125187, upload-time = "2025-04-27T12:34:23.264Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/5b/23/77094eb8ee0dbe88441689cb6afc40ac312a1e15d3a7acc0586999518222/pyarrow-20.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:c7dd06fd7d7b410ca5dc839cc9d485d2bc4ae5240851bcd45d85105cc90a47d7", size = 30832591, upload-time = "2025-04-27T12:27:27.89Z" }, - { url = "https://files.pythonhosted.org/packages/c3/d5/48cc573aff00d62913701d9fac478518f693b30c25f2c157550b0b2565cb/pyarrow-20.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:d5382de8dc34c943249b01c19110783d0d64b207167c728461add1ecc2db88e4", size = 32273686, upload-time = "2025-04-27T12:27:36.816Z" }, - { url = "https://files.pythonhosted.org/packages/37/df/4099b69a432b5cb412dd18adc2629975544d656df3d7fda6d73c5dba935d/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6415a0d0174487456ddc9beaead703d0ded5966129fa4fd3114d76b5d1c5ceae", size = 41337051, upload-time = "2025-04-27T12:27:44.4Z" }, - { url = "https://files.pythonhosted.org/packages/4c/27/99922a9ac1c9226f346e3a1e15e63dee6f623ed757ff2893f9d6994a69d3/pyarrow-20.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15aa1b3b2587e74328a730457068dc6c89e6dcbf438d4369f572af9d320a25ee", size = 42404659, upload-time = "2025-04-27T12:27:51.715Z" }, - { url = "https://files.pythonhosted.org/packages/21/d1/71d91b2791b829c9e98f1e0d85be66ed93aff399f80abb99678511847eaa/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:5605919fbe67a7948c1f03b9f3727d82846c053cd2ce9303ace791855923fd20", size = 40695446, upload-time = "2025-04-27T12:27:59.643Z" }, - { url = "https://files.pythonhosted.org/packages/f1/ca/ae10fba419a6e94329707487835ec721f5a95f3ac9168500bcf7aa3813c7/pyarrow-20.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = 
"sha256:a5704f29a74b81673d266e5ec1fe376f060627c2e42c5c7651288ed4b0db29e9", size = 42278528, upload-time = "2025-04-27T12:28:07.297Z" }, - { url = "https://files.pythonhosted.org/packages/7a/a6/aba40a2bf01b5d00cf9cd16d427a5da1fad0fb69b514ce8c8292ab80e968/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:00138f79ee1b5aca81e2bdedb91e3739b987245e11fa3c826f9e57c5d102fb75", size = 42918162, upload-time = "2025-04-27T12:28:15.716Z" }, - { url = "https://files.pythonhosted.org/packages/93/6b/98b39650cd64f32bf2ec6d627a9bd24fcb3e4e6ea1873c5e1ea8a83b1a18/pyarrow-20.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f2d67ac28f57a362f1a2c1e6fa98bfe2f03230f7e15927aecd067433b1e70ce8", size = 44550319, upload-time = "2025-04-27T12:28:27.026Z" }, - { url = "https://files.pythonhosted.org/packages/ab/32/340238be1eb5037e7b5de7e640ee22334417239bc347eadefaf8c373936d/pyarrow-20.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:4a8b029a07956b8d7bd742ffca25374dd3f634b35e46cc7a7c3fa4c75b297191", size = 25770759, upload-time = "2025-04-27T12:28:33.702Z" }, - { url = "https://files.pythonhosted.org/packages/47/a2/b7930824181ceadd0c63c1042d01fa4ef63eee233934826a7a2a9af6e463/pyarrow-20.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:24ca380585444cb2a31324c546a9a56abbe87e26069189e14bdba19c86c049f0", size = 30856035, upload-time = "2025-04-27T12:28:40.78Z" }, - { url = "https://files.pythonhosted.org/packages/9b/18/c765770227d7f5bdfa8a69f64b49194352325c66a5c3bb5e332dfd5867d9/pyarrow-20.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:95b330059ddfdc591a3225f2d272123be26c8fa76e8c9ee1a77aad507361cfdb", size = 32309552, upload-time = "2025-04-27T12:28:47.051Z" }, - { url = "https://files.pythonhosted.org/packages/44/fb/dfb2dfdd3e488bb14f822d7335653092dde150cffc2da97de6e7500681f9/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f0fb1041267e9968c6d0d2ce3ff92e3928b243e2b6d11eeb84d9ac547308232", size = 41334704, 
upload-time = "2025-04-27T12:28:55.064Z" }, - { url = "https://files.pythonhosted.org/packages/58/0d/08a95878d38808051a953e887332d4a76bc06c6ee04351918ee1155407eb/pyarrow-20.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8ff87cc837601532cc8242d2f7e09b4e02404de1b797aee747dd4ba4bd6313f", size = 42399836, upload-time = "2025-04-27T12:29:02.13Z" }, - { url = "https://files.pythonhosted.org/packages/f3/cd/efa271234dfe38f0271561086eedcad7bc0f2ddd1efba423916ff0883684/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:7a3a5dcf54286e6141d5114522cf31dd67a9e7c9133d150799f30ee302a7a1ab", size = 40711789, upload-time = "2025-04-27T12:29:09.951Z" }, - { url = "https://files.pythonhosted.org/packages/46/1f/7f02009bc7fc8955c391defee5348f510e589a020e4b40ca05edcb847854/pyarrow-20.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a6ad3e7758ecf559900261a4df985662df54fb7fdb55e8e3b3aa99b23d526b62", size = 42301124, upload-time = "2025-04-27T12:29:17.187Z" }, - { url = "https://files.pythonhosted.org/packages/4f/92/692c562be4504c262089e86757a9048739fe1acb4024f92d39615e7bab3f/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6bb830757103a6cb300a04610e08d9636f0cd223d32f388418ea893a3e655f1c", size = 42916060, upload-time = "2025-04-27T12:29:24.253Z" }, - { url = "https://files.pythonhosted.org/packages/a4/ec/9f5c7e7c828d8e0a3c7ef50ee62eca38a7de2fa6eb1b8fa43685c9414fef/pyarrow-20.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:96e37f0766ecb4514a899d9a3554fadda770fb57ddf42b63d80f14bc20aa7db3", size = 44547640, upload-time = "2025-04-27T12:29:32.782Z" }, - { url = "https://files.pythonhosted.org/packages/54/96/46613131b4727f10fd2ffa6d0d6f02efcc09a0e7374eff3b5771548aa95b/pyarrow-20.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3346babb516f4b6fd790da99b98bed9708e3f02e734c84971faccb20736848dc", size = 25781491, upload-time = "2025-04-27T12:29:38.464Z" }, - { url = 
"https://files.pythonhosted.org/packages/a1/d6/0c10e0d54f6c13eb464ee9b67a68b8c71bcf2f67760ef5b6fbcddd2ab05f/pyarrow-20.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:75a51a5b0eef32727a247707d4755322cb970be7e935172b6a3a9f9ae98404ba", size = 30815067, upload-time = "2025-04-27T12:29:44.384Z" }, - { url = "https://files.pythonhosted.org/packages/7e/e2/04e9874abe4094a06fd8b0cbb0f1312d8dd7d707f144c2ec1e5e8f452ffa/pyarrow-20.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:211d5e84cecc640c7a3ab900f930aaff5cd2702177e0d562d426fb7c4f737781", size = 32297128, upload-time = "2025-04-27T12:29:52.038Z" }, - { url = "https://files.pythonhosted.org/packages/31/fd/c565e5dcc906a3b471a83273039cb75cb79aad4a2d4a12f76cc5ae90a4b8/pyarrow-20.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ba3cf4182828be7a896cbd232aa8dd6a31bd1f9e32776cc3796c012855e1199", size = 41334890, upload-time = "2025-04-27T12:29:59.452Z" }, - { url = "https://files.pythonhosted.org/packages/af/a9/3bdd799e2c9b20c1ea6dc6fa8e83f29480a97711cf806e823f808c2316ac/pyarrow-20.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c3a01f313ffe27ac4126f4c2e5ea0f36a5fc6ab51f8726cf41fee4b256680bd", size = 42421775, upload-time = "2025-04-27T12:30:06.875Z" }, - { url = "https://files.pythonhosted.org/packages/10/f7/da98ccd86354c332f593218101ae56568d5dcedb460e342000bd89c49cc1/pyarrow-20.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:a2791f69ad72addd33510fec7bb14ee06c2a448e06b649e264c094c5b5f7ce28", size = 40687231, upload-time = "2025-04-27T12:30:13.954Z" }, - { url = "https://files.pythonhosted.org/packages/bb/1b/2168d6050e52ff1e6cefc61d600723870bf569cbf41d13db939c8cf97a16/pyarrow-20.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4250e28a22302ce8692d3a0e8ec9d9dde54ec00d237cff4dfa9c1fbf79e472a8", size = 42295639, upload-time = "2025-04-27T12:30:21.949Z" }, - { url = 
"https://files.pythonhosted.org/packages/b2/66/2d976c0c7158fd25591c8ca55aee026e6d5745a021915a1835578707feb3/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:89e030dc58fc760e4010148e6ff164d2f44441490280ef1e97a542375e41058e", size = 42908549, upload-time = "2025-04-27T12:30:29.551Z" }, - { url = "https://files.pythonhosted.org/packages/31/a9/dfb999c2fc6911201dcbf348247f9cc382a8990f9ab45c12eabfd7243a38/pyarrow-20.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6102b4864d77102dbbb72965618e204e550135a940c2534711d5ffa787df2a5a", size = 44557216, upload-time = "2025-04-27T12:30:36.977Z" }, - { url = "https://files.pythonhosted.org/packages/a0/8e/9adee63dfa3911be2382fb4d92e4b2e7d82610f9d9f668493bebaa2af50f/pyarrow-20.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:96d6a0a37d9c98be08f5ed6a10831d88d52cac7b13f5287f1e0f625a0de8062b", size = 25660496, upload-time = "2025-04-27T12:30:42.809Z" }, - { url = "https://files.pythonhosted.org/packages/9b/aa/daa413b81446d20d4dad2944110dcf4cf4f4179ef7f685dd5a6d7570dc8e/pyarrow-20.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:a15532e77b94c61efadde86d10957950392999503b3616b2ffcef7621a002893", size = 30798501, upload-time = "2025-04-27T12:30:48.351Z" }, - { url = "https://files.pythonhosted.org/packages/ff/75/2303d1caa410925de902d32ac215dc80a7ce7dd8dfe95358c165f2adf107/pyarrow-20.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:dd43f58037443af715f34f1322c782ec463a3c8a94a85fdb2d987ceb5658e061", size = 32277895, upload-time = "2025-04-27T12:30:55.238Z" }, - { url = "https://files.pythonhosted.org/packages/92/41/fe18c7c0b38b20811b73d1bdd54b1fccba0dab0e51d2048878042d84afa8/pyarrow-20.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa0d288143a8585806e3cc7c39566407aab646fb9ece164609dac1cfff45f6ae", size = 41327322, upload-time = "2025-04-27T12:31:05.587Z" }, - { url = 
"https://files.pythonhosted.org/packages/da/ab/7dbf3d11db67c72dbf36ae63dcbc9f30b866c153b3a22ef728523943eee6/pyarrow-20.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6953f0114f8d6f3d905d98e987d0924dabce59c3cda380bdfaa25a6201563b4", size = 42411441, upload-time = "2025-04-27T12:31:15.675Z" }, - { url = "https://files.pythonhosted.org/packages/90/c3/0c7da7b6dac863af75b64e2f827e4742161128c350bfe7955b426484e226/pyarrow-20.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:991f85b48a8a5e839b2128590ce07611fae48a904cae6cab1f089c5955b57eb5", size = 40677027, upload-time = "2025-04-27T12:31:24.631Z" }, - { url = "https://files.pythonhosted.org/packages/be/27/43a47fa0ff9053ab5203bb3faeec435d43c0d8bfa40179bfd076cdbd4e1c/pyarrow-20.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:97c8dc984ed09cb07d618d57d8d4b67a5100a30c3818c2fb0b04599f0da2de7b", size = 42281473, upload-time = "2025-04-27T12:31:31.311Z" }, - { url = "https://files.pythonhosted.org/packages/bc/0b/d56c63b078876da81bbb9ba695a596eabee9b085555ed12bf6eb3b7cab0e/pyarrow-20.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9b71daf534f4745818f96c214dbc1e6124d7daf059167330b610fc69b6f3d3e3", size = 42893897, upload-time = "2025-04-27T12:31:39.406Z" }, - { url = "https://files.pythonhosted.org/packages/92/ac/7d4bd020ba9145f354012838692d48300c1b8fe5634bfda886abcada67ed/pyarrow-20.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e8b88758f9303fa5a83d6c90e176714b2fd3852e776fc2d7e42a22dd6c2fb368", size = 44543847, upload-time = "2025-04-27T12:31:45.997Z" }, - { url = "https://files.pythonhosted.org/packages/9d/07/290f4abf9ca702c5df7b47739c1b2c83588641ddfa2cc75e34a301d42e55/pyarrow-20.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:30b3051b7975801c1e1d387e17c588d8ab05ced9b1e14eec57915f79869b5031", size = 25653219, upload-time = "2025-04-27T12:31:54.11Z" }, - { url = 
"https://files.pythonhosted.org/packages/95/df/720bb17704b10bd69dde086e1400b8eefb8f58df3f8ac9cff6c425bf57f1/pyarrow-20.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:ca151afa4f9b7bc45bcc791eb9a89e90a9eb2772767d0b1e5389609c7d03db63", size = 30853957, upload-time = "2025-04-27T12:31:59.215Z" }, - { url = "https://files.pythonhosted.org/packages/d9/72/0d5f875efc31baef742ba55a00a25213a19ea64d7176e0fe001c5d8b6e9a/pyarrow-20.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:4680f01ecd86e0dd63e39eb5cd59ef9ff24a9d166db328679e36c108dc993d4c", size = 32247972, upload-time = "2025-04-27T12:32:05.369Z" }, - { url = "https://files.pythonhosted.org/packages/d5/bc/e48b4fa544d2eea72f7844180eb77f83f2030b84c8dad860f199f94307ed/pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f4c8534e2ff059765647aa69b75d6543f9fef59e2cd4c6d18015192565d2b70", size = 41256434, upload-time = "2025-04-27T12:32:11.814Z" }, - { url = "https://files.pythonhosted.org/packages/c3/01/974043a29874aa2cf4f87fb07fd108828fc7362300265a2a64a94965e35b/pyarrow-20.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e1f8a47f4b4ae4c69c4d702cfbdfe4d41e18e5c7ef6f1bb1c50918c1e81c57b", size = 42353648, upload-time = "2025-04-27T12:32:20.766Z" }, - { url = "https://files.pythonhosted.org/packages/68/95/cc0d3634cde9ca69b0e51cbe830d8915ea32dda2157560dda27ff3b3337b/pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:a1f60dc14658efaa927f8214734f6a01a806d7690be4b3232ba526836d216122", size = 40619853, upload-time = "2025-04-27T12:32:28.1Z" }, - { url = "https://files.pythonhosted.org/packages/29/c2/3ad40e07e96a3e74e7ed7cc8285aadfa84eb848a798c98ec0ad009eb6bcc/pyarrow-20.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:204a846dca751428991346976b914d6d2a82ae5b8316a6ed99789ebf976551e6", size = 42241743, upload-time = "2025-04-27T12:32:35.792Z" }, - { url = 
"https://files.pythonhosted.org/packages/eb/cb/65fa110b483339add6a9bc7b6373614166b14e20375d4daa73483755f830/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f3b117b922af5e4c6b9a9115825726cac7d8b1421c37c2b5e24fbacc8930612c", size = 42839441, upload-time = "2025-04-27T12:32:46.64Z" }, - { url = "https://files.pythonhosted.org/packages/98/7b/f30b1954589243207d7a0fbc9997401044bf9a033eec78f6cb50da3f304a/pyarrow-20.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e724a3fd23ae5b9c010e7be857f4405ed5e679db5c93e66204db1a69f733936a", size = 44503279, upload-time = "2025-04-27T12:32:56.503Z" }, - { url = "https://files.pythonhosted.org/packages/37/40/ad395740cd641869a13bcf60851296c89624662575621968dcfafabaa7f6/pyarrow-20.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:82f1ee5133bd8f49d31be1299dc07f585136679666b502540db854968576faf9", size = 25944982, upload-time = "2025-04-27T12:33:04.72Z" }, +version = "21.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/c2/ea068b8f00905c06329a3dfcd40d0fcc2b7d0f2e355bdb25b65e0a0e4cd4/pyarrow-21.0.0.tar.gz", hash = "sha256:5051f2dccf0e283ff56335760cbc8622cf52264d67e359d5569541ac11b6d5bc", size = 1133487, upload-time = "2025-07-18T00:57:31.761Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/d9/110de31880016e2afc52d8580b397dbe47615defbf09ca8cf55f56c62165/pyarrow-21.0.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e563271e2c5ff4d4a4cbeb2c83d5cf0d4938b891518e676025f7268c6fe5fe26", size = 31196837, upload-time = "2025-07-18T00:54:34.755Z" }, + { url = "https://files.pythonhosted.org/packages/df/5f/c1c1997613abf24fceb087e79432d24c19bc6f7259cab57c2c8e5e545fab/pyarrow-21.0.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:fee33b0ca46f4c85443d6c450357101e47d53e6c3f008d658c27a2d020d44c79", size = 32659470, upload-time = "2025-07-18T00:54:38.329Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/ed/b1589a777816ee33ba123ba1e4f8f02243a844fed0deec97bde9fb21a5cf/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:7be45519b830f7c24b21d630a31d48bcebfd5d4d7f9d3bdb49da9cdf6d764edb", size = 41055619, upload-time = "2025-07-18T00:54:42.172Z" }, + { url = "https://files.pythonhosted.org/packages/44/28/b6672962639e85dc0ac36f71ab3a8f5f38e01b51343d7aa372a6b56fa3f3/pyarrow-21.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:26bfd95f6bff443ceae63c65dc7e048670b7e98bc892210acba7e4995d3d4b51", size = 42733488, upload-time = "2025-07-18T00:54:47.132Z" }, + { url = "https://files.pythonhosted.org/packages/f8/cc/de02c3614874b9089c94eac093f90ca5dfa6d5afe45de3ba847fd950fdf1/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bd04ec08f7f8bd113c55868bd3fc442a9db67c27af098c5f814a3091e71cc61a", size = 43329159, upload-time = "2025-07-18T00:54:51.686Z" }, + { url = "https://files.pythonhosted.org/packages/a6/3e/99473332ac40278f196e105ce30b79ab8affab12f6194802f2593d6b0be2/pyarrow-21.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9b0b14b49ac10654332a805aedfc0147fb3469cbf8ea951b3d040dab12372594", size = 45050567, upload-time = "2025-07-18T00:54:56.679Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f5/c372ef60593d713e8bfbb7e0c743501605f0ad00719146dc075faf11172b/pyarrow-21.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:9d9f8bcb4c3be7738add259738abdeddc363de1b80e3310e04067aa1ca596634", size = 26217959, upload-time = "2025-07-18T00:55:00.482Z" }, + { url = "https://files.pythonhosted.org/packages/94/dc/80564a3071a57c20b7c32575e4a0120e8a330ef487c319b122942d665960/pyarrow-21.0.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c077f48aab61738c237802836fc3844f85409a46015635198761b0d6a688f87b", size = 31243234, upload-time = "2025-07-18T00:55:03.812Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/cc/3b51cb2db26fe535d14f74cab4c79b191ed9a8cd4cbba45e2379b5ca2746/pyarrow-21.0.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:689f448066781856237eca8d1975b98cace19b8dd2ab6145bf49475478bcaa10", size = 32714370, upload-time = "2025-07-18T00:55:07.495Z" }, + { url = "https://files.pythonhosted.org/packages/24/11/a4431f36d5ad7d83b87146f515c063e4d07ef0b7240876ddb885e6b44f2e/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:479ee41399fcddc46159a551705b89c05f11e8b8cb8e968f7fec64f62d91985e", size = 41135424, upload-time = "2025-07-18T00:55:11.461Z" }, + { url = "https://files.pythonhosted.org/packages/74/dc/035d54638fc5d2971cbf1e987ccd45f1091c83bcf747281cf6cc25e72c88/pyarrow-21.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:40ebfcb54a4f11bcde86bc586cbd0272bac0d516cfa539c799c2453768477569", size = 42823810, upload-time = "2025-07-18T00:55:16.301Z" }, + { url = "https://files.pythonhosted.org/packages/2e/3b/89fced102448a9e3e0d4dded1f37fa3ce4700f02cdb8665457fcc8015f5b/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8d58d8497814274d3d20214fbb24abcad2f7e351474357d552a8d53bce70c70e", size = 43391538, upload-time = "2025-07-18T00:55:23.82Z" }, + { url = "https://files.pythonhosted.org/packages/fb/bb/ea7f1bd08978d39debd3b23611c293f64a642557e8141c80635d501e6d53/pyarrow-21.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:585e7224f21124dd57836b1530ac8f2df2afc43c861d7bf3d58a4870c42ae36c", size = 45120056, upload-time = "2025-07-18T00:55:28.231Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0b/77ea0600009842b30ceebc3337639a7380cd946061b620ac1a2f3cb541e2/pyarrow-21.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:555ca6935b2cbca2c0e932bedd853e9bc523098c39636de9ad4693b5b1df86d6", size = 26220568, upload-time = "2025-07-18T00:55:32.122Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/d4/d4f817b21aacc30195cf6a46ba041dd1be827efa4a623cc8bf39a1c2a0c0/pyarrow-21.0.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:3a302f0e0963db37e0a24a70c56cf91a4faa0bca51c23812279ca2e23481fccd", size = 31160305, upload-time = "2025-07-18T00:55:35.373Z" }, + { url = "https://files.pythonhosted.org/packages/a2/9c/dcd38ce6e4b4d9a19e1d36914cb8e2b1da4e6003dd075474c4cfcdfe0601/pyarrow-21.0.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:b6b27cf01e243871390474a211a7922bfbe3bda21e39bc9160daf0da3fe48876", size = 32684264, upload-time = "2025-07-18T00:55:39.303Z" }, + { url = "https://files.pythonhosted.org/packages/4f/74/2a2d9f8d7a59b639523454bec12dba35ae3d0a07d8ab529dc0809f74b23c/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:e72a8ec6b868e258a2cd2672d91f2860ad532d590ce94cdf7d5e7ec674ccf03d", size = 41108099, upload-time = "2025-07-18T00:55:42.889Z" }, + { url = "https://files.pythonhosted.org/packages/ad/90/2660332eeb31303c13b653ea566a9918484b6e4d6b9d2d46879a33ab0622/pyarrow-21.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:b7ae0bbdc8c6674259b25bef5d2a1d6af5d39d7200c819cf99e07f7dfef1c51e", size = 42829529, upload-time = "2025-07-18T00:55:47.069Z" }, + { url = "https://files.pythonhosted.org/packages/33/27/1a93a25c92717f6aa0fca06eb4700860577d016cd3ae51aad0e0488ac899/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:58c30a1729f82d201627c173d91bd431db88ea74dcaa3885855bc6203e433b82", size = 43367883, upload-time = "2025-07-18T00:55:53.069Z" }, + { url = "https://files.pythonhosted.org/packages/05/d9/4d09d919f35d599bc05c6950095e358c3e15148ead26292dfca1fb659b0c/pyarrow-21.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:072116f65604b822a7f22945a7a6e581cfa28e3454fdcc6939d4ff6090126623", size = 45133802, upload-time = "2025-07-18T00:55:57.714Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/30/f3795b6e192c3ab881325ffe172e526499eb3780e306a15103a2764916a2/pyarrow-21.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:cf56ec8b0a5c8c9d7021d6fd754e688104f9ebebf1bf4449613c9531f5346a18", size = 26203175, upload-time = "2025-07-18T00:56:01.364Z" }, + { url = "https://files.pythonhosted.org/packages/16/ca/c7eaa8e62db8fb37ce942b1ea0c6d7abfe3786ca193957afa25e71b81b66/pyarrow-21.0.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:e99310a4ebd4479bcd1964dff9e14af33746300cb014aa4a3781738ac63baf4a", size = 31154306, upload-time = "2025-07-18T00:56:04.42Z" }, + { url = "https://files.pythonhosted.org/packages/ce/e8/e87d9e3b2489302b3a1aea709aaca4b781c5252fcb812a17ab6275a9a484/pyarrow-21.0.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:d2fe8e7f3ce329a71b7ddd7498b3cfac0eeb200c2789bd840234f0dc271a8efe", size = 32680622, upload-time = "2025-07-18T00:56:07.505Z" }, + { url = "https://files.pythonhosted.org/packages/84/52/79095d73a742aa0aba370c7942b1b655f598069489ab387fe47261a849e1/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f522e5709379d72fb3da7785aa489ff0bb87448a9dc5a75f45763a795a089ebd", size = 41104094, upload-time = "2025-07-18T00:56:10.994Z" }, + { url = "https://files.pythonhosted.org/packages/89/4b/7782438b551dbb0468892a276b8c789b8bbdb25ea5c5eb27faadd753e037/pyarrow-21.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:69cbbdf0631396e9925e048cfa5bce4e8c3d3b41562bbd70c685a8eb53a91e61", size = 42825576, upload-time = "2025-07-18T00:56:15.569Z" }, + { url = "https://files.pythonhosted.org/packages/b3/62/0f29de6e0a1e33518dec92c65be0351d32d7ca351e51ec5f4f837a9aab91/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:731c7022587006b755d0bdb27626a1a3bb004bb56b11fb30d98b6c1b4718579d", size = 43368342, upload-time = "2025-07-18T00:56:19.531Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/c7/0fa1f3f29cf75f339768cc698c8ad4ddd2481c1742e9741459911c9ac477/pyarrow-21.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:dc56bc708f2d8ac71bd1dcb927e458c93cec10b98eb4120206a4091db7b67b99", size = 45131218, upload-time = "2025-07-18T00:56:23.347Z" }, + { url = "https://files.pythonhosted.org/packages/01/63/581f2076465e67b23bc5a37d4a2abff8362d389d29d8105832e82c9c811c/pyarrow-21.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:186aa00bca62139f75b7de8420f745f2af12941595bbbfa7ed3870ff63e25636", size = 26087551, upload-time = "2025-07-18T00:56:26.758Z" }, + { url = "https://files.pythonhosted.org/packages/c9/ab/357d0d9648bb8241ee7348e564f2479d206ebe6e1c47ac5027c2e31ecd39/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:a7a102574faa3f421141a64c10216e078df467ab9576684d5cd696952546e2da", size = 31290064, upload-time = "2025-07-18T00:56:30.214Z" }, + { url = "https://files.pythonhosted.org/packages/3f/8a/5685d62a990e4cac2043fc76b4661bf38d06efed55cf45a334b455bd2759/pyarrow-21.0.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:1e005378c4a2c6db3ada3ad4c217b381f6c886f0a80d6a316fe586b90f77efd7", size = 32727837, upload-time = "2025-07-18T00:56:33.935Z" }, + { url = "https://files.pythonhosted.org/packages/fc/de/c0828ee09525c2bafefd3e736a248ebe764d07d0fd762d4f0929dbc516c9/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:65f8e85f79031449ec8706b74504a316805217b35b6099155dd7e227eef0d4b6", size = 41014158, upload-time = "2025-07-18T00:56:37.528Z" }, + { url = "https://files.pythonhosted.org/packages/6e/26/a2865c420c50b7a3748320b614f3484bfcde8347b2639b2b903b21ce6a72/pyarrow-21.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:3a81486adc665c7eb1a2bde0224cfca6ceaba344a82a971ef059678417880eb8", size = 42667885, upload-time = "2025-07-18T00:56:41.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/f9/4ee798dc902533159250fb4321267730bc0a107d8c6889e07c3add4fe3a5/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fc0d2f88b81dcf3ccf9a6ae17f89183762c8a94a5bdcfa09e05cfe413acf0503", size = 43276625, upload-time = "2025-07-18T00:56:48.002Z" }, + { url = "https://files.pythonhosted.org/packages/5a/da/e02544d6997037a4b0d22d8e5f66bc9315c3671371a8b18c79ade1cefe14/pyarrow-21.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6299449adf89df38537837487a4f8d3bd91ec94354fdd2a7d30bc11c48ef6e79", size = 44951890, upload-time = "2025-07-18T00:56:52.568Z" }, + { url = "https://files.pythonhosted.org/packages/e5/4e/519c1bc1876625fe6b71e9a28287c43ec2f20f73c658b9ae1d485c0c206e/pyarrow-21.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:222c39e2c70113543982c6b34f3077962b44fca38c0bd9e68bb6781534425c10", size = 26371006, upload-time = "2025-07-18T00:56:56.379Z" }, ] [[package]] @@ -1236,6 +1883,69 @@ crypto = [ { name = "cryptography" }, ] +[[package]] +name = "pyperclip" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/30/23/2f0a3efc4d6a32f3b63cdff36cd398d9701d26cda58e3ab97ac79fb5e60d/pyperclip-1.9.0.tar.gz", hash = "sha256:b7de0142ddc81bfc5c7507eea19da920b92252b548b96186caf94a5e2527d310", size = 20961, upload-time = "2024-06-18T20:38:48.401Z" } + +[[package]] +name = "pytest" +version = "8.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/08/ba/45911d754e8eba3d5a841a5ce61a65a685ff1798421ac054f85aa8747dfb/pytest-8.4.1.tar.gz", hash = 
"sha256:7c67fd69174877359ed9371ec3af8a3d2b04741818c51e5e99cc1742251fa93c", size = 1517714, upload-time = "2025-06-18T05:48:06.109Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/29/16/c8a903f4c4dffe7a12843191437d7cd8e32751d5de349d45d3fe69544e87/pytest-8.4.1-py3-none-any.whl", hash = "sha256:539c70ba6fcead8e78eebbf1115e8b589e7565830d7d006a8723f19ac8a0afb7", size = 365474, upload-time = "2025-06-18T05:48:03.955Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4e/51/f8794af39eeb870e87a8c8068642fc07bce0c854d6865d7dd0f2a9d338c2/pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea", size = 46652, upload-time = "2025-07-16T04:29:26.393Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/9d/bf86eddabf8c6c9cb1ea9a869d6873b46f105a5d292d3a6f7071f5b07935/pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf", size = 15157, upload-time = "2025-07-16T04:29:24.929Z" }, +] + +[[package]] +name = "pytest-cov" +version = "6.2.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = 
"sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.14.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/28/67172c96ba684058a4d24ffe144d64783d2a270d0af0d9e792737bddc75c/pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e", size = 33241, upload-time = "2025-05-26T13:58:45.167Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/05/77b60e520511c53d1c1ca75f1930c7dd8e971d0c4379b7f4b3f9644685ba/pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0", size = 9923, upload-time = "2025-05-26T13:58:43.487Z" }, +] + [[package]] name = "python-dateutil" version = "2.9.0.post0" @@ -1294,6 +2004,152 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, ] +[[package]] +name = "pywin32" +version = "311" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7b/40/44efbb0dfbd33aca6a6483191dae0716070ed99e2ecb0c53683f400a0b4f/pywin32-311-cp310-cp310-win32.whl", hash = "sha256:d03ff496d2a0cd4a5893504789d4a15399133fe82517455e78bad62efbb7f0a3", size = 8760432, upload-time = "2025-07-14T20:13:05.9Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bf/360243b1e953bd254a82f12653974be395ba880e7ec23e3731d9f73921cc/pywin32-311-cp310-cp310-win_amd64.whl", hash = "sha256:797c2772017851984b97180b0bebe4b620bb86328e8a884bb626156295a63b3b", size = 9590103, upload-time = "2025-07-14T20:13:07.698Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/38/d290720e6f138086fb3d5ffe0b6caa019a791dd57866940c82e4eeaf2012/pywin32-311-cp310-cp310-win_arm64.whl", hash = "sha256:0502d1facf1fed4839a9a51ccbcc63d952cf318f78ffc00a7e78528ac27d7a2b", size = 8778557, upload-time = "2025-07-14T20:13:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/449a6a91e5d6db51420875c54f6aff7c97a86a3b13a0b4f1a5c13b988de3/pywin32-311-cp311-cp311-win32.whl", hash = "sha256:184eb5e436dea364dcd3d2316d577d625c0351bf237c4e9a5fabbcfa5a58b151", size = 8697031, upload-time = "2025-07-14T20:13:13.266Z" }, + { url = "https://files.pythonhosted.org/packages/51/8f/9bb81dd5bb77d22243d33c8397f09377056d5c687aa6d4042bea7fbf8364/pywin32-311-cp311-cp311-win_amd64.whl", hash = "sha256:3ce80b34b22b17ccbd937a6e78e7225d80c52f5ab9940fe0506a1a16f3dab503", size = 9508308, upload-time = "2025-07-14T20:13:15.147Z" }, + { url = "https://files.pythonhosted.org/packages/44/7b/9c2ab54f74a138c491aba1b1cd0795ba61f144c711daea84a88b63dc0f6c/pywin32-311-cp311-cp311-win_arm64.whl", hash = "sha256:a733f1388e1a842abb67ffa8e7aad0e70ac519e09b0f6a784e65a136ec7cefd2", size = 8703930, upload-time = "2025-07-14T20:13:16.945Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ab/01ea1943d4eba0f850c3c61e78e8dd59757ff815ff3ccd0a84de5f541f42/pywin32-311-cp312-cp312-win32.whl", hash = "sha256:750ec6e621af2b948540032557b10a2d43b0cee2ae9758c54154d711cc852d31", size = 8706543, upload-time = "2025-07-14T20:13:20.765Z" }, + { url = "https://files.pythonhosted.org/packages/d1/a8/a0e8d07d4d051ec7502cd58b291ec98dcc0c3fff027caad0470b72cfcc2f/pywin32-311-cp312-cp312-win_amd64.whl", hash = "sha256:b8c095edad5c211ff31c05223658e71bf7116daa0ecf3ad85f3201ea3190d067", size = 9495040, upload-time = "2025-07-14T20:13:22.543Z" }, + { url = "https://files.pythonhosted.org/packages/ba/3a/2ae996277b4b50f17d61f0603efd8253cb2d79cc7ae159468007b586396d/pywin32-311-cp312-cp312-win_arm64.whl", hash = 
"sha256:e286f46a9a39c4a18b319c28f59b61de793654af2f395c102b4f819e584b5852", size = 8710102, upload-time = "2025-07-14T20:13:24.682Z" }, + { url = "https://files.pythonhosted.org/packages/a5/be/3fd5de0979fcb3994bfee0d65ed8ca9506a8a1260651b86174f6a86f52b3/pywin32-311-cp313-cp313-win32.whl", hash = "sha256:f95ba5a847cba10dd8c4d8fefa9f2a6cf283b8b88ed6178fa8a6c1ab16054d0d", size = 8705700, upload-time = "2025-07-14T20:13:26.471Z" }, + { url = "https://files.pythonhosted.org/packages/e3/28/e0a1909523c6890208295a29e05c2adb2126364e289826c0a8bc7297bd5c/pywin32-311-cp313-cp313-win_amd64.whl", hash = "sha256:718a38f7e5b058e76aee1c56ddd06908116d35147e133427e59a3983f703a20d", size = 9494700, upload-time = "2025-07-14T20:13:28.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/bf/90339ac0f55726dce7d794e6d79a18a91265bdf3aa70b6b9ca52f35e022a/pywin32-311-cp313-cp313-win_arm64.whl", hash = "sha256:7b4075d959648406202d92a2310cb990fea19b535c7f4a78d3f5e10b926eeb8a", size = 8709318, upload-time = "2025-07-14T20:13:30.348Z" }, + { url = "https://files.pythonhosted.org/packages/c9/31/097f2e132c4f16d99a22bfb777e0fd88bd8e1c634304e102f313af69ace5/pywin32-311-cp314-cp314-win32.whl", hash = "sha256:b7a2c10b93f8986666d0c803ee19b5990885872a7de910fc460f9b0c2fbf92ee", size = 8840714, upload-time = "2025-07-14T20:13:32.449Z" }, + { url = "https://files.pythonhosted.org/packages/90/4b/07c77d8ba0e01349358082713400435347df8426208171ce297da32c313d/pywin32-311-cp314-cp314-win_amd64.whl", hash = "sha256:3aca44c046bd2ed8c90de9cb8427f581c479e594e99b5c0bb19b29c10fd6cb87", size = 9656800, upload-time = "2025-07-14T20:13:34.312Z" }, + { url = "https://files.pythonhosted.org/packages/c0/d2/21af5c535501a7233e734b8af901574572da66fcc254cb35d0609c9080dd/pywin32-311-cp314-cp314-win_arm64.whl", hash = "sha256:a508e2d9025764a8270f93111a970e1d0fbfc33f4153b388bb649b7eec4f9b42", size = 8932540, upload-time = "2025-07-14T20:13:36.379Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { 
registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, + { url = 
"https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = 
"https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = 
"https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +] + +[[package]] +name = "rapidfuzz" +version = "3.13.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/f6/6895abc3a3d056b9698da3199b04c0e56226d530ae44a470edabf8b664f0/rapidfuzz-3.13.0.tar.gz", hash = "sha256:d2eaf3839e52cbcc0accbe9817a67b4b0fcf70aaeb229cfddc1c28061f9ce5d8", size = 57904226, upload-time = "2025-04-03T20:38:51.226Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/27/ca10b3166024ae19a7e7c21f73c58dfd4b7fef7420e5497ee64ce6b73453/rapidfuzz-3.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aafc42a1dc5e1beeba52cd83baa41372228d6d8266f6d803c16dbabbcc156255", size = 1998899, upload-time = "2025-04-03T20:35:08.764Z" }, + { url = "https://files.pythonhosted.org/packages/f0/38/c4c404b13af0315483a6909b3a29636e18e1359307fb74a333fdccb3730d/rapidfuzz-3.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:85c9a131a44a95f9cac2eb6e65531db014e09d89c4f18c7b1fa54979cb9ff1f3", size = 1449949, upload-time = "2025-04-03T20:35:11.26Z" }, + { url = "https://files.pythonhosted.org/packages/12/ae/15c71d68a6df6b8e24595421fdf5bcb305888318e870b7be8d935a9187ee/rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d7cec4242d30dd521ef91c0df872e14449d1dffc2a6990ede33943b0dae56c3", size = 1424199, upload-time = "2025-04-03T20:35:12.954Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9a/765beb9e14d7b30d12e2d6019e8b93747a0bedbc1d0cce13184fa3825426/rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e297c09972698c95649e89121e3550cee761ca3640cd005e24aaa2619175464e", size = 5352400, upload-time = "2025-04-03T20:35:15.421Z" 
}, + { url = "https://files.pythonhosted.org/packages/e2/b8/49479fe6f06b06cd54d6345ed16de3d1ac659b57730bdbe897df1e059471/rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef0f5f03f61b0e5a57b1df7beafd83df993fd5811a09871bad6038d08e526d0d", size = 1652465, upload-time = "2025-04-03T20:35:18.43Z" }, + { url = "https://files.pythonhosted.org/packages/6f/d8/08823d496b7dd142a7b5d2da04337df6673a14677cfdb72f2604c64ead69/rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8cf5f7cd6e4d5eb272baf6a54e182b2c237548d048e2882258336533f3f02b7", size = 1616590, upload-time = "2025-04-03T20:35:20.482Z" }, + { url = "https://files.pythonhosted.org/packages/38/d4/5cfbc9a997e544f07f301c54d42aac9e0d28d457d543169e4ec859b8ce0d/rapidfuzz-3.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9256218ac8f1a957806ec2fb9a6ddfc6c32ea937c0429e88cf16362a20ed8602", size = 3086956, upload-time = "2025-04-03T20:35:22.756Z" }, + { url = "https://files.pythonhosted.org/packages/25/1e/06d8932a72fa9576095234a15785136407acf8f9a7dbc8136389a3429da1/rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1bdd2e6d0c5f9706ef7595773a81ca2b40f3b33fd7f9840b726fb00c6c4eb2e", size = 2494220, upload-time = "2025-04-03T20:35:25.563Z" }, + { url = "https://files.pythonhosted.org/packages/03/16/5acf15df63119d5ca3d9a54b82807866ff403461811d077201ca351a40c3/rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:5280be8fd7e2bee5822e254fe0a5763aa0ad57054b85a32a3d9970e9b09bbcbf", size = 7585481, upload-time = "2025-04-03T20:35:27.426Z" }, + { url = "https://files.pythonhosted.org/packages/e1/cf/ebade4009431ea8e715e59e882477a970834ddaacd1a670095705b86bd0d/rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd742c03885db1fce798a1cd87a20f47f144ccf26d75d52feb6f2bae3d57af05", size = 2894842, upload-time = "2025-04-03T20:35:29.457Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/bd/0732632bd3f906bf613229ee1b7cbfba77515db714a0e307becfa8a970ae/rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:5435fcac94c9ecf0504bf88a8a60c55482c32e18e108d6079a0089c47f3f8cf6", size = 3438517, upload-time = "2025-04-03T20:35:31.381Z" }, + { url = "https://files.pythonhosted.org/packages/83/89/d3bd47ec9f4b0890f62aea143a1e35f78f3d8329b93d9495b4fa8a3cbfc3/rapidfuzz-3.13.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:93a755266856599be4ab6346273f192acde3102d7aa0735e2f48b456397a041f", size = 4412773, upload-time = "2025-04-03T20:35:33.425Z" }, + { url = "https://files.pythonhosted.org/packages/b3/57/1a152a07883e672fc117c7f553f5b933f6e43c431ac3fd0e8dae5008f481/rapidfuzz-3.13.0-cp310-cp310-win32.whl", hash = "sha256:3abe6a4e8eb4cfc4cda04dd650a2dc6d2934cbdeda5def7e6fd1c20f6e7d2a0b", size = 1842334, upload-time = "2025-04-03T20:35:35.648Z" }, + { url = "https://files.pythonhosted.org/packages/a7/68/7248addf95b6ca51fc9d955161072285da3059dd1472b0de773cff910963/rapidfuzz-3.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:e8ddb58961401da7d6f55f185512c0d6bd24f529a637078d41dd8ffa5a49c107", size = 1624392, upload-time = "2025-04-03T20:35:37.294Z" }, + { url = "https://files.pythonhosted.org/packages/68/23/f41c749f2c61ed1ed5575eaf9e73ef9406bfedbf20a3ffa438d15b5bf87e/rapidfuzz-3.13.0-cp310-cp310-win_arm64.whl", hash = "sha256:c523620d14ebd03a8d473c89e05fa1ae152821920c3ff78b839218ff69e19ca3", size = 865584, upload-time = "2025-04-03T20:35:39.005Z" }, + { url = "https://files.pythonhosted.org/packages/87/17/9be9eff5a3c7dfc831c2511262082c6786dca2ce21aa8194eef1cb71d67a/rapidfuzz-3.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d395a5cad0c09c7f096433e5fd4224d83b53298d53499945a9b0e5a971a84f3a", size = 1999453, upload-time = "2025-04-03T20:35:40.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/67/62e57896ecbabe363f027d24cc769d55dd49019e576533ec10e492fcd8a2/rapidfuzz-3.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b7b3eda607a019169f7187328a8d1648fb9a90265087f6903d7ee3a8eee01805", size = 1450881, upload-time = "2025-04-03T20:35:42.734Z" }, + { url = "https://files.pythonhosted.org/packages/96/5c/691c5304857f3476a7b3df99e91efc32428cbe7d25d234e967cc08346c13/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98e0bfa602e1942d542de077baf15d658bd9d5dcfe9b762aff791724c1c38b70", size = 1422990, upload-time = "2025-04-03T20:35:45.158Z" }, + { url = "https://files.pythonhosted.org/packages/46/81/7a7e78f977496ee2d613154b86b203d373376bcaae5de7bde92f3ad5a192/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bef86df6d59667d9655905b02770a0c776d2853971c0773767d5ef8077acd624", size = 5342309, upload-time = "2025-04-03T20:35:46.952Z" }, + { url = "https://files.pythonhosted.org/packages/51/44/12fdd12a76b190fe94bf38d252bb28ddf0ab7a366b943e792803502901a2/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fedd316c165beed6307bf754dee54d3faca2c47e1f3bcbd67595001dfa11e969", size = 1656881, upload-time = "2025-04-03T20:35:49.954Z" }, + { url = "https://files.pythonhosted.org/packages/27/ae/0d933e660c06fcfb087a0d2492f98322f9348a28b2cc3791a5dbadf6e6fb/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5158da7f2ec02a930be13bac53bb5903527c073c90ee37804090614cab83c29e", size = 1608494, upload-time = "2025-04-03T20:35:51.646Z" }, + { url = "https://files.pythonhosted.org/packages/3d/2c/4b2f8aafdf9400e5599b6ed2f14bc26ca75f5a923571926ccbc998d4246a/rapidfuzz-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b6f913ee4618ddb6d6f3e387b76e8ec2fc5efee313a128809fbd44e65c2bbb2", size = 3072160, upload-time = "2025-04-03T20:35:53.472Z" }, + { url 
= "https://files.pythonhosted.org/packages/60/7d/030d68d9a653c301114101c3003b31ce01cf2c3224034cd26105224cd249/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d25fdbce6459ccbbbf23b4b044f56fbd1158b97ac50994eaae2a1c0baae78301", size = 2491549, upload-time = "2025-04-03T20:35:55.391Z" }, + { url = "https://files.pythonhosted.org/packages/8e/cd/7040ba538fc6a8ddc8816a05ecf46af9988b46c148ddd7f74fb0fb73d012/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:25343ccc589a4579fbde832e6a1e27258bfdd7f2eb0f28cb836d6694ab8591fc", size = 7584142, upload-time = "2025-04-03T20:35:57.71Z" }, + { url = "https://files.pythonhosted.org/packages/c1/96/85f7536fbceb0aa92c04a1c37a3fc4fcd4e80649e9ed0fb585382df82edc/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a9ad1f37894e3ffb76bbab76256e8a8b789657183870be11aa64e306bb5228fd", size = 2896234, upload-time = "2025-04-03T20:35:59.969Z" }, + { url = "https://files.pythonhosted.org/packages/55/fd/460e78438e7019f2462fe9d4ecc880577ba340df7974c8a4cfe8d8d029df/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5dc71ef23845bb6b62d194c39a97bb30ff171389c9812d83030c1199f319098c", size = 3437420, upload-time = "2025-04-03T20:36:01.91Z" }, + { url = "https://files.pythonhosted.org/packages/cc/df/c3c308a106a0993befd140a414c5ea78789d201cf1dfffb8fd9749718d4f/rapidfuzz-3.13.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b7f4c65facdb94f44be759bbd9b6dda1fa54d0d6169cdf1a209a5ab97d311a75", size = 4410860, upload-time = "2025-04-03T20:36:04.352Z" }, + { url = "https://files.pythonhosted.org/packages/75/ee/9d4ece247f9b26936cdeaae600e494af587ce9bf8ddc47d88435f05cfd05/rapidfuzz-3.13.0-cp311-cp311-win32.whl", hash = "sha256:b5104b62711565e0ff6deab2a8f5dbf1fbe333c5155abe26d2cfd6f1849b6c87", size = 1843161, upload-time = "2025-04-03T20:36:06.802Z" }, + { url = 
"https://files.pythonhosted.org/packages/c9/5a/d00e1f63564050a20279015acb29ecaf41646adfacc6ce2e1e450f7f2633/rapidfuzz-3.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:9093cdeb926deb32a4887ebe6910f57fbcdbc9fbfa52252c10b56ef2efb0289f", size = 1629962, upload-time = "2025-04-03T20:36:09.133Z" }, + { url = "https://files.pythonhosted.org/packages/3b/74/0a3de18bc2576b794f41ccd07720b623e840fda219ab57091897f2320fdd/rapidfuzz-3.13.0-cp311-cp311-win_arm64.whl", hash = "sha256:f70f646751b6aa9d05be1fb40372f006cc89d6aad54e9d79ae97bd1f5fce5203", size = 866631, upload-time = "2025-04-03T20:36:11.022Z" }, + { url = "https://files.pythonhosted.org/packages/13/4b/a326f57a4efed8f5505b25102797a58e37ee11d94afd9d9422cb7c76117e/rapidfuzz-3.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a1a6a906ba62f2556372282b1ef37b26bca67e3d2ea957277cfcefc6275cca7", size = 1989501, upload-time = "2025-04-03T20:36:13.43Z" }, + { url = "https://files.pythonhosted.org/packages/b7/53/1f7eb7ee83a06c400089ec7cb841cbd581c2edd7a4b21eb2f31030b88daa/rapidfuzz-3.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2fd0975e015b05c79a97f38883a11236f5a24cca83aa992bd2558ceaa5652b26", size = 1445379, upload-time = "2025-04-03T20:36:16.439Z" }, + { url = "https://files.pythonhosted.org/packages/07/09/de8069a4599cc8e6d194e5fa1782c561151dea7d5e2741767137e2a8c1f0/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d4e13593d298c50c4f94ce453f757b4b398af3fa0fd2fde693c3e51195b7f69", size = 1405986, upload-time = "2025-04-03T20:36:18.447Z" }, + { url = "https://files.pythonhosted.org/packages/5d/77/d9a90b39c16eca20d70fec4ca377fbe9ea4c0d358c6e4736ab0e0e78aaf6/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed6f416bda1c9133000009d84d9409823eb2358df0950231cc936e4bf784eb97", size = 5310809, upload-time = "2025-04-03T20:36:20.324Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/7d/14da291b0d0f22262d19522afaf63bccf39fc027c981233fb2137a57b71f/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dc82b6ed01acb536b94a43996a94471a218f4d89f3fdd9185ab496de4b2a981", size = 1629394, upload-time = "2025-04-03T20:36:22.256Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e4/79ed7e4fa58f37c0f8b7c0a62361f7089b221fe85738ae2dbcfb815e985a/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e9d824de871daa6e443b39ff495a884931970d567eb0dfa213d234337343835f", size = 1600544, upload-time = "2025-04-03T20:36:24.207Z" }, + { url = "https://files.pythonhosted.org/packages/4e/20/e62b4d13ba851b0f36370060025de50a264d625f6b4c32899085ed51f980/rapidfuzz-3.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d18228a2390375cf45726ce1af9d36ff3dc1f11dce9775eae1f1b13ac6ec50f", size = 3052796, upload-time = "2025-04-03T20:36:26.279Z" }, + { url = "https://files.pythonhosted.org/packages/cd/8d/55fdf4387dec10aa177fe3df8dbb0d5022224d95f48664a21d6b62a5299d/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5fe634c9482ec5d4a6692afb8c45d370ae86755e5f57aa6c50bfe4ca2bdd87", size = 2464016, upload-time = "2025-04-03T20:36:28.525Z" }, + { url = "https://files.pythonhosted.org/packages/9b/be/0872f6a56c0f473165d3b47d4170fa75263dc5f46985755aa9bf2bbcdea1/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:694eb531889f71022b2be86f625a4209c4049e74be9ca836919b9e395d5e33b3", size = 7556725, upload-time = "2025-04-03T20:36:30.629Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f3/6c0750e484d885a14840c7a150926f425d524982aca989cdda0bb3bdfa57/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:11b47b40650e06147dee5e51a9c9ad73bb7b86968b6f7d30e503b9f8dd1292db", size = 2859052, upload-time = "2025-04-03T20:36:32.836Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/98/5a3a14701b5eb330f444f7883c9840b43fb29c575e292e09c90a270a6e07/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:98b8107ff14f5af0243f27d236bcc6e1ef8e7e3b3c25df114e91e3a99572da73", size = 3390219, upload-time = "2025-04-03T20:36:35.062Z" }, + { url = "https://files.pythonhosted.org/packages/e9/7d/f4642eaaeb474b19974332f2a58471803448be843033e5740965775760a5/rapidfuzz-3.13.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b836f486dba0aceb2551e838ff3f514a38ee72b015364f739e526d720fdb823a", size = 4377924, upload-time = "2025-04-03T20:36:37.363Z" }, + { url = "https://files.pythonhosted.org/packages/8e/83/fa33f61796731891c3e045d0cbca4436a5c436a170e7f04d42c2423652c3/rapidfuzz-3.13.0-cp312-cp312-win32.whl", hash = "sha256:4671ee300d1818d7bdfd8fa0608580d7778ba701817216f0c17fb29e6b972514", size = 1823915, upload-time = "2025-04-03T20:36:39.451Z" }, + { url = "https://files.pythonhosted.org/packages/03/25/5ee7ab6841ca668567d0897905eebc79c76f6297b73bf05957be887e9c74/rapidfuzz-3.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e2065f68fb1d0bf65adc289c1bdc45ba7e464e406b319d67bb54441a1b9da9e", size = 1616985, upload-time = "2025-04-03T20:36:41.631Z" }, + { url = "https://files.pythonhosted.org/packages/76/5e/3f0fb88db396cb692aefd631e4805854e02120a2382723b90dcae720bcc6/rapidfuzz-3.13.0-cp312-cp312-win_arm64.whl", hash = "sha256:65cc97c2fc2c2fe23586599686f3b1ceeedeca8e598cfcc1b7e56dc8ca7e2aa7", size = 860116, upload-time = "2025-04-03T20:36:43.915Z" }, + { url = "https://files.pythonhosted.org/packages/0a/76/606e71e4227790750f1646f3c5c873e18d6cfeb6f9a77b2b8c4dec8f0f66/rapidfuzz-3.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:09e908064d3684c541d312bd4c7b05acb99a2c764f6231bd507d4b4b65226c23", size = 1982282, upload-time = "2025-04-03T20:36:46.149Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/f5/d0b48c6b902607a59fd5932a54e3518dae8223814db8349b0176e6e9444b/rapidfuzz-3.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:57c390336cb50d5d3bfb0cfe1467478a15733703af61f6dffb14b1cd312a6fae", size = 1439274, upload-time = "2025-04-03T20:36:48.323Z" }, + { url = "https://files.pythonhosted.org/packages/59/cf/c3ac8c80d8ced6c1f99b5d9674d397ce5d0e9d0939d788d67c010e19c65f/rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0da54aa8547b3c2c188db3d1c7eb4d1bb6dd80baa8cdaeaec3d1da3346ec9caa", size = 1399854, upload-time = "2025-04-03T20:36:50.294Z" }, + { url = "https://files.pythonhosted.org/packages/09/5d/ca8698e452b349c8313faf07bfa84e7d1c2d2edf7ccc67bcfc49bee1259a/rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df8e8c21e67afb9d7fbe18f42c6111fe155e801ab103c81109a61312927cc611", size = 5308962, upload-time = "2025-04-03T20:36:52.421Z" }, + { url = "https://files.pythonhosted.org/packages/66/0a/bebada332854e78e68f3d6c05226b23faca79d71362509dbcf7b002e33b7/rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:461fd13250a2adf8e90ca9a0e1e166515cbcaa5e9c3b1f37545cbbeff9e77f6b", size = 1625016, upload-time = "2025-04-03T20:36:54.639Z" }, + { url = "https://files.pythonhosted.org/packages/de/0c/9e58d4887b86d7121d1c519f7050d1be5eb189d8a8075f5417df6492b4f5/rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c2b3dd5d206a12deca16870acc0d6e5036abeb70e3cad6549c294eff15591527", size = 1600414, upload-time = "2025-04-03T20:36:56.669Z" }, + { url = "https://files.pythonhosted.org/packages/9b/df/6096bc669c1311568840bdcbb5a893edc972d1c8d2b4b4325c21d54da5b1/rapidfuzz-3.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1343d745fbf4688e412d8f398c6e6d6f269db99a54456873f232ba2e7aeb4939", size = 3053179, upload-time = "2025-04-03T20:36:59.366Z" }, + { url 
= "https://files.pythonhosted.org/packages/f9/46/5179c583b75fce3e65a5cd79a3561bd19abd54518cb7c483a89b284bf2b9/rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b1b065f370d54551dcc785c6f9eeb5bd517ae14c983d2784c064b3aa525896df", size = 2456856, upload-time = "2025-04-03T20:37:01.708Z" }, + { url = "https://files.pythonhosted.org/packages/6b/64/e9804212e3286d027ac35bbb66603c9456c2bce23f823b67d2f5cabc05c1/rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:11b125d8edd67e767b2295eac6eb9afe0b1cdc82ea3d4b9257da4b8e06077798", size = 7567107, upload-time = "2025-04-03T20:37:04.521Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f2/7d69e7bf4daec62769b11757ffc31f69afb3ce248947aadbb109fefd9f65/rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c33f9c841630b2bb7e69a3fb5c84a854075bb812c47620978bddc591f764da3d", size = 2854192, upload-time = "2025-04-03T20:37:06.905Z" }, + { url = "https://files.pythonhosted.org/packages/05/21/ab4ad7d7d0f653e6fe2e4ccf11d0245092bef94cdff587a21e534e57bda8/rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ae4574cb66cf1e85d32bb7e9ec45af5409c5b3970b7ceb8dea90168024127566", size = 3398876, upload-time = "2025-04-03T20:37:09.692Z" }, + { url = "https://files.pythonhosted.org/packages/0f/a8/45bba94c2489cb1ee0130dcb46e1df4fa2c2b25269e21ffd15240a80322b/rapidfuzz-3.13.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e05752418b24bbd411841b256344c26f57da1148c5509e34ea39c7eb5099ab72", size = 4377077, upload-time = "2025-04-03T20:37:11.929Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f3/5e0c6ae452cbb74e5436d3445467447e8c32f3021f48f93f15934b8cffc2/rapidfuzz-3.13.0-cp313-cp313-win32.whl", hash = "sha256:0e1d08cb884805a543f2de1f6744069495ef527e279e05370dd7c83416af83f8", size = 1822066, upload-time = "2025-04-03T20:37:14.425Z" }, + { url = 
"https://files.pythonhosted.org/packages/96/e3/a98c25c4f74051df4dcf2f393176b8663bfd93c7afc6692c84e96de147a2/rapidfuzz-3.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9a7c6232be5f809cd39da30ee5d24e6cadd919831e6020ec6c2391f4c3bc9264", size = 1615100, upload-time = "2025-04-03T20:37:16.611Z" }, + { url = "https://files.pythonhosted.org/packages/60/b1/05cd5e697c00cd46d7791915f571b38c8531f714832eff2c5e34537c49ee/rapidfuzz-3.13.0-cp313-cp313-win_arm64.whl", hash = "sha256:3f32f15bacd1838c929b35c84b43618481e1b3d7a61b5ed2db0291b70ae88b53", size = 858976, upload-time = "2025-04-03T20:37:19.336Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e1/f5d85ae3c53df6f817ca70dbdd37c83f31e64caced5bb867bec6b43d1fdf/rapidfuzz-3.13.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe5790a36d33a5d0a6a1f802aa42ecae282bf29ac6f7506d8e12510847b82a45", size = 1904437, upload-time = "2025-04-03T20:38:00.255Z" }, + { url = "https://files.pythonhosted.org/packages/db/d7/ded50603dddc5eb182b7ce547a523ab67b3bf42b89736f93a230a398a445/rapidfuzz-3.13.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:cdb33ee9f8a8e4742c6b268fa6bd739024f34651a06b26913381b1413ebe7590", size = 1383126, upload-time = "2025-04-03T20:38:02.676Z" }, + { url = "https://files.pythonhosted.org/packages/c4/48/6f795e793babb0120b63a165496d64f989b9438efbeed3357d9a226ce575/rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c99b76b93f7b495eee7dcb0d6a38fb3ce91e72e99d9f78faa5664a881cb2b7d", size = 1365565, upload-time = "2025-04-03T20:38:06.646Z" }, + { url = "https://files.pythonhosted.org/packages/f0/50/0062a959a2d72ed17815824e40e2eefdb26f6c51d627389514510a7875f3/rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6af42f2ede8b596a6aaf6d49fdee3066ca578f4856b85ab5c1e2145de367a12d", size = 5251719, upload-time = "2025-04-03T20:38:09.191Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/02/bd8b70cd98b7a88e1621264778ac830c9daa7745cd63e838bd773b1aeebd/rapidfuzz-3.13.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c0efa73afbc5b265aca0d8a467ae2a3f40d6854cbe1481cb442a62b7bf23c99", size = 2991095, upload-time = "2025-04-03T20:38:12.554Z" }, + { url = "https://files.pythonhosted.org/packages/9f/8d/632d895cdae8356826184864d74a5f487d40cb79f50a9137510524a1ba86/rapidfuzz-3.13.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7ac21489de962a4e2fc1e8f0b0da4aa1adc6ab9512fd845563fecb4b4c52093a", size = 1553888, upload-time = "2025-04-03T20:38:15.357Z" }, + { url = "https://files.pythonhosted.org/packages/88/df/6060c5a9c879b302bd47a73fc012d0db37abf6544c57591bcbc3459673bd/rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1ba007f4d35a45ee68656b2eb83b8715e11d0f90e5b9f02d615a8a321ff00c27", size = 1905935, upload-time = "2025-04-03T20:38:18.07Z" }, + { url = "https://files.pythonhosted.org/packages/a2/6c/a0b819b829e20525ef1bd58fc776fb8d07a0c38d819e63ba2b7c311a2ed4/rapidfuzz-3.13.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d7a217310429b43be95b3b8ad7f8fc41aba341109dc91e978cd7c703f928c58f", size = 1383714, upload-time = "2025-04-03T20:38:20.628Z" }, + { url = "https://files.pythonhosted.org/packages/6a/c1/3da3466cc8a9bfb9cd345ad221fac311143b6a9664b5af4adb95b5e6ce01/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:558bf526bcd777de32b7885790a95a9548ffdcce68f704a81207be4a286c1095", size = 1367329, upload-time = "2025-04-03T20:38:23.01Z" }, + { url = "https://files.pythonhosted.org/packages/da/f0/9f2a9043bfc4e66da256b15d728c5fc2d865edf0028824337f5edac36783/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:202a87760f5145140d56153b193a797ae9338f7939eb16652dd7ff96f8faf64c", size = 5251057, upload-time = "2025-04-03T20:38:25.52Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/ff/af2cb1d8acf9777d52487af5c6b34ce9d13381a753f991d95ecaca813407/rapidfuzz-3.13.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cfcccc08f671646ccb1e413c773bb92e7bba789e3a1796fd49d23c12539fe2e4", size = 2992401, upload-time = "2025-04-03T20:38:28.196Z" }, + { url = "https://files.pythonhosted.org/packages/c1/c5/c243b05a15a27b946180db0d1e4c999bef3f4221505dff9748f1f6c917be/rapidfuzz-3.13.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:1f219f1e3c3194d7a7de222f54450ce12bc907862ff9a8962d83061c1f923c86", size = 1553782, upload-time = "2025-04-03T20:38:30.778Z" }, +] + [[package]] name = "referencing" version = "0.36.2" @@ -1337,6 +2193,32 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, ] +[[package]] +name = "rich-pyfiglet" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/26/65/4495d07dacdcbfc9057e47c60be218acd4354a5ce53f93a97d51e1bb98e6/rich_pyfiglet-0.1.4.tar.gz", hash = "sha256:78f6d949bd254ae4bfd6d239ea0a4a90084310bbe9157c1ce351bab092de0a3c", size = 1469273, upload-time = "2025-06-03T10:45:53.719Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/71/b33fedbc66209e41dd3f3644a541eb7b32a3c0daee9ca315787f6ca3c320/rich_pyfiglet-0.1.4-py3-none-any.whl", hash = "sha256:7af138293050c0d28e4224103e02c8665d5dd9a7ef578292b65ae80cff4d337c", size = 1645134, upload-time = "2025-06-03T10:45:52.178Z" }, +] + +[[package]] +name = "rich-rst" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "rich" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/b0/69/5514c3a87b5f10f09a34bb011bc0927bc12c596c8dae5915604e71abc386/rich_rst-1.3.1.tar.gz", hash = "sha256:fad46e3ba42785ea8c1785e2ceaa56e0ffa32dbe5410dec432f37e4107c4f383", size = 13839, upload-time = "2024-04-30T04:40:38.125Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/bc/cc4e3dbc5e7992398dcb7a8eda0cbcf4fb792a0cdb93f857b478bf3cf884/rich_rst-1.3.1-py3-none-any.whl", hash = "sha256:498a74e3896507ab04492d326e794c3ef76e7cda078703aa592d1853d91098c1", size = 11621, upload-time = "2024-04-30T04:40:32.619Z" }, +] + [[package]] name = "rpds-py" version = "0.26.0" @@ -1475,6 +2357,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, ] +[[package]] +name = "ruff" +version = "0.12.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9b/ce/8d7dbedede481245b489b769d27e2934730791a9a82765cb94566c6e6abd/ruff-0.12.4.tar.gz", hash = "sha256:13efa16df6c6eeb7d0f091abae50f58e9522f3843edb40d56ad52a5a4a4b6873", size = 5131435, upload-time = "2025-07-17T17:27:19.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ae/9f/517bc5f61bad205b7f36684ffa5415c013862dee02f55f38a217bdbe7aa4/ruff-0.12.4-py3-none-linux_armv6l.whl", hash = "sha256:cb0d261dac457ab939aeb247e804125a5d521b21adf27e721895b0d3f83a0d0a", size = 10188824, upload-time = "2025-07-17T17:26:31.412Z" }, + { url = "https://files.pythonhosted.org/packages/28/83/691baae5a11fbbde91df01c565c650fd17b0eabed259e8b7563de17c6529/ruff-0.12.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:55c0f4ca9769408d9b9bac530c30d3e66490bd2beb2d3dae3e4128a1f05c7442", size = 10884521, upload-time = "2025-07-17T17:26:35.084Z" }, + { url = 
"https://files.pythonhosted.org/packages/d6/8d/756d780ff4076e6dd035d058fa220345f8c458391f7edfb1c10731eedc75/ruff-0.12.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:a8224cc3722c9ad9044da7f89c4c1ec452aef2cfe3904365025dd2f51daeae0e", size = 10277653, upload-time = "2025-07-17T17:26:37.897Z" }, + { url = "https://files.pythonhosted.org/packages/8d/97/8eeee0f48ece153206dce730fc9e0e0ca54fd7f261bb3d99c0a4343a1892/ruff-0.12.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9949d01d64fa3672449a51ddb5d7548b33e130240ad418884ee6efa7a229586", size = 10485993, upload-time = "2025-07-17T17:26:40.68Z" }, + { url = "https://files.pythonhosted.org/packages/49/b8/22a43d23a1f68df9b88f952616c8508ea6ce4ed4f15353b8168c48b2d7e7/ruff-0.12.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:be0593c69df9ad1465e8a2d10e3defd111fdb62dcd5be23ae2c06da77e8fcffb", size = 10022824, upload-time = "2025-07-17T17:26:43.564Z" }, + { url = "https://files.pythonhosted.org/packages/cd/70/37c234c220366993e8cffcbd6cadbf332bfc848cbd6f45b02bade17e0149/ruff-0.12.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7dea966bcb55d4ecc4cc3270bccb6f87a337326c9dcd3c07d5b97000dbff41c", size = 11524414, upload-time = "2025-07-17T17:26:46.219Z" }, + { url = "https://files.pythonhosted.org/packages/14/77/c30f9964f481b5e0e29dd6a1fae1f769ac3fd468eb76fdd5661936edd262/ruff-0.12.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:afcfa3ab5ab5dd0e1c39bf286d829e042a15e966b3726eea79528e2e24d8371a", size = 12419216, upload-time = "2025-07-17T17:26:48.883Z" }, + { url = "https://files.pythonhosted.org/packages/6e/79/af7fe0a4202dce4ef62c5e33fecbed07f0178f5b4dd9c0d2fcff5ab4a47c/ruff-0.12.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c057ce464b1413c926cdb203a0f858cd52f3e73dcb3270a3318d1630f6395bb3", size = 11976756, upload-time = "2025-07-17T17:26:51.754Z" }, + { url = 
"https://files.pythonhosted.org/packages/09/d1/33fb1fc00e20a939c305dbe2f80df7c28ba9193f7a85470b982815a2dc6a/ruff-0.12.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e64b90d1122dc2713330350626b10d60818930819623abbb56535c6466cce045", size = 11020019, upload-time = "2025-07-17T17:26:54.265Z" }, + { url = "https://files.pythonhosted.org/packages/64/f4/e3cd7f7bda646526f09693e2e02bd83d85fff8a8222c52cf9681c0d30843/ruff-0.12.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2abc48f3d9667fdc74022380b5c745873499ff827393a636f7a59da1515e7c57", size = 11277890, upload-time = "2025-07-17T17:26:56.914Z" }, + { url = "https://files.pythonhosted.org/packages/5e/d0/69a85fb8b94501ff1a4f95b7591505e8983f38823da6941eb5b6badb1e3a/ruff-0.12.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2b2449dc0c138d877d629bea151bee8c0ae3b8e9c43f5fcaafcd0c0d0726b184", size = 10348539, upload-time = "2025-07-17T17:26:59.381Z" }, + { url = "https://files.pythonhosted.org/packages/16/a0/91372d1cb1678f7d42d4893b88c252b01ff1dffcad09ae0c51aa2542275f/ruff-0.12.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:56e45bb11f625db55f9b70477062e6a1a04d53628eda7784dce6e0f55fd549eb", size = 10009579, upload-time = "2025-07-17T17:27:02.462Z" }, + { url = "https://files.pythonhosted.org/packages/23/1b/c4a833e3114d2cc0f677e58f1df6c3b20f62328dbfa710b87a1636a5e8eb/ruff-0.12.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:478fccdb82ca148a98a9ff43658944f7ab5ec41c3c49d77cd99d44da019371a1", size = 10942982, upload-time = "2025-07-17T17:27:05.343Z" }, + { url = "https://files.pythonhosted.org/packages/ff/ce/ce85e445cf0a5dd8842f2f0c6f0018eedb164a92bdf3eda51984ffd4d989/ruff-0.12.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:0fc426bec2e4e5f4c4f182b9d2ce6a75c85ba9bcdbe5c6f2a74fcb8df437df4b", size = 11343331, upload-time = "2025-07-17T17:27:08.652Z" }, + { url = 
"https://files.pythonhosted.org/packages/35/cf/441b7fc58368455233cfb5b77206c849b6dfb48b23de532adcc2e50ccc06/ruff-0.12.4-py3-none-win32.whl", hash = "sha256:4de27977827893cdfb1211d42d84bc180fceb7b72471104671c59be37041cf93", size = 10267904, upload-time = "2025-07-17T17:27:11.814Z" }, + { url = "https://files.pythonhosted.org/packages/ce/7e/20af4a0df5e1299e7368d5ea4350412226afb03d95507faae94c80f00afd/ruff-0.12.4-py3-none-win_amd64.whl", hash = "sha256:fe0b9e9eb23736b453143d72d2ceca5db323963330d5b7859d60d101147d461a", size = 11209038, upload-time = "2025-07-17T17:27:14.417Z" }, + { url = "https://files.pythonhosted.org/packages/11/02/8857d0dfb8f44ef299a5dfd898f673edefb71e3b533b3b9d2db4c832dd13/ruff-0.12.4-py3-none-win_arm64.whl", hash = "sha256:0618ec4442a83ab545e5b71202a5c0ed7791e8471435b94e655b570a5031a98e", size = 10469336, upload-time = "2025-07-17T17:27:16.913Z" }, +] + [[package]] name = "shellingham" version = "1.5.4" @@ -1590,6 +2497,57 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/82/95/38ef0cd7fa11eaba6a99b3c4f5ac948d8bc6ff199aabd327a29cc000840c/starlette-0.47.1-py3-none-any.whl", hash = "sha256:5e11c9f5c7c3f24959edbf2dffdc01bba860228acf657129467d8a7468591527", size = 72747, upload-time = "2025-06-21T04:03:15.705Z" }, ] +[[package]] +name = "thefuzz" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "rapidfuzz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/81/4b/d3eb25831590d6d7d38c2f2e3561d3ba41d490dc89cd91d9e65e7c812508/thefuzz-0.22.1.tar.gz", hash = "sha256:7138039a7ecf540da323792d8592ef9902b1d79eb78c147d4f20664de79f3680", size = 19993, upload-time = "2024-01-19T19:18:23.135Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/82/4f/1695e70ceb3604f19eda9908e289c687ea81c4fecef4d90a9d1d0f2f7ae9/thefuzz-0.22.1-py3-none-any.whl", hash = "sha256:59729b33556850b90e1093c4cf9e618af6f2e4c985df193fdf3c5b5cf02ca481", size = 8245, upload-time = 
"2024-01-19T19:18:20.362Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = 
"https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + [[package]] name = "typer" version = "0.16.0" @@ -1657,3 +2615,116 @@ sdist = { url = "https://files.pythonhosted.org/packages/5e/42/e0e305207bb88c6b8 wheels = [ { url = "https://files.pythonhosted.org/packages/d2/e2/dc81b1bd1dcfe91735810265e9d26bc8ec5da45b4c0f6237e286819194c3/uvicorn-0.35.0-py3-none-any.whl", hash = "sha256:197535216b25ff9b785e29a0b79199f55222193d47f820816e7da751e9bc8d4a", size = 66406, upload-time = "2025-06-28T16:15:44.816Z" }, ] + +[[package]] +name = "virtualenv" +version = "20.31.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/56/2c/444f465fb2c65f40c3a104fd0c495184c4f2336d65baf398e3c75d72ea94/virtualenv-20.31.2.tar.gz", hash = "sha256:e10c0a9d02835e592521be48b332b6caee6887f332c111aa79a09b9e79efc2af", size = 6076316, upload-time = "2025-05-08T17:58:23.811Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/40/b1c265d4b2b62b58576588510fc4d1fe60a86319c8de99fd8e9fec617d2c/virtualenv-20.31.2-py3-none-any.whl", hash = "sha256:36efd0d9650ee985f0cad72065001e66d49a6f24eb44d98980f630686243cf11", size = 6057982, upload-time = "2025-05-08T17:58:21.15Z" }, +] + +[[package]] +name = "yarl" +version = "1.20.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name 
= "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/fb/efaa23fa4e45537b827620f04cf8f3cd658b76642205162e072703a5b963/yarl-1.20.1.tar.gz", hash = "sha256:d017a4997ee50c91fd5466cef416231bb82177b93b029906cefc542ce14c35ac", size = 186428, upload-time = "2025-06-10T00:46:09.923Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/65/7fed0d774abf47487c64be14e9223749468922817b5e8792b8a64792a1bb/yarl-1.20.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6032e6da6abd41e4acda34d75a816012717000fa6839f37124a47fcefc49bec4", size = 132910, upload-time = "2025-06-10T00:42:31.108Z" }, + { url = "https://files.pythonhosted.org/packages/8a/7b/988f55a52da99df9e56dc733b8e4e5a6ae2090081dc2754fc8fd34e60aa0/yarl-1.20.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2c7b34d804b8cf9b214f05015c4fee2ebe7ed05cf581e7192c06555c71f4446a", size = 90644, upload-time = "2025-06-10T00:42:33.851Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/30d98f03e95d30c7e3cc093759982d038c8833ec2451001d45ef4854edc1/yarl-1.20.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c869f2651cc77465f6cd01d938d91a11d9ea5d798738c1dc077f3de0b5e5fed", size = 89322, upload-time = "2025-06-10T00:42:35.688Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7a/f2f314f5ebfe9200724b0b748de2186b927acb334cf964fd312eb86fc286/yarl-1.20.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62915e6688eb4d180d93840cda4110995ad50c459bf931b8b3775b37c264af1e", size = 323786, upload-time = "2025-06-10T00:42:37.817Z" }, + { url = "https://files.pythonhosted.org/packages/15/3f/718d26f189db96d993d14b984ce91de52e76309d0fd1d4296f34039856aa/yarl-1.20.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:41ebd28167bc6af8abb97fec1a399f412eec5fd61a3ccbe2305a18b84fb4ca73", size = 319627, upload-time = "2025-06-10T00:42:39.937Z" }, + { url = 
"https://files.pythonhosted.org/packages/a5/76/8fcfbf5fa2369157b9898962a4a7d96764b287b085b5b3d9ffae69cdefd1/yarl-1.20.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:21242b4288a6d56f04ea193adde174b7e347ac46ce6bc84989ff7c1b1ecea84e", size = 339149, upload-time = "2025-06-10T00:42:42.627Z" }, + { url = "https://files.pythonhosted.org/packages/3c/95/d7fc301cc4661785967acc04f54a4a42d5124905e27db27bb578aac49b5c/yarl-1.20.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bea21cdae6c7eb02ba02a475f37463abfe0a01f5d7200121b03e605d6a0439f8", size = 333327, upload-time = "2025-06-10T00:42:44.842Z" }, + { url = "https://files.pythonhosted.org/packages/65/94/e21269718349582eee81efc5c1c08ee71c816bfc1585b77d0ec3f58089eb/yarl-1.20.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f8a891e4a22a89f5dde7862994485e19db246b70bb288d3ce73a34422e55b23", size = 326054, upload-time = "2025-06-10T00:42:47.149Z" }, + { url = "https://files.pythonhosted.org/packages/32/ae/8616d1f07853704523519f6131d21f092e567c5af93de7e3e94b38d7f065/yarl-1.20.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd803820d44c8853a109a34e3660e5a61beae12970da479cf44aa2954019bf70", size = 315035, upload-time = "2025-06-10T00:42:48.852Z" }, + { url = "https://files.pythonhosted.org/packages/48/aa/0ace06280861ef055855333707db5e49c6e3a08840a7ce62682259d0a6c0/yarl-1.20.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b982fa7f74c80d5c0c7b5b38f908971e513380a10fecea528091405f519b9ebb", size = 338962, upload-time = "2025-06-10T00:42:51.024Z" }, + { url = "https://files.pythonhosted.org/packages/20/52/1e9d0e6916f45a8fb50e6844f01cb34692455f1acd548606cbda8134cd1e/yarl-1.20.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:33f29ecfe0330c570d997bcf1afd304377f2e48f61447f37e846a6058a4d33b2", size = 335399, upload-time = "2025-06-10T00:42:53.007Z" }, + { url = 
"https://files.pythonhosted.org/packages/f2/65/60452df742952c630e82f394cd409de10610481d9043aa14c61bf846b7b1/yarl-1.20.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:835ab2cfc74d5eb4a6a528c57f05688099da41cf4957cf08cad38647e4a83b30", size = 338649, upload-time = "2025-06-10T00:42:54.964Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f5/6cd4ff38dcde57a70f23719a838665ee17079640c77087404c3d34da6727/yarl-1.20.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:46b5e0ccf1943a9a6e766b2c2b8c732c55b34e28be57d8daa2b3c1d1d4009309", size = 358563, upload-time = "2025-06-10T00:42:57.28Z" }, + { url = "https://files.pythonhosted.org/packages/d1/90/c42eefd79d0d8222cb3227bdd51b640c0c1d0aa33fe4cc86c36eccba77d3/yarl-1.20.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:df47c55f7d74127d1b11251fe6397d84afdde0d53b90bedb46a23c0e534f9d24", size = 357609, upload-time = "2025-06-10T00:42:59.055Z" }, + { url = "https://files.pythonhosted.org/packages/03/c8/cea6b232cb4617514232e0f8a718153a95b5d82b5290711b201545825532/yarl-1.20.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:76d12524d05841276b0e22573f28d5fbcb67589836772ae9244d90dd7d66aa13", size = 350224, upload-time = "2025-06-10T00:43:01.248Z" }, + { url = "https://files.pythonhosted.org/packages/ce/a3/eaa0ab9712f1f3d01faf43cf6f1f7210ce4ea4a7e9b28b489a2261ca8db9/yarl-1.20.1-cp310-cp310-win32.whl", hash = "sha256:6c4fbf6b02d70e512d7ade4b1f998f237137f1417ab07ec06358ea04f69134f8", size = 81753, upload-time = "2025-06-10T00:43:03.486Z" }, + { url = "https://files.pythonhosted.org/packages/8f/34/e4abde70a9256465fe31c88ed02c3f8502b7b5dead693a4f350a06413f28/yarl-1.20.1-cp310-cp310-win_amd64.whl", hash = "sha256:aef6c4d69554d44b7f9d923245f8ad9a707d971e6209d51279196d8e8fe1ae16", size = 86817, upload-time = "2025-06-10T00:43:05.231Z" }, + { url = "https://files.pythonhosted.org/packages/b1/18/893b50efc2350e47a874c5c2d67e55a0ea5df91186b2a6f5ac52eff887cd/yarl-1.20.1-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:47ee6188fea634bdfaeb2cc420f5b3b17332e6225ce88149a17c413c77ff269e", size = 133833, upload-time = "2025-06-10T00:43:07.393Z" }, + { url = "https://files.pythonhosted.org/packages/89/ed/b8773448030e6fc47fa797f099ab9eab151a43a25717f9ac043844ad5ea3/yarl-1.20.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d0f6500f69e8402d513e5eedb77a4e1818691e8f45e6b687147963514d84b44b", size = 91070, upload-time = "2025-06-10T00:43:09.538Z" }, + { url = "https://files.pythonhosted.org/packages/e3/e3/409bd17b1e42619bf69f60e4f031ce1ccb29bd7380117a55529e76933464/yarl-1.20.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7a8900a42fcdaad568de58887c7b2f602962356908eedb7628eaf6021a6e435b", size = 89818, upload-time = "2025-06-10T00:43:11.575Z" }, + { url = "https://files.pythonhosted.org/packages/f8/77/64d8431a4d77c856eb2d82aa3de2ad6741365245a29b3a9543cd598ed8c5/yarl-1.20.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bad6d131fda8ef508b36be3ece16d0902e80b88ea7200f030a0f6c11d9e508d4", size = 347003, upload-time = "2025-06-10T00:43:14.088Z" }, + { url = "https://files.pythonhosted.org/packages/8d/d2/0c7e4def093dcef0bd9fa22d4d24b023788b0a33b8d0088b51aa51e21e99/yarl-1.20.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:df018d92fe22aaebb679a7f89fe0c0f368ec497e3dda6cb81a567610f04501f1", size = 336537, upload-time = "2025-06-10T00:43:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/f0/f3/fc514f4b2cf02cb59d10cbfe228691d25929ce8f72a38db07d3febc3f706/yarl-1.20.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f969afbb0a9b63c18d0feecf0db09d164b7a44a053e78a7d05f5df163e43833", size = 362358, upload-time = "2025-06-10T00:43:18.704Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6d/a313ac8d8391381ff9006ac05f1d4331cee3b1efaa833a53d12253733255/yarl-1.20.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:812303eb4aa98e302886ccda58d6b099e3576b1b9276161469c25803a8db277d", size = 357362, upload-time = "2025-06-10T00:43:20.888Z" }, + { url = "https://files.pythonhosted.org/packages/00/70/8f78a95d6935a70263d46caa3dd18e1f223cf2f2ff2037baa01a22bc5b22/yarl-1.20.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98c4a7d166635147924aa0bf9bfe8d8abad6fffa6102de9c99ea04a1376f91e8", size = 348979, upload-time = "2025-06-10T00:43:23.169Z" }, + { url = "https://files.pythonhosted.org/packages/cb/05/42773027968968f4f15143553970ee36ead27038d627f457cc44bbbeecf3/yarl-1.20.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12e768f966538e81e6e7550f9086a6236b16e26cd964cf4df35349970f3551cf", size = 337274, upload-time = "2025-06-10T00:43:27.111Z" }, + { url = "https://files.pythonhosted.org/packages/05/be/665634aa196954156741ea591d2f946f1b78ceee8bb8f28488bf28c0dd62/yarl-1.20.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe41919b9d899661c5c28a8b4b0acf704510b88f27f0934ac7a7bebdd8938d5e", size = 363294, upload-time = "2025-06-10T00:43:28.96Z" }, + { url = "https://files.pythonhosted.org/packages/eb/90/73448401d36fa4e210ece5579895731f190d5119c4b66b43b52182e88cd5/yarl-1.20.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:8601bc010d1d7780592f3fc1bdc6c72e2b6466ea34569778422943e1a1f3c389", size = 358169, upload-time = "2025-06-10T00:43:30.701Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b0/fce922d46dc1eb43c811f1889f7daa6001b27a4005587e94878570300881/yarl-1.20.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:daadbdc1f2a9033a2399c42646fbd46da7992e868a5fe9513860122d7fe7a73f", size = 362776, upload-time = "2025-06-10T00:43:32.51Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0d/b172628fce039dae8977fd22caeff3eeebffd52e86060413f5673767c427/yarl-1.20.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = 
"sha256:03aa1e041727cb438ca762628109ef1333498b122e4c76dd858d186a37cec845", size = 381341, upload-time = "2025-06-10T00:43:34.543Z" }, + { url = "https://files.pythonhosted.org/packages/6b/9b/5b886d7671f4580209e855974fe1cecec409aa4a89ea58b8f0560dc529b1/yarl-1.20.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:642980ef5e0fa1de5fa96d905c7e00cb2c47cb468bfcac5a18c58e27dbf8d8d1", size = 379988, upload-time = "2025-06-10T00:43:36.489Z" }, + { url = "https://files.pythonhosted.org/packages/73/be/75ef5fd0fcd8f083a5d13f78fd3f009528132a1f2a1d7c925c39fa20aa79/yarl-1.20.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:86971e2795584fe8c002356d3b97ef6c61862720eeff03db2a7c86b678d85b3e", size = 371113, upload-time = "2025-06-10T00:43:38.592Z" }, + { url = "https://files.pythonhosted.org/packages/50/4f/62faab3b479dfdcb741fe9e3f0323e2a7d5cd1ab2edc73221d57ad4834b2/yarl-1.20.1-cp311-cp311-win32.whl", hash = "sha256:597f40615b8d25812f14562699e287f0dcc035d25eb74da72cae043bb884d773", size = 81485, upload-time = "2025-06-10T00:43:41.038Z" }, + { url = "https://files.pythonhosted.org/packages/f0/09/d9c7942f8f05c32ec72cd5c8e041c8b29b5807328b68b4801ff2511d4d5e/yarl-1.20.1-cp311-cp311-win_amd64.whl", hash = "sha256:26ef53a9e726e61e9cd1cda6b478f17e350fb5800b4bd1cd9fe81c4d91cfeb2e", size = 86686, upload-time = "2025-06-10T00:43:42.692Z" }, + { url = "https://files.pythonhosted.org/packages/5f/9a/cb7fad7d73c69f296eda6815e4a2c7ed53fc70c2f136479a91c8e5fbdb6d/yarl-1.20.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdcc4cd244e58593a4379fe60fdee5ac0331f8eb70320a24d591a3be197b94a9", size = 133667, upload-time = "2025-06-10T00:43:44.369Z" }, + { url = "https://files.pythonhosted.org/packages/67/38/688577a1cb1e656e3971fb66a3492501c5a5df56d99722e57c98249e5b8a/yarl-1.20.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b29a2c385a5f5b9c7d9347e5812b6f7ab267193c62d282a540b4fc528c8a9d2a", size = 91025, upload-time = "2025-06-10T00:43:46.295Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/ec/72991ae51febeb11a42813fc259f0d4c8e0507f2b74b5514618d8b640365/yarl-1.20.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1112ae8154186dfe2de4732197f59c05a83dc814849a5ced892b708033f40dc2", size = 89709, upload-time = "2025-06-10T00:43:48.22Z" }, + { url = "https://files.pythonhosted.org/packages/99/da/4d798025490e89426e9f976702e5f9482005c548c579bdae792a4c37769e/yarl-1.20.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:90bbd29c4fe234233f7fa2b9b121fb63c321830e5d05b45153a2ca68f7d310ee", size = 352287, upload-time = "2025-06-10T00:43:49.924Z" }, + { url = "https://files.pythonhosted.org/packages/1a/26/54a15c6a567aac1c61b18aa0f4b8aa2e285a52d547d1be8bf48abe2b3991/yarl-1.20.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:680e19c7ce3710ac4cd964e90dad99bf9b5029372ba0c7cbfcd55e54d90ea819", size = 345429, upload-time = "2025-06-10T00:43:51.7Z" }, + { url = "https://files.pythonhosted.org/packages/d6/95/9dcf2386cb875b234353b93ec43e40219e14900e046bf6ac118f94b1e353/yarl-1.20.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4a979218c1fdb4246a05efc2cc23859d47c89af463a90b99b7c56094daf25a16", size = 365429, upload-time = "2025-06-10T00:43:53.494Z" }, + { url = "https://files.pythonhosted.org/packages/91/b2/33a8750f6a4bc224242a635f5f2cff6d6ad5ba651f6edcccf721992c21a0/yarl-1.20.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255b468adf57b4a7b65d8aad5b5138dce6a0752c139965711bdcb81bc370e1b6", size = 363862, upload-time = "2025-06-10T00:43:55.766Z" }, + { url = "https://files.pythonhosted.org/packages/98/28/3ab7acc5b51f4434b181b0cee8f1f4b77a65919700a355fb3617f9488874/yarl-1.20.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a97d67108e79cfe22e2b430d80d7571ae57d19f17cda8bb967057ca8a7bf5bfd", size = 355616, upload-time = "2025-06-10T00:43:58.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/a3/f666894aa947a371724ec7cd2e5daa78ee8a777b21509b4252dd7bd15e29/yarl-1.20.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8570d998db4ddbfb9a590b185a0a33dbf8aafb831d07a5257b4ec9948df9cb0a", size = 339954, upload-time = "2025-06-10T00:43:59.773Z" }, + { url = "https://files.pythonhosted.org/packages/f1/81/5f466427e09773c04219d3450d7a1256138a010b6c9f0af2d48565e9ad13/yarl-1.20.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:97c75596019baae7c71ccf1d8cc4738bc08134060d0adfcbe5642f778d1dca38", size = 365575, upload-time = "2025-06-10T00:44:02.051Z" }, + { url = "https://files.pythonhosted.org/packages/2e/e3/e4b0ad8403e97e6c9972dd587388940a032f030ebec196ab81a3b8e94d31/yarl-1.20.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:1c48912653e63aef91ff988c5432832692ac5a1d8f0fb8a33091520b5bbe19ef", size = 365061, upload-time = "2025-06-10T00:44:04.196Z" }, + { url = "https://files.pythonhosted.org/packages/ac/99/b8a142e79eb86c926f9f06452eb13ecb1bb5713bd01dc0038faf5452e544/yarl-1.20.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4c3ae28f3ae1563c50f3d37f064ddb1511ecc1d5584e88c6b7c63cf7702a6d5f", size = 364142, upload-time = "2025-06-10T00:44:06.527Z" }, + { url = "https://files.pythonhosted.org/packages/34/f2/08ed34a4a506d82a1a3e5bab99ccd930a040f9b6449e9fd050320e45845c/yarl-1.20.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c5e9642f27036283550f5f57dc6156c51084b458570b9d0d96100c8bebb186a8", size = 381894, upload-time = "2025-06-10T00:44:08.379Z" }, + { url = "https://files.pythonhosted.org/packages/92/f8/9a3fbf0968eac704f681726eff595dce9b49c8a25cd92bf83df209668285/yarl-1.20.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:2c26b0c49220d5799f7b22c6838409ee9bc58ee5c95361a4d7831f03cc225b5a", size = 383378, upload-time = "2025-06-10T00:44:10.51Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/85/9363f77bdfa1e4d690957cd39d192c4cacd1c58965df0470a4905253b54f/yarl-1.20.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:564ab3d517e3d01c408c67f2e5247aad4019dcf1969982aba3974b4093279004", size = 374069, upload-time = "2025-06-10T00:44:12.834Z" }, + { url = "https://files.pythonhosted.org/packages/35/99/9918c8739ba271dcd935400cff8b32e3cd319eaf02fcd023d5dcd487a7c8/yarl-1.20.1-cp312-cp312-win32.whl", hash = "sha256:daea0d313868da1cf2fac6b2d3a25c6e3a9e879483244be38c8e6a41f1d876a5", size = 81249, upload-time = "2025-06-10T00:44:14.731Z" }, + { url = "https://files.pythonhosted.org/packages/eb/83/5d9092950565481b413b31a23e75dd3418ff0a277d6e0abf3729d4d1ce25/yarl-1.20.1-cp312-cp312-win_amd64.whl", hash = "sha256:48ea7d7f9be0487339828a4de0360d7ce0efc06524a48e1810f945c45b813698", size = 86710, upload-time = "2025-06-10T00:44:16.716Z" }, + { url = "https://files.pythonhosted.org/packages/8a/e1/2411b6d7f769a07687acee88a062af5833cf1966b7266f3d8dfb3d3dc7d3/yarl-1.20.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:0b5ff0fbb7c9f1b1b5ab53330acbfc5247893069e7716840c8e7d5bb7355038a", size = 131811, upload-time = "2025-06-10T00:44:18.933Z" }, + { url = "https://files.pythonhosted.org/packages/b2/27/584394e1cb76fb771371770eccad35de400e7b434ce3142c2dd27392c968/yarl-1.20.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:14f326acd845c2b2e2eb38fb1346c94f7f3b01a4f5c788f8144f9b630bfff9a3", size = 90078, upload-time = "2025-06-10T00:44:20.635Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/3246ae92d4049099f52d9b0fe3486e3b500e29b7ea872d0f152966fc209d/yarl-1.20.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f60e4ad5db23f0b96e49c018596707c3ae89f5d0bd97f0ad3684bcbad899f1e7", size = 88748, upload-time = "2025-06-10T00:44:22.34Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/25/35afe384e31115a1a801fbcf84012d7a066d89035befae7c5d4284df1e03/yarl-1.20.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49bdd1b8e00ce57e68ba51916e4bb04461746e794e7c4d4bbc42ba2f18297691", size = 349595, upload-time = "2025-06-10T00:44:24.314Z" }, + { url = "https://files.pythonhosted.org/packages/28/2d/8aca6cb2cabc8f12efcb82749b9cefecbccfc7b0384e56cd71058ccee433/yarl-1.20.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:66252d780b45189975abfed839616e8fd2dbacbdc262105ad7742c6ae58f3e31", size = 342616, upload-time = "2025-06-10T00:44:26.167Z" }, + { url = "https://files.pythonhosted.org/packages/0b/e9/1312633d16b31acf0098d30440ca855e3492d66623dafb8e25b03d00c3da/yarl-1.20.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:59174e7332f5d153d8f7452a102b103e2e74035ad085f404df2e40e663a22b28", size = 361324, upload-time = "2025-06-10T00:44:27.915Z" }, + { url = "https://files.pythonhosted.org/packages/bc/a0/688cc99463f12f7669eec7c8acc71ef56a1521b99eab7cd3abb75af887b0/yarl-1.20.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3968ec7d92a0c0f9ac34d5ecfd03869ec0cab0697c91a45db3fbbd95fe1b653", size = 359676, upload-time = "2025-06-10T00:44:30.041Z" }, + { url = "https://files.pythonhosted.org/packages/af/44/46407d7f7a56e9a85a4c207724c9f2c545c060380718eea9088f222ba697/yarl-1.20.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d1a4fbb50e14396ba3d375f68bfe02215d8e7bc3ec49da8341fe3157f59d2ff5", size = 352614, upload-time = "2025-06-10T00:44:32.171Z" }, + { url = "https://files.pythonhosted.org/packages/b1/91/31163295e82b8d5485d31d9cf7754d973d41915cadce070491778d9c9825/yarl-1.20.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11a62c839c3a8eac2410e951301309426f368388ff2f33799052787035793b02", size = 336766, 
upload-time = "2025-06-10T00:44:34.494Z" }, + { url = "https://files.pythonhosted.org/packages/b4/8e/c41a5bc482121f51c083c4c2bcd16b9e01e1cf8729e380273a952513a21f/yarl-1.20.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:041eaa14f73ff5a8986b4388ac6bb43a77f2ea09bf1913df7a35d4646db69e53", size = 364615, upload-time = "2025-06-10T00:44:36.856Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5b/61a3b054238d33d70ea06ebba7e58597891b71c699e247df35cc984ab393/yarl-1.20.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:377fae2fef158e8fd9d60b4c8751387b8d1fb121d3d0b8e9b0be07d1b41e83dc", size = 360982, upload-time = "2025-06-10T00:44:39.141Z" }, + { url = "https://files.pythonhosted.org/packages/df/a3/6a72fb83f8d478cb201d14927bc8040af901811a88e0ff2da7842dd0ed19/yarl-1.20.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1c92f4390e407513f619d49319023664643d3339bd5e5a56a3bebe01bc67ec04", size = 369792, upload-time = "2025-06-10T00:44:40.934Z" }, + { url = "https://files.pythonhosted.org/packages/7c/af/4cc3c36dfc7c077f8dedb561eb21f69e1e9f2456b91b593882b0b18c19dc/yarl-1.20.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:d25ddcf954df1754ab0f86bb696af765c5bfaba39b74095f27eececa049ef9a4", size = 382049, upload-time = "2025-06-10T00:44:42.854Z" }, + { url = "https://files.pythonhosted.org/packages/19/3a/e54e2c4752160115183a66dc9ee75a153f81f3ab2ba4bf79c3c53b33de34/yarl-1.20.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:909313577e9619dcff8c31a0ea2aa0a2a828341d92673015456b3ae492e7317b", size = 384774, upload-time = "2025-06-10T00:44:45.275Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/200ae86dabfca89060ec6447649f219b4cbd94531e425e50d57e5f5ac330/yarl-1.20.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:793fd0580cb9664548c6b83c63b43c477212c0260891ddf86809e1c06c8b08f1", size = 374252, upload-time = "2025-06-10T00:44:47.31Z" }, + { url = 
"https://files.pythonhosted.org/packages/83/75/11ee332f2f516b3d094e89448da73d557687f7d137d5a0f48c40ff211487/yarl-1.20.1-cp313-cp313-win32.whl", hash = "sha256:468f6e40285de5a5b3c44981ca3a319a4b208ccc07d526b20b12aeedcfa654b7", size = 81198, upload-time = "2025-06-10T00:44:49.164Z" }, + { url = "https://files.pythonhosted.org/packages/ba/ba/39b1ecbf51620b40ab402b0fc817f0ff750f6d92712b44689c2c215be89d/yarl-1.20.1-cp313-cp313-win_amd64.whl", hash = "sha256:495b4ef2fea40596bfc0affe3837411d6aa3371abcf31aac0ccc4bdd64d4ef5c", size = 86346, upload-time = "2025-06-10T00:44:51.182Z" }, + { url = "https://files.pythonhosted.org/packages/43/c7/669c52519dca4c95153c8ad96dd123c79f354a376346b198f438e56ffeb4/yarl-1.20.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:f60233b98423aab21d249a30eb27c389c14929f47be8430efa7dbd91493a729d", size = 138826, upload-time = "2025-06-10T00:44:52.883Z" }, + { url = "https://files.pythonhosted.org/packages/6a/42/fc0053719b44f6ad04a75d7f05e0e9674d45ef62f2d9ad2c1163e5c05827/yarl-1.20.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:6f3eff4cc3f03d650d8755c6eefc844edde99d641d0dcf4da3ab27141a5f8ddf", size = 93217, upload-time = "2025-06-10T00:44:54.658Z" }, + { url = "https://files.pythonhosted.org/packages/4f/7f/fa59c4c27e2a076bba0d959386e26eba77eb52ea4a0aac48e3515c186b4c/yarl-1.20.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:69ff8439d8ba832d6bed88af2c2b3445977eba9a4588b787b32945871c2444e3", size = 92700, upload-time = "2025-06-10T00:44:56.784Z" }, + { url = "https://files.pythonhosted.org/packages/2f/d4/062b2f48e7c93481e88eff97a6312dca15ea200e959f23e96d8ab898c5b8/yarl-1.20.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cf34efa60eb81dd2645a2e13e00bb98b76c35ab5061a3989c7a70f78c85006d", size = 347644, upload-time = "2025-06-10T00:44:59.071Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/47/78b7f40d13c8f62b499cc702fdf69e090455518ae544c00a3bf4afc9fc77/yarl-1.20.1-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:8e0fe9364ad0fddab2688ce72cb7a8e61ea42eff3c7caeeb83874a5d479c896c", size = 323452, upload-time = "2025-06-10T00:45:01.605Z" }, + { url = "https://files.pythonhosted.org/packages/eb/2b/490d3b2dc66f52987d4ee0d3090a147ea67732ce6b4d61e362c1846d0d32/yarl-1.20.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8f64fbf81878ba914562c672024089e3401974a39767747691c65080a67b18c1", size = 346378, upload-time = "2025-06-10T00:45:03.946Z" }, + { url = "https://files.pythonhosted.org/packages/66/ad/775da9c8a94ce925d1537f939a4f17d782efef1f973039d821cbe4bcc211/yarl-1.20.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6342d643bf9a1de97e512e45e4b9560a043347e779a173250824f8b254bd5ce", size = 353261, upload-time = "2025-06-10T00:45:05.992Z" }, + { url = "https://files.pythonhosted.org/packages/4b/23/0ed0922b47a4f5c6eb9065d5ff1e459747226ddce5c6a4c111e728c9f701/yarl-1.20.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56dac5f452ed25eef0f6e3c6a066c6ab68971d96a9fb441791cad0efba6140d3", size = 335987, upload-time = "2025-06-10T00:45:08.227Z" }, + { url = "https://files.pythonhosted.org/packages/3e/49/bc728a7fe7d0e9336e2b78f0958a2d6b288ba89f25a1762407a222bf53c3/yarl-1.20.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7d7f497126d65e2cad8dc5f97d34c27b19199b6414a40cb36b52f41b79014be", size = 329361, upload-time = "2025-06-10T00:45:10.11Z" }, + { url = "https://files.pythonhosted.org/packages/93/8f/b811b9d1f617c83c907e7082a76e2b92b655400e61730cd61a1f67178393/yarl-1.20.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:67e708dfb8e78d8a19169818eeb5c7a80717562de9051bf2413aca8e3696bf16", size = 346460, upload-time = 
"2025-06-10T00:45:12.055Z" }, + { url = "https://files.pythonhosted.org/packages/70/fd/af94f04f275f95da2c3b8b5e1d49e3e79f1ed8b6ceb0f1664cbd902773ff/yarl-1.20.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:595c07bc79af2494365cc96ddeb772f76272364ef7c80fb892ef9d0649586513", size = 334486, upload-time = "2025-06-10T00:45:13.995Z" }, + { url = "https://files.pythonhosted.org/packages/84/65/04c62e82704e7dd0a9b3f61dbaa8447f8507655fd16c51da0637b39b2910/yarl-1.20.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7bdd2f80f4a7df852ab9ab49484a4dee8030023aa536df41f2d922fd57bf023f", size = 342219, upload-time = "2025-06-10T00:45:16.479Z" }, + { url = "https://files.pythonhosted.org/packages/91/95/459ca62eb958381b342d94ab9a4b6aec1ddec1f7057c487e926f03c06d30/yarl-1.20.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c03bfebc4ae8d862f853a9757199677ab74ec25424d0ebd68a0027e9c639a390", size = 350693, upload-time = "2025-06-10T00:45:18.399Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/d393e82dd955ad20617abc546a8f1aee40534d599ff555ea053d0ec9bf03/yarl-1.20.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:344d1103e9c1523f32a5ed704d576172d2cabed3122ea90b1d4e11fe17c66458", size = 355803, upload-time = "2025-06-10T00:45:20.677Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ed/c5fb04869b99b717985e244fd93029c7a8e8febdfcffa06093e32d7d44e7/yarl-1.20.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:88cab98aa4e13e1ade8c141daeedd300a4603b7132819c484841bb7af3edce9e", size = 341709, upload-time = "2025-06-10T00:45:23.221Z" }, + { url = "https://files.pythonhosted.org/packages/24/fd/725b8e73ac2a50e78a4534ac43c6addf5c1c2d65380dd48a9169cc6739a9/yarl-1.20.1-cp313-cp313t-win32.whl", hash = "sha256:b121ff6a7cbd4abc28985b6028235491941b9fe8fe226e6fdc539c977ea1739d", size = 86591, upload-time = "2025-06-10T00:45:25.793Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/c3/b2e9f38bc3e11191981d57ea08cab2166e74ea770024a646617c9cddd9f6/yarl-1.20.1-cp313-cp313t-win_amd64.whl", hash = "sha256:541d050a355bbbc27e55d906bc91cb6fe42f96c01413dd0f4ed5a5240513874f", size = 93003, upload-time = "2025-06-10T00:45:27.752Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2d/2345fce04cfd4bee161bf1e7d9cdc702e3e16109021035dbb24db654a622/yarl-1.20.1-py3-none-any.whl", hash = "sha256:83b8eb083fe4683c6115795d9fc1cfaf2cbbefb19b3a1cb68f6527460f483a77", size = 46542, upload-time = "2025-06-10T00:46:07.521Z" }, +] From 41a263daa6d163da247c1c8dc7f4da213ff5e615 Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Fri, 18 Jul 2025 17:03:40 +0100 Subject: [PATCH 13/15] core: improve gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 3f0b758..9fdd511 100644 --- a/.gitignore +++ b/.gitignore @@ -65,6 +65,7 @@ mimic*.db # Configuration files config.json *config*.json +m3_pipeline.json # Operating System specific files .DS_Store From f38d8a76b41a0c2039c1e03fe1b7674bdfa6c08c Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Mon, 21 Jul 2025 11:02:39 +0100 Subject: [PATCH 14/15] feat(tools): add m3_tool_builder --- .../configurations/knowledge.yaml | 51 + .../configurations/tools_codebase.txt | 1633 +++++++++++++++++ src/m3/tools/m3toolbuilder/m3toolbuilder.py | 299 +++ 3 files changed, 1983 insertions(+) create mode 100644 src/m3/tools/m3toolbuilder/configurations/knowledge.yaml create mode 100644 src/m3/tools/m3toolbuilder/configurations/tools_codebase.txt create mode 100644 src/m3/tools/m3toolbuilder/m3toolbuilder.py diff --git a/src/m3/tools/m3toolbuilder/configurations/knowledge.yaml b/src/m3/tools/m3toolbuilder/configurations/knowledge.yaml new file mode 100644 index 0000000..1eb777e --- /dev/null +++ b/src/m3/tools/m3toolbuilder/configurations/knowledge.yaml @@ -0,0 +1,51 @@ +principles: + idea_behind_m3_tools: >- + M3 tools are designed as modular, 
reusable components that integrate + seamlessly with the MCP (Model Context Protocol) ecosystem. The Model Context Protocol (MCP) is an open standard + developed by Anthropic to enable seamless integration between AI models (like large language models) and external + data sources or tools, standardizing access to context such as files, databases, or APIs to reduce custom + integrations. + The core idea is to create tool-agnostic building blocks that can be chained + together to form complex pipelines for tasks like data access, analysis, and + interaction. This allows for easy composition (e.g., combining multiple + tools in one pipeline), scalability (adding new tools without breaking + existing ones), and reproducibility (via presets and serialization). Tools + empower users to build custom MCP servers for specific domains, like + clinical data querying, while maintaining consistency across the library. + importance_of_architecture: >- + Following the M3 architecture is crucial for maintainability, + interoperability, and extensibility. By inheriting from BaseTool and + BaseToolCLI, tools ensure uniform lifecycle management (init/teardown), + environment validation, MCP action registration, and CLI consistency. This + structure prevents fragmentation, enables automatic registration/validation + in the registry, supports chaining APIs for pipelines, and facilitates + integration with MCP hosts like FastMCP or ClaudeDesktop. Deviating from it + could lead to incompatible tools, breaking builds/runs, or security + issuesโ€”sticking to it guarantees tools 'just work' in the ecosystem. + what_is_a_tool: >- + An M3 tool is essentially an MCP-compatible module that provides specific + functionality, such as data querying or processing, through a set of actions + (callable functions exposed to MCP). It encapsulates logic like backends + (e.g., databases), configurations (YAML for env/datasets/security), and + utilities (e.g., data I/O, validation). 
Tools are self-contained directories + with a main class (inheriting BaseTool for actions/lifecycle/serialization), + CLI (for user setup/status), and optional components. As MCP tools, they + register functions for conversational AI use, enabling secure, modular + interactions in pipelines. + python_best_practices: >- + 1) IMPORTANT + - FILES NAMING for the tool itself. As follows. Name of a folder == name of file . py == name of class. Do not add suffixes, underscores or other characters if not needed. + - Highly-typed Pythonic code: Use beartype for runtime type checking on classes, with annotations strictly for parameters and return types (but not for variables within methods). + - Docstrings: Use one-liner docstrings at the class level only; avoid file-level docstrings. + - It is much better to use hardcoded data from a yaml declarative file than hardcoding the data in the python script. + - Error handling: use try-except blocks to catch side effects. + - Logging: Set up logging `logger = logging.getLogger(__name__)` and utilise logger for debugging and information messages instead of print statements. + - PEP8/idiomatic code: Follow PEP8 standards, use idiomatic Python (no async unless beneficial), and ensure meaningful variable names (avoid single-letter like x, y, z). + + 2) MID IMPORTANT + - Serialization: Include to_dict and from_dict methods for object serialization and deserialization. + - Validation: Perform environment and other checks (e.g., env/checks) to ensure integrity. + + 3) ADDITIONAL + - No individual READMEs: Do not create README.md files for individual tools; handle them manually at the project level. + - No version numbers in code: Avoid including version numbers within tool code; manage them manually at the M3 project level. 
diff --git a/src/m3/tools/m3toolbuilder/configurations/tools_codebase.txt b/src/m3/tools/m3toolbuilder/configurations/tools_codebase.txt new file mode 100644 index 0000000..838b9fe --- /dev/null +++ b/src/m3/tools/m3toolbuilder/configurations/tools_codebase.txt @@ -0,0 +1,1633 @@ +Directory structure: +โ””โ”€โ”€ tools/ + โ”œโ”€โ”€ __init__.py + โ”œโ”€โ”€ registry.py + โ””โ”€โ”€ mimic/ + โ”œโ”€โ”€ __init__.py + โ”œโ”€โ”€ cli.py + โ”œโ”€โ”€ mimic.py + โ”œโ”€โ”€ components/ + โ”‚ โ”œโ”€โ”€ __init__.py + โ”‚ โ”œโ”€โ”€ auth.py + โ”‚ โ”œโ”€โ”€ data_io.py + โ”‚ โ””โ”€โ”€ utils.py + โ””โ”€โ”€ configurations/ + โ”œโ”€โ”€ datasets.yaml + โ”œโ”€โ”€ env_vars.yaml + โ””โ”€โ”€ security.yaml + +================================================ +FILE: __init__.py +================================================ + + + +================================================ +FILE: registry.py +================================================ +import importlib +import inspect +import logging +import os + +from beartype import beartype + +from m3.core.tool.base import BaseTool +from m3.core.tool.cli.base import BaseToolCLI +from m3.core.utils.exceptions import M3ValidationError + +logger = logging.getLogger(__name__) + +TOOLS_DIR = os.path.dirname(__file__) + +ALL_TOOLS = {} + + +@beartype +def _initialize() -> None: + """ + Automatically discover and register tools from subdirectories in tools/. + """ + for entry in os.scandir(TOOLS_DIR): + if entry.is_dir() and not entry.name.startswith("_"): + tool_name = entry.name.lower() + try: + main_module_path = f"m3.tools.{tool_name}.{tool_name}" + main_module = importlib.import_module(main_module_path) + + tool_classes = [ + obj + for name, obj in inspect.getmembers(main_module) + if inspect.isclass(obj) + and issubclass(obj, BaseTool) + and obj != BaseTool + ] + if len(tool_classes) != 1: + raise M3ValidationError( + f"Tool '{tool_name}' must have exactly one subclass of BaseTool in {main_module_path}.py. 
Found: {len(tool_classes)}" + ) + tool_class = tool_classes[0] + + cli_module_path = f"m3.tools.{tool_name}.cli" + cli_module = importlib.import_module(cli_module_path) + + cli_classes = [ + obj + for name, obj in inspect.getmembers(cli_module) + if inspect.isclass(obj) + and issubclass(obj, BaseToolCLI) + and obj != BaseToolCLI + ] + if len(cli_classes) != 1: + raise M3ValidationError( + f"Tool '{tool_name}' must have exactly one subclass of BaseToolCLI in {cli_module_path}.py. Found: {len(cli_classes)}" + ) + + ALL_TOOLS[tool_name] = tool_class + except ImportError as e: + logger.warning( + f"Failed to import modules for tool '{tool_name}': {e!s}. Skipping registration (components not fully available)." + ) + except M3ValidationError as e: + logger.warning( + f"Validation failed for tool '{tool_name}': {e!s}. Skipping registration (BaseTool or BaseToolCLI not available as required)." + ) + except Exception as e: + logger.error( + f"Unexpected error discovering tool '{tool_name}': {e!s}. 
Skipping registration.", + exc_info=True, + ) + + +_initialize() + + +================================================ +FILE: mimic/__init__.py +================================================ +from .cli import MimicCLI +from .mimic import MIMIC + +__all__ = ["MIMIC", "MimicCLI"] + + + +================================================ +FILE: mimic/cli.py +================================================ +import json +import logging +import os +from pathlib import Path + +import typer +from beartype import beartype +from beartype.typing import Annotated, Any, Dict, Optional +from rich.console import Console +from rich.panel import Panel +from rich.table import Table + +from m3.core.config import M3Config +from m3.core.tool.cli.base import BaseToolCLI, ToolConfig +from m3.core.utils.exceptions import M3ValidationError +from m3.core.utils.helpers import get_config +from m3.tools.mimic.components.data_io import DataIO +from m3.tools.mimic.components.utils import ( + get_default_database_path, + load_supported_datasets, +) + +logger = logging.getLogger(__name__) + +console = Console() + + +@beartype +class MimicCLI(BaseToolCLI): + @classmethod + def get_app(cls) -> typer.Typer: + app = typer.Typer( + help="MIMIC-IV tool commands.", + add_completion=False, + pretty_exceptions_show_locals=False, + rich_markup_mode="markdown", + ) + app.command(help="Initialise the dataset and database.")(cls.init) + app.command(help="Configure the MIMIC-IV tool.")(cls.configure) + app.command(help="Display the current status of the MIMIC-IV tool.")(cls.status) + return app + + @classmethod + def init( + cls, + dataset: Annotated[ + str, + typer.Option( + "--dataset", + "-d", + help="Dataset name to initialize (e.g., 'mimic-iv-demo').", + ), + ] = "mimic-iv-demo", + db_path: Annotated[ + Optional[str], + typer.Option( + "--db-path", + "-p", + help="Path to save the SQLite DB (defaults to a standard location).", + ), + ] = None, + force: Annotated[ + bool, + typer.Option( + "--force", 
"-f", help="Force re-download and re-initialization." + ), + ] = False, + ) -> None: + datasets = load_supported_datasets() + if dataset.lower() not in datasets: + console.print("[red]โŒ Unknown dataset. Available:[/red]") + table = Table(show_header=False) + for ds in datasets.keys(): + table.add_row(f"[cyan]{ds}[/cyan]") + console.print(table) + raise typer.Exit(code=1) + + config = get_config() + _db_path = ( + Path(db_path) if db_path else get_default_database_path(config, dataset) + ) + if _db_path is None: + console.print("[red]โŒ Cannot determine DB path.[/red]") + raise typer.Exit(code=1) + + if _db_path.exists() and not force: + console.print( + f"[yellow]โš ๏ธ DB exists at {_db_path}. Use --force to overwrite.[/yellow]" + ) + raise typer.Exit(code=1) + + data_io = DataIO(config) + success = data_io.initialize(dataset, _db_path) + + if success: + console.print(f"[green]โœ… Initialized {dataset} at {_db_path}.[/green]") + else: + console.print(f"[red]โŒ Initialization failed for {dataset}.[/red]") + raise typer.Exit(code=1) + + @classmethod + def configure( + cls, + backend: Annotated[ + Optional[str], + typer.Option("--backend", "-b", help="Backend ('sqlite' or 'bigquery')."), + ] = None, + db_path: Annotated[ + Optional[str], + typer.Option("--db-path", help="SQLite DB path (if backend=sqlite)."), + ] = None, + project_id: Annotated[ + Optional[str], + typer.Option("--project-id", help="GCP Project ID (if backend=bigquery)."), + ] = None, + enable_oauth2: Annotated[ + bool, + typer.Option("--enable-oauth2", "-o", help="Enable OAuth2."), + ] = False, + issuer_url: Annotated[ + Optional[str], + typer.Option("--issuer-url", help="OAuth2 Issuer URL."), + ] = None, + audience: Annotated[ + Optional[str], + typer.Option("--audience", help="OAuth2 Audience."), + ] = None, + required_scopes: Annotated[ + Optional[str], + typer.Option( + "--required-scopes", help="OAuth2 Required Scopes (comma-separated)." 
+ ), + ] = None, + jwks_url: Annotated[ + Optional[str], + typer.Option("--jwks-url", help="OAuth2 JWKS URL (optional)."), + ] = None, + rate_limit_requests: Annotated[ + int, + typer.Option("--rate-limit-requests", help="OAuth2 Rate Limit Requests."), + ] = 100, + output: Annotated[ + Optional[str], + typer.Option("--output", "-o", help="Output path for config JSON."), + ] = None, + verbose: Annotated[ + bool, + typer.Option("--verbose", "-v", help="Print config dict."), + ] = False, + ) -> ToolConfig: + env_vars: Dict[str, str] = {} + tool_params: Dict[str, Any] = {} + + console.print("[turquoise4]๐Ÿ’ฌ Configuring MIMIC-IV tool...[/turquoise4]") + + if not backend: + backend = typer.prompt( + "Backend (sqlite/bigquery)", default="sqlite" + ).lower() + + if backend not in ["sqlite", "bigquery"]: + console.print("[red]โŒ Invalid backend. Use 'sqlite' or 'bigquery'.[/red]") + raise typer.Exit(code=1) + + env_vars["M3_BACKEND"] = backend + tool_params["backend_key"] = backend + + backends_list = [] + if backend == "sqlite": + if db_path is None: + default_db = get_default_database_path(get_config(), "mimic-iv-demo") + if default_db is None: + raise M3ValidationError("Cannot determine default DB path") + console.print(f"[yellow]๐Ÿ’ฌ Default DB path: {default_db}[/yellow]") + db_path = typer.prompt( + "SQLite DB path (Enter for default)", default=str(default_db) + ) + if db_path and not Path(db_path).exists(): + console.print( + f"[yellow]โš ๏ธ DB path {db_path} does not exist. 
Using default path.[/yellow]" + ) + db_path = str(get_default_database_path(get_config(), "mimic-iv-demo")) + env_vars["M3_DB_PATH"] = db_path + backends_list.append({"type": "sqlite", "params": {"path": db_path}}) + elif backend == "bigquery": + if project_id is None: + project_id = typer.prompt("GCP Project ID (required)") + if not project_id: + raise M3ValidationError("Project ID required for BigQuery") + env_vars["M3_PROJECT_ID"] = project_id + env_vars["GOOGLE_CLOUD_PROJECT"] = project_id + backends_list.append( + {"type": "bigquery", "params": {"project": project_id}} + ) + + tool_params["backends"] = backends_list + + if enable_oauth2: + if issuer_url is None: + issuer_url = typer.prompt("Issuer URL") + if audience is None: + audience = typer.prompt("Audience") + if required_scopes is None: + required_scopes = typer.prompt( + "Scopes [read:mimic-data]", default="read:mimic-data" + ) + env_vars.update( + { + "M3_OAUTH2_ENABLED": "true", + "M3_OAUTH2_ISSUER_URL": issuer_url, + "M3_OAUTH2_AUDIENCE": audience, + "M3_OAUTH2_REQUIRED_SCOPES": required_scopes, + } + ) + if jwks_url is None: + jwks_url = typer.prompt("JWKS URL (optional)", default="") + jwks_url = jwks_url.strip() + if jwks_url: + env_vars["M3_OAUTH2_JWKS_URL"] = jwks_url + env_vars["M3_OAUTH2_RATE_LIMIT_REQUESTS"] = str(rate_limit_requests) + + console.print( + "\n[turquoise4]๐Ÿ’ฌ Additional env vars (key=value, Enter to finish):[/turquoise4]" + ) + additional_env = {} + while True: + env_var = typer.prompt("", default="", show_default=False) + if not env_var: + break + if "=" in env_var: + key, value = env_var.split("=", 1) + additional_env[key.strip()] = value.strip() + else: + console.print("[red]Invalid: Use key=value[/red]") + env_vars.update(additional_env) + + config_dict = {"env_vars": env_vars, "tool_params": tool_params} + + output = output or "mimic_config.json" + with open(output, "w") as f: + json.dump(config_dict, f, indent=4) + console.print(f"[green]โœ… Config dict saved to 
{output}[/green]") + + if verbose: + console.print( + Panel( + json.dumps(config_dict, indent=2), + title="[bold green]Configuration[/bold green]", + border_style="green", + ) + ) + return config_dict + + @classmethod + def status(cls, verbose: bool = False) -> None: + try: + config = M3Config(env_vars=os.environ.copy()) + _db_path = ( + str(get_default_database_path(config, "mimic-iv-demo")) or "Default" + ) + + table = Table(title="[bold green]MIMIC Tool Status[/bold green]") + table.add_column("Key", style="cyan") + table.add_column("Value", style="magenta") + table.add_row("Backend", config.get_env_var("M3_BACKEND", "sqlite")) + table.add_row("DB Path", config.get_env_var("M3_DB_PATH", _db_path)) + table.add_row( + "OAuth2 Enabled", config.get_env_var("M3_OAUTH2_ENABLED", "No") + ) + console.print(table) + if verbose: + env_table = Table( + title="[bold green]Environment Variables (M3_*)[/bold green]" + ) + env_table.add_column("Key", style="cyan") + env_table.add_column("Value", style="magenta") + for key, value in sorted(config.env_vars.items()): + if key.startswith("M3_"): + env_table.add_row(key, value) + console.print(env_table) + except Exception as e: + console.print(f"[red]โŒ Error getting status: {e}[/red]") + logger.error(f"Status failed: {e}") + + + +================================================ +FILE: mimic/mimic.py +================================================ +import logging +from collections.abc import Callable + +import sqlparse +from beartype import beartype +from beartype.typing import Any, Dict, List, Optional, Tuple + +from m3.core.config import M3Config +from m3.core.tool.backend.base import BackendBase +from m3.core.tool.backend.registry import BACKEND_REGISTRY +from m3.core.tool.base import BaseTool +from m3.core.utils.exceptions import M3ValidationError +from m3.tools.mimic.components.auth import Auth +from m3.tools.mimic.components.data_io import DataIO +from m3.tools.mimic.components.utils import ( + load_env_vars_config, + 
load_security_config, + validate_limit, +) + +logger = logging.getLogger(__name__) + + +@beartype +class MIMIC(BaseTool): + @beartype + def __init__( + self, + backends: List[BackendBase], + config: Optional[M3Config] = None, + data_io: Optional[DataIO] = None, + backend_key: str = "sqlite", + ) -> None: + super().__init__() + self.config = config or M3Config() + self.env_config = load_env_vars_config() + self._set_required_env_vars(backend_key) + self._set_backends(backends) + self.data_io = data_io or DataIO(self.config) + self.backend_key = backend_key + self._set_auth() + self._validate_backend_key(backend_key) + self.security_config = {} + self.table_names = {} + + def to_dict(self) -> Dict[str, Any]: + return { + "backend_key": self.backend_key, + "backends": [ + {"type": k, "params": v.to_dict()} for k, v in self.backends.items() + ], + } + + @classmethod + def from_dict(cls, params: Dict[str, Any]) -> "MIMIC": + try: + backends_list = [] + for bd in params["backends"]: + backend_type = bd["type"] + if backend_type not in BACKEND_REGISTRY: + raise ValueError(f"Unknown backend type: {backend_type}") + backend_cls = BACKEND_REGISTRY[backend_type] + backends_list.append(backend_cls.from_dict(bd["params"])) + return cls( + backends=backends_list, + backend_key=params["backend_key"], + ) + except KeyError as e: + raise ValueError(f"Missing required param: {e}") from e + except Exception as e: + raise ValueError(f"Failed to reconstruct MIMIC: {e}") from e + + def actions(self) -> List[Callable]: + def get_database_schema() -> str: + """๐Ÿ” Discover what data is available in the MIMIC-IV database. + + **When to use:** Start here when you need to understand what tables exist, or when someone asks about data that might be in multiple tables. + + **What this does:** Shows all available tables so you can identify which ones contain the data you need. 
+ + **Next steps after using this:** + - If you see relevant tables, use `get_table_info(table_name)` to explore their structure + - Common tables: `patients` (demographics), `admissions` (hospital stays), `icustays` (ICU data), `labevents` (lab results) + + Returns: + List of all available tables in the database with current backend info + """ + backend_info = self._get_backend_info() + if "sqlite" in self.backend_key.lower(): + query = ( + "SELECT name FROM sqlite_master WHERE type='table' ORDER BY name" + ) + result = self.backends[self.backend_key].execute(query) + return f"{backend_info}\n๐Ÿ“‹ **Available Tables:**\n{result}" + else: + hosp_dataset = self.config.get_env_var( + "M3_BIGQUERY_HOSP_DATASET", "mimiciv_3_1_hosp" + ) + icu_dataset = self.config.get_env_var( + "M3_BIGQUERY_ICU_DATASET", "mimiciv_3_1_icu" + ) + project = self.config.get_env_var( + "M3_BIGQUERY_PROJECT", "physionet-data" + ) + query = f""" + SELECT CONCAT('`{project}.{hosp_dataset}.', table_name, '`') as query_ready_table_name + FROM `{project}.{hosp_dataset}.INFORMATION_SCHEMA.TABLES` + UNION ALL + SELECT CONCAT('`{project}.{icu_dataset}.', table_name, '`') as query_ready_table_name + FROM `{project}.{icu_dataset}.INFORMATION_SCHEMA.TABLES` + ORDER BY query_ready_table_name + """ + result = self.backends[self.backend_key].execute(query) + return f"{backend_info}\n๐Ÿ“‹ **Available Tables (query-ready names):**\n{result}\n\n๐Ÿ’ก **Copy-paste ready:** These table names can be used directly in your SQL queries!" + + def get_table_info(table_name: str, show_sample: bool = True) -> str: + """๐Ÿ“‹ Explore a specific table's structure and see sample data. + + **When to use:** After you know which table you need (from `get_database_schema()`), use this to understand the columns and data format. 
+ + **What this does:** + - Shows column names, types, and constraints + - Displays sample rows so you understand the actual data format + - Helps you write accurate SQL queries + + **Pro tip:** Always look at sample data! It shows you the actual values, date formats, and data patterns. + + Args: + table_name: Exact table name from the schema (case-sensitive). Can be simple name or fully qualified BigQuery name. + show_sample: Whether to include sample rows (default: True, recommended) + + Returns: + Complete table structure with sample data to help you write queries + """ + backend_info = self._get_backend_info() + if "sqlite" in self.backend_key.lower(): + pragma_query = f"PRAGMA table_info({table_name})" + try: + result = self.backends[self.backend_key].execute(pragma_query) + info_result = f"{backend_info}๐Ÿ“‹ **Table:** {table_name}\n\n**Column Information:**\n{result}" + if show_sample: + sample_query = f"SELECT * FROM {table_name} LIMIT 3" + sample_result = self.backends[self.backend_key].execute( + sample_query + ) + info_result += ( + f"\n\n๐Ÿ“Š **Sample Data (first 3 rows):**\n{sample_result}" + ) + return info_result + except Exception as e: + return f"{backend_info}โŒ Error examining table '{table_name}': {e}\n\n๐Ÿ’ก Use get_database_schema() to see available tables." + else: + if "." 
in table_name and "physionet-data" in table_name: + clean_name = table_name.strip("`") + full_table_name = f"`{clean_name}`" + parts = clean_name.split(".") + if len(parts) != 3: + return f"{backend_info}โŒ **Invalid qualified table name:** `{table_name}`\n\n**Expected format:** `project.dataset.table`\n**Example:** `physionet-data.mimiciv_3_1_hosp.diagnoses_icd`\n\n**Available MIMIC-IV datasets:**\n- `physionet-data.mimiciv_3_1_hosp.*` (hospital module)\n- `physionet-data.mimiciv_3_1_icu.*` (ICU module)" + simple_table_name = parts[2] + dataset = f"{parts[0]}.{parts[1]}" + else: + simple_table_name = table_name + full_table_name = None + dataset = None + + if full_table_name: + try: + info_query = f""" + SELECT column_name, data_type, is_nullable + FROM {dataset}.INFORMATION_SCHEMA.COLUMNS + WHERE table_name = '{simple_table_name}' + ORDER BY ordinal_position + """ + info_result = self.backends[self.backend_key].execute( + info_query + ) + if "No results found" not in info_result: + result = f"{backend_info}๐Ÿ“‹ **Table:** {full_table_name}\n\n**Column Information:**\n{info_result}" + if show_sample: + sample_query = ( + f"SELECT * FROM {full_table_name} LIMIT 3" + ) + sample_result = self.backends[self.backend_key].execute( + sample_query + ) + result += f"\n\n๐Ÿ“Š **Sample Data (first 3 rows):**\n{sample_result}" + return result + except Exception: + pass + + for ds in [ + self.config.get_env_var( + "M3_BIGQUERY_HOSP_DATASET", "mimiciv_3_1_hosp" + ), + self.config.get_env_var( + "M3_BIGQUERY_ICU_DATASET", "mimiciv_3_1_icu" + ), + ]: + try: + full_table_name = f"`{self.config.get_env_var('M3_BIGQUERY_PROJECT', 'physionet-data')}.{ds}.{simple_table_name}`" + info_query = f""" + SELECT column_name, data_type, is_nullable + FROM `{self.config.get_env_var("M3_BIGQUERY_PROJECT", "physionet-data")}.{ds}.INFORMATION_SCHEMA.COLUMNS` + WHERE table_name = '{simple_table_name}' + ORDER BY ordinal_position + """ + info_result = self.backends[self.backend_key].execute( + 
info_query + ) + if "No results found" not in info_result: + result = f"{backend_info}๐Ÿ“‹ **Table:** {full_table_name}\n\n**Column Information:**\n{info_result}" + if show_sample: + sample_query = ( + f"SELECT * FROM {full_table_name} LIMIT 3" + ) + sample_result = self.backends[self.backend_key].execute( + sample_query + ) + result += f"\n\n๐Ÿ“Š **Sample Data (first 3 rows):**\n{sample_result}" + return result + except Exception: + continue + return f"{backend_info}โŒ Table '{table_name}' not found in any dataset. Use get_database_schema() to see available tables." + + def execute_mimic_query(sql_query: str) -> str: + """๐Ÿš€ Execute SQL queries to analyze MIMIC-IV data. + + **๐Ÿ’ก Pro tip:** For best results, explore the database structure first! + + **Recommended workflow (especially for smaller models):** + 1. **See available tables:** Use `get_database_schema()` to list all tables + 2. **Examine table structure:** Use `get_table_info('table_name')` to see columns and sample data + 3. **Write your SQL query:** Use exact table/column names from exploration + + **Why exploration helps:** + - Table names vary between backends (SQLite vs BigQuery) + - Column names may be unexpected (e.g., age might be 'anchor_age') + - Sample data shows actual formats and constraints + + Args: + sql_query: Your SQL SELECT query (must be SELECT only) + + Returns: + Query results or helpful error messages with next steps + """ + is_safe, message = self._is_safe_query(sql_query) + if not is_safe: + if "describe" in sql_query.lower() or "show" in sql_query.lower(): + return f"โŒ **Security Error:** {message}\n\n๐Ÿ” **For table structure:** Use `get_table_info('table_name')` instead of DESCRIBE\n๐Ÿ“‹ **Why this is better:** Shows columns, types, AND sample data to understand the actual data\n\n๐Ÿ’ก **Recommended workflow:**\n1. `get_database_schema()` โ† See available tables\n2. `get_table_info('table_name')` โ† Explore structure\n3. 
`execute_mimic_query('SELECT ...')` โ† Run your analysis" + return f"โŒ **Security Error:** {message}\n\n๐Ÿ’ก **Tip:** Only SELECT statements are allowed for data analysis." + try: + result = self.backends[self.backend_key].execute(sql_query) + return result + except Exception as e: + error_msg = str(e).lower() + suggestions = [] + if "no such table" in error_msg or "table not found" in error_msg: + suggestions.append( + "๐Ÿ” **Table name issue:** Use `get_database_schema()` to see exact table names" + ) + suggestions.append( + f"๐Ÿ“‹ **Backend-specific naming:** {self.backend_key} has specific table naming conventions" + ) + suggestions.append( + "๐Ÿ’ก **Quick fix:** Check if the table name matches exactly (case-sensitive)" + ) + if "no such column" in error_msg or "column not found" in error_msg: + suggestions.append( + "๐Ÿ” **Column name issue:** Use `get_table_info('table_name')` to see available columns" + ) + suggestions.append( + "๐Ÿ“ **Common issue:** Column might be named differently (e.g., 'anchor_age' not 'age')" + ) + suggestions.append( + "๐Ÿ‘€ **Check sample data:** `get_table_info()` shows actual column names and sample values" + ) + if "syntax error" in error_msg: + suggestions.append( + "๐Ÿ“ **SQL syntax issue:** Check quotes, commas, and parentheses" + ) + suggestions.append( + f"๐ŸŽฏ **Backend syntax:** Verify your SQL works with {self.backend_key}" + ) + suggestions.append( + "๐Ÿ’ญ **Try simpler:** Start with `SELECT * FROM table_name LIMIT 5`" + ) + if "describe" in error_msg.lower() or "show" in error_msg.lower(): + suggestions.append( + "๐Ÿ” **Schema exploration:** Use `get_table_info('table_name')` instead of DESCRIBE" + ) + suggestions.append( + "๐Ÿ“‹ **Better approach:** `get_table_info()` shows columns AND sample data" + ) + if not suggestions: + suggestions.append( + "๐Ÿ” **Start exploration:** Use `get_database_schema()` to see available tables" + ) + suggestions.append( + "๐Ÿ“‹ **Check structure:** Use 
`get_table_info('table_name')` to understand the data" + ) + suggestion_text = "\n".join(f" {s}" for s in suggestions) + return f"โŒ **Query Failed:** {e}\n\n๐Ÿ› ๏ธ **How to fix this:**\n{suggestion_text}\n\n๐ŸŽฏ **Quick Recovery Steps:**\n1. `get_database_schema()` โ† See what tables exist\n2. `get_table_info('your_table')` โ† Check exact column names\n3. Retry your query with correct names\n\n๐Ÿ“š **Current Backend:** {self.backend_key} - table names and syntax are backend-specific" + + def get_icu_stays(patient_id: Optional[int] = None, limit: int = 10) -> str: + """๐Ÿฅ Get ICU stay information and length of stay data. + + **โš ๏ธ Note:** This is a convenience function that assumes standard MIMIC-IV table structure. + **For reliable queries:** Use `get_database_schema()` โ†’ `get_table_info()` โ†’ `execute_mimic_query()` workflow. + + **What you'll get:** Patient IDs, admission times, length of stay, and ICU details. + + Args: + patient_id: Specific patient ID to query (optional) + limit: Maximum number of records to return (default: 10) + + Returns: + ICU stay data as formatted text or guidance if table not found + """ + if not validate_limit(limit): + return "Error: Invalid limit. Must be a positive integer between 1 and 1000." + icustays_table = self.table_names["icustays"] + if patient_id: + query = ( + f"SELECT * FROM {icustays_table} WHERE subject_id = {patient_id}" + ) + else: + query = f"SELECT * FROM {icustays_table} LIMIT {limit}" + result = self.backends[self.backend_key].execute(query) + if "error" in result.lower() or "not found" in result.lower(): + return f"โŒ **Convenience function failed:** {result}\n\n๐Ÿ’ก **For reliable results, use the proper workflow:**\n1. `get_database_schema()` โ† See actual table names\n2. `get_table_info('table_name')` โ† Understand structure\n3. `execute_mimic_query('your_sql')` โ† Use exact names\n\nThis ensures compatibility across different MIMIC-IV setups." 
+ return result + + def get_lab_results( + patient_id: Optional[int] = None, + lab_item: Optional[str] = None, + limit: int = 20, + ) -> str: + """๐Ÿงช Get laboratory test results quickly. + + **โš ๏ธ Note:** This is a convenience function that assumes standard MIMIC-IV table structure. + **For reliable queries:** Use `get_database_schema()` โ†’ `get_table_info()` โ†’ `execute_mimic_query()` workflow. + + **What you'll get:** Lab values, timestamps, patient IDs, and test details. + + Args: + patient_id: Specific patient ID to query (optional) + lab_item: Lab item to search for in the value field (optional) + limit: Maximum number of records to return (default: 20) + + Returns: + Lab results as formatted text or guidance if table not found + """ + if not validate_limit(limit): + return "Error: Invalid limit. Must be a positive integer between 1 and 1000." + labevents_table = self.table_names["labevents"] + conditions = [] + if patient_id: + conditions.append(f"subject_id = {patient_id}") + if lab_item: + escaped_lab_item = lab_item.replace("'", "''") + conditions.append(f"value LIKE '%{escaped_lab_item}%'") + base_query = f"SELECT * FROM {labevents_table}" + if conditions: + base_query += " WHERE " + " AND ".join(conditions) + base_query += f" LIMIT {limit}" + result = self.backends[self.backend_key].execute(base_query) + if "error" in result.lower() or "not found" in result.lower(): + return f"โŒ **Convenience function failed:** {result}\n\n๐Ÿ’ก **For reliable results, use the proper workflow:**\n1. `get_database_schema()` โ† See actual table names\n2. `get_table_info('table_name')` โ† Understand structure\n3. `execute_mimic_query('your_sql')` โ† Use exact names\n\nThis ensures compatibility across different MIMIC-IV setups." + return result + + def get_race_distribution(limit: int = 10) -> str: + """๐Ÿ“Š Get race distribution from hospital admissions. + + **โš ๏ธ Note:** This is a convenience function that assumes standard MIMIC-IV table structure. 
+ **For reliable queries:** Use `get_database_schema()` โ†’ `get_table_info()` โ†’ `execute_mimic_query()` workflow. + + **What you'll get:** Count of patients by race category, ordered by frequency. + + Args: + limit: Maximum number of race categories to return (default: 10) + + Returns: + Race distribution as formatted text or guidance if table not found + """ + if not validate_limit(limit): + return "Error: Invalid limit. Must be a positive integer between 1 and 1000." + admissions_table = self.table_names["admissions"] + query = f"SELECT race, COUNT(*) as count FROM {admissions_table} GROUP BY race ORDER BY count DESC LIMIT {limit}" + result = self.backends[self.backend_key].execute(query) + if "error" in result.lower() or "not found" in result.lower(): + return f"โŒ **Convenience function failed:** {result}\n\n๐Ÿ’ก **For reliable results, use the proper workflow:**\n1. `get_database_schema()` โ† See actual table names\n2. `get_table_info('table_name')` โ† Understand structure\n3. `execute_mimic_query('your_sql')` โ† Use exact names\n\nThis ensures compatibility across different MIMIC-IV setups." 
+ return result + + actions_list = [ + get_database_schema, + get_table_info, + execute_mimic_query, + get_icu_stays, + get_lab_results, + get_race_distribution, + ] + if self.auth: + actions_list = [self.auth.decorator(action) for action in actions_list] + return actions_list + + def _set_required_env_vars(self, backend_key: str) -> None: + self.required_env_vars = {} + + def add_required_vars(section_vars: List[Dict[str, Any]]) -> None: + for var in section_vars: + if var.get("required", False): + key = var["key"] + default = var.get("default", None) + self.required_env_vars[key] = default + + add_required_vars(self.env_config.get("core", [])) + + backend_section = self.env_config.get("backends", {}).get(backend_key, []) + add_required_vars(backend_section) + + enabled = ( + self.config.get_env_var("M3_OAUTH2_ENABLED", "false").lower() == "true" + ) + if enabled: + add_required_vars(self.env_config.get("oauth2", [])) + + logger.debug( + f"Set {len(self.required_env_vars)} required env vars for backend '{backend_key}', oauth enabled: {enabled}" + ) + + def _set_backends(self, backends: List[BackendBase]) -> None: + self.backends = { + b.__class__.__name__.lower().replace("backend", ""): b for b in backends + } + + def _set_auth(self) -> None: + enabled = ( + self.config.get_env_var("M3_OAUTH2_ENABLED", "false").lower() == "true" + ) + self.auth = Auth(self.config) if enabled else None + + def _validate_backend_key(self, backend_key: str) -> None: + if backend_key not in self.backends: + raise M3ValidationError(f"Invalid backend key: {backend_key}") + + def _initialize(self) -> None: + self.table_names = {} + if self.backend_key == "sqlite": + env_vars = { + "icustays": ("M3_ICUSTAYS_TABLE", "icu_icustays"), + "labevents": ("M3_LABEVENTS_TABLE", "hosp_labevents"), + "admissions": ("M3_ADMISSIONS_TABLE", "hosp_admissions"), + } + self.table_names = { + key: self.config.get_env_var(*env) for key, env in env_vars.items() + } + else: + prefix = self.config.get_env_var( 
+ "M3_BIGQUERY_PREFIX", "`physionet-data.mimiciv_3_1_" + ) + self.table_names = { + "icustays": f"{prefix}icu.icustays`", + "labevents": f"{prefix}hosp.labevents`", + "admissions": f"{prefix}hosp.admissions`", + } + + def _get_backend_info(self) -> str: + if "sqlite" in self.backend_key.lower(): + return f"๐Ÿ”ง **Current Backend:** SQLite (local database)\n๐Ÿ“ **Database Path:** {self.backends[self.backend_key].path}\n" + else: + return f"๐Ÿ”ง **Current Backend:** BigQuery (cloud database)\nโ˜๏ธ **Project ID:** {self.backends[self.backend_key].project}\n" + + def _is_safe_query(self, sql_query: str) -> Tuple[bool, str]: + if not sql_query or not sql_query.strip(): + return False, "Empty query" + parsed = sqlparse.parse(sql_query.strip()) + if not parsed: + return False, "Invalid SQL syntax" + if len(parsed) > 1: + return False, "Multiple statements not allowed" + statement = parsed[0] + statement_type = statement.get_type() + if statement_type not in ("SELECT", "UNKNOWN"): + return False, "Only SELECT and PRAGMA queries allowed" + sql_upper = sql_query.strip().upper() + if sql_upper.startswith("PRAGMA"): + return True, "Safe PRAGMA statement" + if not self.security_config: + self.security_config = load_security_config() + dangerous_keywords = set(self.security_config.get("dangerous_keywords", [])) + for keyword in dangerous_keywords: + if f" {keyword} " in f" {sql_upper} ": + return False, f"Write operation not allowed: {keyword}" + injection_patterns = self.security_config.get("injection_patterns", []) + for pattern, description in injection_patterns: + if pattern.upper() in sql_upper: + return False, f"Injection pattern detected: {description}" + suspicious_names = set(self.security_config.get("suspicious_names", [])) + for name in suspicious_names: + if name.upper() in sql_upper: + return ( + False, + f"Suspicious identifier detected: {name} (not medical data)", + ) + return True, "Safe" + + def _post_load(self) -> None: + self.data_io = DataIO(self.config) + 
enabled = ( + self.config.get_env_var("M3_OAUTH2_ENABLED", "false").lower() == "true" + ) + self.auth = Auth(self.config) if enabled else None + + + +================================================ +FILE: mimic/components/__init__.py +================================================ +from .auth import Auth +from .data_io import DataIO + +__all__ = ["Auth", "DataIO"] + + + +================================================ +FILE: mimic/components/auth.py +================================================ +import asyncio +import logging +import time +from collections.abc import Callable +from functools import wraps +from urllib.parse import urljoin + +import httpx +import jwt +from beartype import beartype +from beartype.typing import Any, Dict, List, Optional +from cryptography.hazmat.primitives import serialization +from cryptography.hazmat.primitives.asymmetric import rsa + +from m3.core.config import M3Config +from m3.core.utils.exceptions import M3ValidationError + +logger = logging.getLogger(__name__) + + +@beartype +class Auth: + def __init__(self, config: M3Config) -> None: + self.config = config + self._set_enabled() + if not self.enabled: + return + self._set_issuer_and_audience() + self._set_required_scopes() + self._set_jwks_url() + self._set_cache() + self._set_http_client() + self._set_rate_limit() + self._set_validation_flags() + logger.info(f"OAuth2 enabled: {self.enabled}, issuer: {self.issuer_url}") + + async def authenticate(self, token: str) -> Dict[str, Any]: + jwks = await self._get_jwks() + unverified_header = jwt.get_unverified_header(token) + kid = unverified_header.get("kid") + if not kid: + raise M3ValidationError("Token missing key ID (kid)") + key = self._find_key(jwks, kid) + if not key: + raise M3ValidationError(f"No key found for kid: {kid}") + public_key = self._jwk_to_pem(key) + payload = jwt.decode( + token, + public_key, + algorithms=["RS256", "ES256"], + audience=self.audience, + issuer=self.issuer_url, + ) + 
self._validate_scopes(payload) + if self.rate_limit_enabled: + self._check_rate_limit(payload) + return payload + + @staticmethod + def generate_test_token( + issuer: str = "https://test-issuer.example.com", + audience: str = "m3-api", + subject: str = "test-user", + scopes: Optional[List[str]] = None, + expires_in: int = 3600, + ) -> str: + from datetime import datetime, timedelta, timezone + + scopes = scopes or ["read:mimic-data"] + now = datetime.now(timezone.utc) + claims = { + "iss": issuer, + "aud": audience, + "sub": subject, + "iat": int(now.timestamp()), + "exp": int((now + timedelta(seconds=expires_in)).timestamp()), + "scope": " ".join(scopes), + } + private_key = rsa.generate_private_key(public_exponent=65537, key_size=2048) + private_pem = private_key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.PKCS8, + encryption_algorithm=serialization.NoEncryption(), + ) + return jwt.encode(claims, private_pem, algorithm="RS256") + + def decorator(self, func: Callable) -> Callable: + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + if not self.enabled: + return ( + await func(*args, **kwargs) + if asyncio.iscoroutinefunction(func) + else func(*args, **kwargs) + ) + token = self.config.get_env_var("M3_OAUTH2_TOKEN", "") + if token.startswith("Bearer "): + token = token[7:] + if not token: + raise M3ValidationError("Missing OAuth2 access token") + await self.authenticate(token) + return ( + await func(*args, **kwargs) + if asyncio.iscoroutinefunction(func) + else func(*args, **kwargs) + ) + + return wrapper + + async def _get_jwks(self) -> Dict[str, Any]: + current_time = time.time() + if ( + self._jwks_cache + and current_time - self._jwks_cache_time < self.jwks_cache_ttl + ): + return self._jwks_cache + async with httpx.AsyncClient(timeout=30.0) as client: + response = await client.get(self.jwks_url) + response.raise_for_status() + jwks = response.json() + self._jwks_cache = jwks + 
self._jwks_cache_time = current_time + return jwks + + def _find_key(self, jwks: Dict[str, Any], kid: str) -> Optional[Dict[str, Any]]: + keys = jwks.get("keys", []) + for key in keys: + if key.get("kid") == kid: + return key + return None + + def _jwk_to_pem(self, jwk: Dict[str, Any]) -> bytes: + from jose.utils import base64url_decode + + if jwk.get("kty") == "RSA": + n = base64url_decode(jwk["n"]) + e = base64url_decode(jwk["e"]) + public_numbers = rsa.RSAPublicNumbers( + int.from_bytes(e, byteorder="big"), + int.from_bytes(n, byteorder="big"), + ) + public_key = public_numbers.public_key() + pem = public_key.public_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PublicFormat.SubjectPublicKeyInfo, + ) + return pem + raise M3ValidationError(f"Unsupported key type: {jwk.get('kty')}") + + def _validate_scopes(self, payload: Dict[str, Any]) -> None: + token_scopes = set() + scope_claim = payload.get("scope", "") + if isinstance(scope_claim, str): + token_scopes = set(scope_claim.split()) + elif isinstance(scope_claim, list): + token_scopes = set(scope_claim) + scp_claim = payload.get("scp", []) + if isinstance(scp_claim, list): + token_scopes.update(scp_claim) + missing_scopes = self.required_scopes - token_scopes + if missing_scopes: + raise M3ValidationError(f"Missing required scopes: {missing_scopes}") + + def _check_rate_limit(self, payload: Dict[str, Any]) -> None: + user_id = payload.get("sub", "unknown") + current_time = time.time() + window_start = current_time - self.rate_limit_window + user_requests = self._rate_limit_cache.get(user_id, []) + user_requests = [t for t in user_requests if t > window_start] + if len(user_requests) >= self.rate_limit_requests: + raise M3ValidationError("Rate limit exceeded") + user_requests.append(current_time) + self._rate_limit_cache[user_id] = user_requests + + def _set_enabled(self) -> None: + self.enabled = ( + self.config.get_env_var("M3_OAUTH2_ENABLED", "false").lower() == "true" + ) + + def 
_set_issuer_and_audience(self) -> None: + self.issuer_url = self.config.get_env_var( + "M3_OAUTH2_ISSUER_URL", raise_if_missing=True + ) + self.audience = self.config.get_env_var( + "M3_OAUTH2_AUDIENCE", raise_if_missing=True + ) + + def _set_required_scopes(self) -> None: + self.required_scopes = { + scope.strip() + for scope in self.config.get_env_var( + "M3_OAUTH2_REQUIRED_SCOPES", "read:mimic-data" + ).split(",") + } + + def _set_jwks_url(self) -> None: + self.jwks_url = self.config.get_env_var("M3_OAUTH2_JWKS_URL") or urljoin( + self.issuer_url.rstrip("/"), "/.well-known/jwks.json" + ) + + def _set_cache(self) -> None: + self.jwks_cache_ttl = 3600 + self._jwks_cache = {} + self._jwks_cache_time = 0 + + def _set_http_client(self) -> None: + self.http_client = httpx.Client(timeout=30.0) + + def _set_rate_limit(self) -> None: + self.rate_limit_enabled = True + self.rate_limit_requests = 100 + self.rate_limit_window = 3600 + self._rate_limit_cache = {} + + def _set_validation_flags(self) -> None: + self.validate_exp = ( + self.config.get_env_var("M3_OAUTH2_VALIDATE_EXP", "true").lower() == "true" + ) + self.validate_aud = ( + self.config.get_env_var("M3_OAUTH2_VALIDATE_AUD", "true").lower() == "true" + ) + self.validate_iss = ( + self.config.get_env_var("M3_OAUTH2_VALIDATE_ISS", "true").lower() == "true" + ) + + + +================================================ +FILE: mimic/components/data_io.py +================================================ +import logging +from pathlib import Path +from urllib.parse import urljoin, urlparse + +import polars as pl +import requests +from beartype import beartype +from beartype.typing import Any, Dict, List +from bs4 import BeautifulSoup +from rich.console import Console +from rich.progress import Progress + +from m3.core.config import M3Config +from m3.core.utils.exceptions import M3ValidationError +from m3.tools.mimic.components.utils import ( + get_dataset_config, + get_dataset_raw_files_path, +) + +logger = 
logging.getLogger(__name__) + +COMMON_USER_AGENT = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36" + +console = Console() + + +@beartype +class DataIO: + def __init__(self, config: M3Config) -> None: + self.config = config + + def initialize(self, dataset: str, path: Path) -> bool: + dataset_config = self._get_dataset_config(dataset) + raw_files_root_dir = self._get_raw_files_path(dataset) + logger.info(f"Initializing {dataset} at {path}") + console.print( + f"[turquoise4]๐Ÿ’ฌ Initializing {dataset} at {path}...[/turquoise4]" + ) + + console.print("[cyan]Downloading dataset files...[/cyan]") + if not self._download_dataset_files(dataset_config, raw_files_root_dir): + logger.error(f"Download failed for {dataset}.") + console.print(f"[red]โŒ Download failed for {dataset}.[/red]") + return False + + console.print("[cyan]Loading files to SQLite...[/cyan]") + if not self._etl_csv_collection_to_sqlite(raw_files_root_dir, path): + logger.error(f"ETL failed for {dataset}.") + console.print(f"[red]โŒ ETL failed for {dataset}.[/red]") + return False + + logger.info(f"Successfully initialized {dataset}.") + console.print(f"[green]โœ… Successfully initialized {dataset}.[/green]") + return True + + def _get_dataset_config(self, dataset: str) -> Dict[str, Any]: + config = get_dataset_config(dataset) + if not config: + raise M3ValidationError(f"Config not found for '{dataset}'.") + return config + + def _get_raw_files_path(self, dataset: str) -> Path: + path = get_dataset_raw_files_path(self.config, dataset) + if path is None: + raise M3ValidationError(f"Raw files path not found for '{dataset}'.") + return path + + def _download_dataset_files( + self, + dataset_config: Dict[str, Any], + raw_files_root_dir: Path, + ) -> bool: + base_listing_url = dataset_config["file_listing_url"] + subdirs_to_scan = dataset_config.get("subdirectories_to_scan", []) + session = requests.Session() + 
session.headers.update({"User-Agent": COMMON_USER_AGENT}) + all_files_to_process = [] + for subdir_name in subdirs_to_scan: + subdir_listing_url = urljoin(base_listing_url, f"{subdir_name}/") + csv_urls_in_subdir = self._scrape_urls_from_html_page( + subdir_listing_url, session + ) + if not csv_urls_in_subdir: + continue + for file_url in csv_urls_in_subdir: + url_path_obj = Path(urlparse(file_url).path) + base_listing_url_path_obj = Path(urlparse(base_listing_url).path) + relative_file_path = ( + url_path_obj.relative_to(base_listing_url_path_obj) + if url_path_obj.as_posix().startswith( + base_listing_url_path_obj.as_posix() + ) + else Path(subdir_name) / url_path_obj.name + ) + local_target_path = raw_files_root_dir / relative_file_path + all_files_to_process.append((file_url, local_target_path)) + if not all_files_to_process: + return False + unique_files_to_process = sorted(set(all_files_to_process), key=lambda x: x[1]) + downloaded_count = 0 + for file_url, target_filepath in unique_files_to_process: + if not self._download_single_file(file_url, target_filepath, session): + return False + downloaded_count += 1 + return downloaded_count == len(unique_files_to_process) + + def _download_single_file( + self, url: str, target_filepath: Path, session: requests.Session + ) -> bool: + try: + response = session.get(url, stream=True, timeout=60) + response.raise_for_status() + total_size = int(response.headers.get("content-length", 0)) + target_filepath.parent.mkdir(parents=True, exist_ok=True) + with open(target_filepath, "wb") as file_object: + with Progress(console=console, transient=True) as progress: + task = progress.add_task( + f"[cyan]Downloading {target_filepath.name}", total=total_size + ) + for chunk in response.iter_content(chunk_size=8192): + if chunk: + file_object.write(chunk) + progress.update(task, advance=len(chunk)) + return True + except Exception as e: + logger.error(f"Download failed for {url}: {e}") + if target_filepath.exists(): + 
target_filepath.unlink() + console.print(f"[red]โŒ Download failed for {url}: {e}[/red]") + return False + + def _scrape_urls_from_html_page( + self, page_url: str, session: requests.Session, file_suffix: str = ".csv.gz" + ) -> List[str]: + found_urls = [] + try: + page_response = session.get(page_url, timeout=30) + page_response.raise_for_status() + soup = BeautifulSoup(page_response.content, "html.parser") + for link_tag in soup.find_all("a", href=True): + href_path = link_tag["href"] + if ( + href_path.endswith(file_suffix) + and not href_path.startswith(("?", "#")) + and ".." not in href_path + ): + absolute_url = urljoin(page_url, href_path) + found_urls.append(absolute_url) + except Exception as e: + logger.error(f"Scrape failed for {page_url}: {e}") + console.print(f"[red]โŒ Scrape failed for {page_url}: {e}[/red]") + return found_urls + + def _etl_csv_collection_to_sqlite( + self, csv_source_dir: Path, db_target_path: Path + ) -> bool: + db_target_path.parent.mkdir(parents=True, exist_ok=True) + db_connection_uri = f"sqlite:///{db_target_path.resolve()}" + csv_file_paths = list(csv_source_dir.rglob("*.csv.gz")) + if not csv_file_paths: + return False + successfully_loaded_count = 0 + files_with_errors = [] + with Progress(console=console) as progress: + total_task = progress.add_task( + "[cyan]Loading CSV files to SQLite...", total=len(csv_file_paths) + ) + for csv_file_path in csv_file_paths: + relative_path = csv_file_path.relative_to(csv_source_dir) + table_name_parts = [part.lower() for part in relative_path.parts] + table_name = ( + "_".join(table_name_parts) + .replace(".csv.gz", "") + .replace("-", "_") + .replace(".", "_") + ) + try: + dataframe = self._load_csv_with_robust_parsing( + csv_file_path, table_name + ) + dataframe.write_database( + table_name=table_name, + connection=db_connection_uri, + if_table_exists="replace", + engine="sqlalchemy", + ) + successfully_loaded_count += 1 + except Exception as e: + err_msg = ( + f"ETL error for 
'{relative_path}' (table '{table_name}'): {e}" + ) + logger.error(err_msg, exc_info=True) + files_with_errors.append(err_msg) + console.print(f"[red]โŒ {err_msg}[/red]") + progress.update(total_task, advance=1) + + if files_with_errors: + logger.warning(f"ETL errors in {len(files_with_errors)} files:") + for detail in files_with_errors: + logger.warning(f" - {detail}") + + return successfully_loaded_count == len(csv_file_paths) + + def _load_csv_with_robust_parsing( + self, csv_file_path: Path, table_name: str + ) -> pl.DataFrame: + try: + dataframe = pl.read_csv( + source=csv_file_path, + infer_schema_length=None, + try_parse_dates=True, + ignore_errors=False, + null_values=["", "NULL", "null", "\\N", "NA"], + ) + if dataframe.height > 0: + empty_columns = [ + column + for column in dataframe.columns + if dataframe[column].is_null().all() + ] + if empty_columns: + logger.debug(f"Empty columns in {table_name}: {empty_columns}") + return dataframe + except Exception as e: + raise M3ValidationError(f"Failed to parse CSV {csv_file_path}: {e}") from e + + + +================================================ +FILE: mimic/components/utils.py +================================================ +import logging +from pathlib import Path + +import yaml +from beartype import beartype +from beartype.typing import Any, Dict + +from m3.core.config import M3Config +from m3.core.utils.exceptions import M3ValidationError + +logger = logging.getLogger(__name__) + + +@beartype +def load_supported_datasets() -> Dict[str, Dict[str, Any]]: + yaml_path = Path(__file__).parent.parent / "configurations" / "datasets.yaml" + if not yaml_path.exists(): + raise RuntimeError(f"datasets.yaml not found at {yaml_path}") + with open(yaml_path) as f: + return yaml.safe_load(f) + + +@beartype +def get_dataset_config(dataset_name: str) -> Dict[str, Any] | None: + datasets = load_supported_datasets() + return datasets.get(dataset_name.lower()) + + +@beartype +def get_default_database_path(base_config: 
M3Config, dataset_name: str) -> Path | None: + cfg = get_dataset_config(dataset_name) + if not cfg: + return None + default_filename = cfg.get("default_db_filename", f"{dataset_name}.db") + env_key = f"M3_{dataset_name.upper()}_DATA_DIR" + default_dir_str = base_config.get_env_var(env_key) + default_dir = ( + Path(default_dir_str) + if default_dir_str + else base_config.databases_dir / dataset_name + ) + return default_dir / default_filename + + +@beartype +def get_dataset_raw_files_path(base_config: M3Config, dataset_name: str) -> Path | None: + cfg = get_dataset_config(dataset_name) + if not cfg: + logger.warning(f"Unknown dataset: {dataset_name}") + return None + env_key = f"M3_{dataset_name.upper()}_RAW_DIR" + raw_dir_str = base_config.get_env_var(env_key) + path = ( + Path(raw_dir_str) + if raw_dir_str + else base_config.raw_files_dir / dataset_name.lower() + ) + path.mkdir(parents=True, exist_ok=True) + return path + + +@beartype +def load_security_config() -> Dict[str, Any]: + yaml_path = Path(__file__).parent.parent / "configurations" / "security.yaml" + if not yaml_path.exists(): + raise RuntimeError(f"security.yaml not found at {yaml_path}") + with open(yaml_path) as f: + return yaml.safe_load(f) + + +@beartype +def load_env_vars_config() -> Dict[str, Any]: + yaml_path = Path(__file__).parent.parent / "configurations" / "env_vars.yaml" + if not yaml_path.exists(): + raise M3ValidationError(f"env_vars.yaml not found at {yaml_path}") + try: + with open(yaml_path) as f: + config = yaml.safe_load(f) + if not isinstance(config, dict): + raise ValueError("Invalid YAML structure; expected a dictionary.") + logger.debug(f"Loaded env_vars.yaml from {yaml_path}") + return config + except (yaml.YAMLError, ValueError) as e: + raise M3ValidationError(f"Failed to load env_vars.yaml: {e}") from e + + +def validate_limit(limit: int) -> bool: + return isinstance(limit, int) and 0 < limit <= 1000 + + + +================================================ +FILE: 
mimic/configurations/datasets.yaml +================================================ +mimic-iv-demo: + file_listing_url: "https://physionet.org/files/mimic-iv-demo/2.2/" + subdirectories_to_scan: ["hosp", "icu"] + default_db_filename: "mimic_iv_demo.db" + primary_verification_table: "hosp_admissions" + + + +================================================ +FILE: mimic/configurations/env_vars.yaml +================================================ +core: + - key: M3_BACKEND + default: sqlite + required: false + description: "Backend type ('sqlite' or 'bigquery'). Determines data storage and query engine." + +backends: + sqlite: + - key: M3_DB_PATH + default: null + required: false + description: "Path to SQLite database file." + - key: M3_ICUSTAYS_TABLE + default: icu_icustays + required: false + description: "Table name for ICU stays in SQLite." + - key: M3_LABEVENTS_TABLE + default: hosp_labevents + required: false + description: "Table name for lab events in SQLite." + - key: M3_ADMISSIONS_TABLE + default: hosp_admissions + required: false + description: "Table name for admissions in SQLite." + bigquery: + - key: M3_PROJECT_ID + default: null + required: true + description: "GCP Project ID for BigQuery." + - key: GOOGLE_CLOUD_PROJECT + default: null + required: true + description: "GCP Project ID (alias for M3_PROJECT_ID)." + - key: M3_BIGQUERY_PREFIX + default: "`physionet-data.mimiciv_3_1_" + required: false + description: "Prefix for BigQuery table names." + - key: M3_BIGQUERY_HOSP_DATASET + default: mimiciv_3_1_hosp + required: false + description: "BigQuery dataset for hospital data." + - key: M3_BIGQUERY_ICU_DATASET + default: mimiciv_3_1_icu + required: false + description: "BigQuery dataset for ICU data." + - key: M3_BIGQUERY_PROJECT + default: physionet-data + required: false + description: "BigQuery project name." + +oauth2: + - key: M3_OAUTH2_ENABLED + default: false + required: false + description: "Enable OAuth2 authentication (true/false)." 
+ - key: M3_OAUTH2_ISSUER_URL + default: null + required: true + description: "OAuth2 issuer URL. (Required if OAuth2 is enabled.)" + - key: M3_OAUTH2_AUDIENCE + default: null + required: true + description: "OAuth2 audience. (Required if OAuth2 is enabled.)" + - key: M3_OAUTH2_REQUIRED_SCOPES + default: read:mimic-data + required: false + description: "Required OAuth2 scopes (comma-separated)." + - key: M3_OAUTH2_JWKS_URL + default: null + required: false + description: "OAuth2 JWKS URL (optional; auto-derived if unset)." + - key: M3_OAUTH2_VALIDATE_EXP + default: true + required: false + description: "Validate token expiration (true/false)." + - key: M3_OAUTH2_VALIDATE_AUD + default: true + required: false + description: "Validate token audience (true/false)." + - key: M3_OAUTH2_VALIDATE_ISS + default: true + required: false + description: "Validate token issuer (true/false)." + - key: M3_OAUTH2_TOKEN + default: "" + required: false + description: "OAuth2 access token (set at runtime)." 
+ + + +================================================ +FILE: mimic/configurations/security.yaml +================================================ +dangerous_keywords: + - INSERT + - UPDATE + - DELETE + - DROP + - CREATE + - ALTER + - TRUNCATE + - REPLACE + - MERGE + - EXEC + - EXECUTE + +injection_patterns: + - ["1=1", "Classic injection pattern"] + - ["OR 1=1", "Boolean injection pattern"] + - ["AND 1=1", "Boolean injection pattern"] + - ["OR '1'='1'", "String injection pattern"] + - ["AND '1'='1'", "String injection pattern"] + - ["WAITFOR", "Time-based injection"] + - ["SLEEP(", "Time-based injection"] + - ["BENCHMARK(", "Time-based injection"] + - ["LOAD_FILE(", "File access injection"] + - ["INTO OUTFILE", "File write injection"] + - ["INTO DUMPFILE", "File write injection"] + +suspicious_names: + - PASSWORD + - ADMIN + - USER + - LOGIN + - AUTH + - TOKEN + - CREDENTIAL + - SECRET + - KEY + - HASH + - SALT + - SESSION + - COOKIE diff --git a/src/m3/tools/m3toolbuilder/m3toolbuilder.py b/src/m3/tools/m3toolbuilder/m3toolbuilder.py new file mode 100644 index 0000000..5fecd8d --- /dev/null +++ b/src/m3/tools/m3toolbuilder/m3toolbuilder.py @@ -0,0 +1,299 @@ +import logging +from collections.abc import Callable +from pathlib import Path + +import yaml +from beartype import beartype +from beartype.typing import Any, Dict, List, Optional + +from m3.core.config import M3Config +from m3.core.tool.base import BaseTool +from m3.core.utils.exceptions import M3ValidationError + +logger = logging.getLogger(__name__) + + +@beartype +class M3ToolBuilder(BaseTool): + """M3ToolBuilder: Intelligent tool generator for M3 ecosystem using knowledge base to create new tools.""" + + def __init__( + self, + config: Optional[M3Config] = None, + knowledge_path: Optional[Path] = None, + ) -> None: + super().__init__() + self.config = config or M3Config() + default_knowledge_path = ( + Path(__file__).parent / "configurations" / "knowledge.yaml" + ) + try: + env_knowledge_path_str = 
self.config.get_env_var( + "M3_TOOLBUILDER_KNOWLEDGE_PATH", + default=None, + raise_if_missing=False, + ) + resolved_knowledge_path = knowledge_path or ( + Path(env_knowledge_path_str).resolve() + if env_knowledge_path_str + else default_knowledge_path.resolve() + ) + self.knowledge_path = resolved_knowledge_path + logger.info(f"Resolved knowledge path: {self.knowledge_path}") + self.knowledge = self._load_knowledge() + except (OSError, ValueError) as exc: + logger.error(f"Failed to resolve knowledge path: {exc}") + raise M3ValidationError( + f"Invalid knowledge path configuration: {exc}" + ) from exc + + def to_dict(self) -> Dict[str, Any]: + return { + "knowledge_path": str(self.knowledge_path), + } + + @classmethod + def from_dict(cls, params: Dict[str, Any]) -> "M3ToolBuilder": + knowledge_path: Optional[Path] = ( + Path(params["knowledge_path"]) if "knowledge_path" in params else None + ) + return cls( + knowledge_path=knowledge_path, + ) + + def actions(self) -> List[Callable]: + return [ + self.about_m3_tools, + self.best_practices, + self.generate_tool, + ] + + def generate_tool( + self, + tool_name: str, + description: str, + features: List[str], + ) -> Dict[str, str]: + try: + best_practices = self.best_practices(tool_name) + about_m3_tools = self.about_m3_tools() + tool_code = self._generate_tool(tool_name, description, features) + cli_code = self._generate_cli(tool_name) + init_code = self._generate_init(tool_name) + structure = self._generate_structure(tool_name, print_output=True) + return { + "best_practices": best_practices, + "about_m3_tools": about_m3_tools, + "tool_code": tool_code, + "cli_code": cli_code, + "init_code": init_code, + "structure": structure, + } + except Exception as exc: + logger.error(f"Error generating full tool for {tool_name}: {exc}") + raise M3ValidationError(f"Failed to generate full tool: {exc}") from exc + + def _generate_tool( + self, + tool_name: str, + description: str, + features: List[str], + ) -> str: + 
tool_name_cap = tool_name.capitalize() + formatted_features = "\n".join(f" - {feature}" for feature in features) + template = """ +import logging +from collections.abc import Callable + +from beartype import beartype +from beartype.typing import Any, Dict, List # No from typing, use beartype.typing, very important. + +from m3.core.tool.base import BaseTool # Do not forget the .base + +logger = logging.getLogger(__name__) + +@beartype +class {tool_name_cap}(BaseTool): + \"\"\"{description}\"\"\" + + def __init__(self) -> None: + super().__init__() + self.required_env_vars = {{}} # Add required env vars here, e.g., {{"MY_VAR": None}} for mandatory, or default value + + def actions(self) -> List[Callable]: + return [] # Add action functions here + + def initialize(self) -> None: + # Optional: Add setup logic, e.g., backend initialization, connections + # Features: + # {features} + pass + + def teardown(self) -> None: + # Optional: Add cleanup logic, e.g., close connections, release resources + pass + + def to_dict(self) -> Dict[str, Any]: + # Implement serialization: return dictionary of init params/state for saving + return {{}} + + @classmethod + def from_dict(cls, params: Dict[str, Any]) -> "{tool_name_cap}": + # Implement deserialization: reconstruct instance from params + return cls() +""" + code = template.format( + tool_name_cap=tool_name_cap, + description=description, + features=formatted_features, + ) + return code.strip() + + def _generate_cli( + self, + tool_name: str, + ) -> str: + tool_name_cap = tool_name.capitalize() + tool_name_lower = tool_name.lower() + template = """ +import typer +from beartype import beartype +from beartype.typing import Optional +from rich.console import Console + +from m3.core.tool.cli.base import BaseToolCLI, ToolConfig + +console = Console() + +@beartype +class {tool_name_cap}CLI(BaseToolCLI): + \"\"\"{tool_name_cap} Command Line Interface.\"\"\" + + @classmethod + def get_app(cls) -> Optional[typer.Typer]: + app = 
typer.Typer( + help="{tool_name_cap} tool commands.", + add_completion=False, + pretty_exceptions_show_locals=False, + rich_markup_mode="markdown", + ) + app.command(help="Initialize the {tool_name_cap} tool.")(cls.init) + app.command(help="Configure the {tool_name_cap} tool.")(cls.configure) + app.command(help="Display the current status of the {tool_name_cap} tool.")(cls.status) + return app + + @classmethod + def init(cls) -> dict: + pass # Implement init logic + + @classmethod + def configure(cls) -> ToolConfig: + pass # Implement configure logic + + @classmethod + def status(cls, verbose: bool = False) -> None: + pass # Implement status logic +""" + code = template.format( + tool_name_cap=tool_name_cap, + tool_name_lower=tool_name_lower, + ) + return code.strip() + + def _generate_init( + self, + tool_name: str, + ) -> str: + tool_name_cap = tool_name.capitalize() + tool_name_lower = tool_name.lower() + template = """ +from .{tool_name_lower} import {tool_name_cap} +from .cli import {tool_name_cap}CLI + +__all__ = [ + "{tool_name_cap}", + "{tool_name_cap}CLI", +] +""" + code = template.format( + tool_name_cap=tool_name_cap, + tool_name_lower=tool_name_lower, + ) + return code.strip() + + def about_m3_tools(self) -> str: + return "\n\n".join( + [ + self.knowledge["principles"]["idea_behind_m3_tools"], + self.knowledge["principles"]["importance_of_architecture"], + self.knowledge["principles"]["what_is_a_tool"], + ] + ) + + def best_practices(self, tool_name: str) -> str: + practices = "\n\n".join( + [ + self.knowledge["principles"]["python_best_practices"], + ] + ) + practices = practices.format(tool_name=tool_name) + return practices + + def _generate_structure(self, tool_name: str, print_output: bool = False) -> str: + tool_name_lower = tool_name.lower() + template = """ +Recommended directory structure and file names for the '{tool_name_cap}' tool: + +m3/ +โ””โ”€โ”€ tools/ + โ””โ”€โ”€ {tool_name_lower}/ + โ”œโ”€โ”€ __init__.py # Exports the tool and CLI 
classes + โ”œโ”€โ”€ {tool_name_lower}.py # Main tool class implementation + โ”œโ”€โ”€ cli.py # CLI class for command-line interactions + โ””โ”€โ”€ configurations/ # Optional directory for YAML configs + โ””โ”€โ”€ knowledge.yaml # Or other config files as needed; prefer YAML over hardcoding in code +""" + structure = template.format( + tool_name_cap=tool_name.capitalize(), + tool_name_lower=tool_name_lower, + ).strip() + if print_output: + logger.info( + f"Generated directory structure for '{tool_name}':\n{structure}" + ) + return structure + + def load_m3_codebase(self) -> str: + codebase_path = self.knowledge_path.parent / "tools_codebase.txt" + if not codebase_path.exists(): + raise M3ValidationError(f"Tools codebase file not found at {codebase_path}") + try: + return codebase_path.read_text() + except OSError as os_error: + logger.error(f"Failed to read tools codebase file: {os_error}") + raise M3ValidationError( + f"File access error for tools codebase: {os_error}" + ) from os_error + + def _load_knowledge(self) -> Dict[str, Any]: + if not self.knowledge_path.exists(): + raise M3ValidationError( + f"Knowledge YAML not found at {self.knowledge_path}" + ) + try: + with open(self.knowledge_path) as file_handle: + return yaml.safe_load(file_handle) + except yaml.YAMLError as yaml_error: + logger.error( + f"Failed to parse knowledge YAML at {self.knowledge_path}: {yaml_error}" + ) + raise M3ValidationError( + f"Invalid YAML in knowledge file: {yaml_error}" + ) from yaml_error + except OSError as os_error: + logger.error( + f"Failed to open knowledge file at {self.knowledge_path}: {os_error}" + ) + raise M3ValidationError( + f"File access error for knowledge YAML: {os_error}" + ) from os_error From 9ffe855dfe273ff0f183786999c1f5152dc07293 Mon Sep 17 00:00:00 2001 From: Provost Simon Date: Mon, 21 Jul 2025 11:03:37 +0100 Subject: [PATCH 15/15] feat(tools): add m3_tool_builder CLI --- src/m3/tools/m3toolbuilder/__init__.py | 7 ++ src/m3/tools/m3toolbuilder/cli.py | 103 
+++++++++++++++++++++++++ 2 files changed, 110 insertions(+) create mode 100644 src/m3/tools/m3toolbuilder/__init__.py create mode 100644 src/m3/tools/m3toolbuilder/cli.py diff --git a/src/m3/tools/m3toolbuilder/__init__.py b/src/m3/tools/m3toolbuilder/__init__.py new file mode 100644 index 0000000..16e60fc --- /dev/null +++ b/src/m3/tools/m3toolbuilder/__init__.py @@ -0,0 +1,7 @@ +from .cli import M3ToolBuilderCLI +from .m3toolbuilder import M3ToolBuilder + +__all__ = [ + "M3ToolBuilder", + "M3ToolBuilderCLI", +] diff --git a/src/m3/tools/m3toolbuilder/cli.py b/src/m3/tools/m3toolbuilder/cli.py new file mode 100644 index 0000000..b976caa --- /dev/null +++ b/src/m3/tools/m3toolbuilder/cli.py @@ -0,0 +1,103 @@ +import json +import logging +import os +from pathlib import Path + +import typer +from beartype import beartype +from beartype.typing import Annotated, Dict, Optional +from rich.console import Console + +from m3.core.tool.cli.base import BaseToolCLI, ToolConfig + +logger = logging.getLogger(__name__) + +console = Console() + + +@beartype +class M3ToolBuilderCLI(BaseToolCLI): + """M3ToolBuilder Command Line Interface.""" + + @classmethod + def get_app(cls) -> Optional[typer.Typer]: + app = typer.Typer( + help="M3ToolBuilder tool commands.", + add_completion=False, + pretty_exceptions_show_locals=False, + rich_markup_mode="markdown", + ) + app.command(help="Initialize the M3ToolBuilder tool.")(cls.init) + app.command(help="Configure the M3ToolBuilder tool.")(cls.configure) + app.command(help="Display the current status of the M3ToolBuilder tool.")( + cls.status + ) + return app + + @classmethod + def init( + cls, + dataset: Annotated[ + Optional[str], + typer.Option( + "--dataset", + help="Dataset to initialize (e.g., 'mimic-iv-demo'). 
If not provided, uses default knowledge base.", + ), + ] = None, + ) -> Dict[str, str]: + console.print("[turquoise4]๐Ÿ’ฌ Initializing M3ToolBuilder...[/turquoise4]") + knowledge_path = Path(__file__).parent / "configurations" / "knowledge.yaml" + if not knowledge_path.exists(): + raise ValueError(f"Knowledge base not found at {knowledge_path}") + console.print("[green]โœ… Knowledge base loaded successfully.[/green]") + return {"status": "initialized", "dataset": dataset or "default"} + + @classmethod + def configure( + cls, + output: Annotated[ + Optional[str], + typer.Option( + "--output", + "-o", + help="Path to save configuration JSON (defaults to 'm3toolbuilder_config.json').", + ), + ] = "m3toolbuilder_config.json", + verbose: Annotated[ + bool, + typer.Option("--verbose", "-v", help="Enable verbose output."), + ] = False, + ) -> ToolConfig: + console.print("[turquoise4]๐Ÿ’ฌ Configuring M3ToolBuilder...[/turquoise4]") + env_vars = { + "M3_TOOLBUILDER_KNOWLEDGE_PATH": str( + Path(__file__).parent / "configurations" / "knowledge.yaml" + ), + } + tool_params = {} + config_dict = {"env_vars": env_vars, "tool_params": tool_params} + + with open(output, "w") as f: + json.dump(config_dict, f, indent=4) + console.print(f"[green]โœ… Config saved to {output}.[/green]") + + if verbose: + console.print(json.dumps(config_dict, indent=2)) + return config_dict + + @classmethod + def status( + cls, + verbose: Annotated[ + bool, + typer.Option("--verbose", "-v", help="Enable verbose output."), + ] = False, + ) -> None: + console.print("[turquoise4]๐Ÿ’ฌ M3ToolBuilder Status:[/turquoise4]") + knowledge_path = os.getenv("M3_TOOLBUILDER_KNOWLEDGE_PATH", "Not set") + console.print(f"Knowledge Base: {knowledge_path}") + if verbose: + console.print("Environment Variables:") + for key, value in os.environ.items(): + if key.startswith("M3_TOOLBUILDER_"): + console.print(f" {key}: {value}")